Author: brock
Date: Fri Oct 10 23:20:01 2014
New Revision: 1631005

URL: http://svn.apache.org/r1631005
Log:
HIVE-8338 - Add ip and command to semantic analyzer hook context (Mohit Sabharwal via Brock)
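For context, the change below exposes the client IP address and the submitted command on the semantic analyzer hook context, alongside the existing user name. A minimal sketch of a hook that consumes the new accessors follows; the package and class name (AuditSemanticAnalyzerHook) are hypothetical, but HiveSemanticAnalyzerHook, preAnalyze/postAnalyze, and getUserName()/getIpAddress()/getCommand() are exactly the API touched by this patch.

package org.example.hooks; // hypothetical package

import java.io.Serializable;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** Hypothetical audit hook that logs who ran what, and from where. */
public class AuditSemanticAnalyzerHook implements HiveSemanticAnalyzerHook {
  private static final Logger LOG = LoggerFactory.getLogger(AuditSemanticAnalyzerHook.class);

  @Override
  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
      throws SemanticException {
    // New in HIVE-8338: the ip address and command are available on the hook context.
    LOG.info("user=" + context.getUserName()
        + " ip=" + context.getIpAddress()
        + " command=" + context.getCommand());
    return ast; // no rewrite of the AST
  }

  @Override
  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
      List<Task<? extends Serializable>> rootTasks) throws SemanticException {
    // The same information is available after analysis as well.
    LOG.info("analyzed command=" + context.getCommand() + " rootTasks=" + rootTasks.size());
  }
}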
Modified:
    hive/trunk/itests/hive-minikdc/pom.xml
    hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java

Modified: hive/trunk/itests/hive-minikdc/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-minikdc/pom.xml?rev=1631005&r1=1631004&r2=1631005&view=diff
==============================================================================
--- hive/trunk/itests/hive-minikdc/pom.xml (original)
+++ hive/trunk/itests/hive-minikdc/pom.xml Fri Oct 10 23:20:01 2014
@@ -60,6 +60,13 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
+      <artifactId>hive-it-unit</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
       <artifactId>hive-jdbc</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>

Modified: hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java?rev=1631005&r1=1631004&r2=1631005&view=diff
==============================================================================
--- hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java (original)
+++ hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java Fri Oct 10 23:20:01 2014
@@ -28,73 +28,32 @@ import junit.framework.Assert;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
-import org.apache.hadoop.hive.ql.hooks.HookContext;
-import org.apache.hadoop.hive.ql.hooks.HookContext.HookType;
+import org.apache.hadoop.hive.hooks.TestHs2Hooks.PostExecHook;
+import org.apache.hadoop.hive.hooks.TestHs2Hooks.PreExecHook;
+import org.apache.hadoop.hive.hooks.TestHs2Hooks.SemanticAnalysisHook;
 import org.apache.hive.jdbc.miniHS2.MiniHS2;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;

 /**
  * Tests information retrieved from hooks, in Kerberos mode.
  */
 public class TestHs2HooksWithMiniKdc {
-  private static final Logger LOG = LoggerFactory.getLogger(TestHs2HooksWithMiniKdc.class);
-
-  public static class PostExecHook implements ExecuteWithHookContext {
-    private static String userName;
-    private static String ipAddress;
-    private static String operation;
-    private static Throwable error;
-
-    public void run(HookContext hookContext) {
-      try {
-        if (hookContext.getHookType().equals(HookType.POST_EXEC_HOOK)) {
-          ipAddress = hookContext.getIpAddress();
-          userName = hookContext.getUserName();
-          operation = hookContext.getOperationName();
-        }
-      } catch (Throwable t) {
-        LOG.error("Error in PostExecHook: " + t, t);
-        error = t;
-      }
-    }
-  }
-
-  public static class PreExecHook implements ExecuteWithHookContext {
-    private static String userName;
-    private static String ipAddress;
-    private static String operation;
-    private static Throwable error;
-
-    public void run(HookContext hookContext) {
-      try {
-        if (hookContext.getHookType().equals(HookType.PRE_EXEC_HOOK)) {
-          ipAddress = hookContext.getIpAddress();
-          userName = hookContext.getUserName();
-          operation = hookContext.getOperationName();
-        }
-      } catch (Throwable t) {
-        LOG.error("Error in PreExecHook: " + t, t);
-        error = t;
-      }
-    }
-  }

   private static MiniHS2 miniHS2 = null;
   private static MiniHiveKdc miniHiveKdc = null;
   private static Map<String, String> confOverlay = new HashMap<String, String>();
   private Connection hs2Conn;

   @BeforeClass
-  public static void beforeTest() throws Exception {
+  public static void setUpBeforeClass() throws Exception {
     Class.forName(MiniHS2.getJdbcDriverName());
     confOverlay.put(ConfVars.POSTEXECHOOKS.varname, PostExecHook.class.getName());
     confOverlay.put(ConfVars.PREEXECHOOKS.varname, PreExecHook.class.getName());
+    confOverlay.put(ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
+        SemanticAnalysisHook.class.getName());

     HiveConf hiveConf = new HiveConf();
     miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
@@ -102,12 +61,30 @@ public class TestHs2HooksWithMiniKdc {
     miniHS2.start(confOverlay);
   }

+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    miniHS2.stop();
+  }
+
   @Before
-  public void setUp() throws Exception {
+  public void setUpTest() throws Exception {
+    PreExecHook.userName = null;
+    PreExecHook.ipAddress = null;
+    PreExecHook.operation = null;
+    PreExecHook.error = null;
+    PostExecHook.userName = null;
+    PostExecHook.ipAddress = null;
+    PostExecHook.operation = null;
+    PostExecHook.error = null;
+    SemanticAnalysisHook.userName = null;
+    SemanticAnalysisHook.ipAddress = null;
+    SemanticAnalysisHook.command = null;
+    SemanticAnalysisHook.preAnalyzeError = null;
+    SemanticAnalysisHook.postAnalyzeError = null;
   }

   @After
-  public void tearDown() throws Exception {
+  public void tearDownTest() throws Exception {
     if (hs2Conn != null) {
       try {
         hs2Conn.close();
@@ -117,16 +94,11 @@ public class TestHs2HooksWithMiniKdc {
     }
   }

-  @AfterClass
-  public static void afterTest() throws Exception {
-    miniHS2.stop();
-  }
-
   /**
-   * Test get IpAddress and username from hook.
+   * Test that hook context properties are correctly set.
    */
   @Test
-  public void testIpUserName() throws Throwable {
+  public void testHookContexts() throws Throwable {
     miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_USER_1);
     hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL());

@@ -155,5 +127,24 @@ public class TestHs2HooksWithMiniKdc {
     Assert.assertEquals(MiniHiveKdc.HIVE_TEST_USER_1, PreExecHook.userName);
     Assert.assertTrue(PreExecHook.ipAddress, PreExecHook.ipAddress.contains("127.0.0.1"));
     Assert.assertEquals("SHOWTABLES", PreExecHook.operation);
+
+    error = SemanticAnalysisHook.preAnalyzeError;
+    if (error != null) {
+      throw error;
+    }
+    error = SemanticAnalysisHook.postAnalyzeError;
+    if (error != null) {
+      throw error;
+    }
+
+    Assert.assertNotNull(SemanticAnalysisHook.ipAddress,
+        "semantic hook context ipaddress is null");
+    Assert.assertNotNull(SemanticAnalysisHook.userName,
+        "semantic hook context userName is null");
+    Assert.assertNotNull(SemanticAnalysisHook.command ,
+        "semantic hook context command is null");
+    Assert.assertTrue(SemanticAnalysisHook.ipAddress,
+        SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
+    Assert.assertEquals("show tables", SemanticAnalysisHook.command);
   }
-}
+}
\ No newline at end of file

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java?rev=1631005&r1=1631004&r2=1631005&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java Fri Oct 10 23:20:01 2014
@@ -19,8 +19,11 @@
 //The tests here are heavily based on some timing, so there is some chance to fail.
 package org.apache.hadoop.hive.hooks;

-import java.util.Properties;
+import java.io.Serializable;
+import java.lang.Override;
 import java.sql.Statement;
+import java.util.List;
+import java.util.Properties;

 import junit.framework.Assert;

@@ -28,9 +31,15 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
 import org.apache.hadoop.hive.ql.hooks.HookContext;
 import org.apache.hadoop.hive.ql.hooks.HookContext.HookType;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHook;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hive.jdbc.HiveConnection;
 import org.apache.hive.service.server.HiveServer2;
 import org.junit.AfterClass;
+import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -44,10 +53,10 @@ public class TestHs2Hooks {
   private static HiveServer2 hiveServer2;

   public static class PostExecHook implements ExecuteWithHookContext {
-    private static String userName;
-    private static String ipAddress;
-    private static String operation;
-    private static Throwable error;
+    public static String userName;
+    public static String ipAddress;
+    public static String operation;
+    public static Throwable error;

     public void run(HookContext hookContext) {
       try {
@@ -64,10 +73,10 @@ public class TestHs2Hooks {
   }

   public static class PreExecHook implements ExecuteWithHookContext {
-    private static String userName;
-    private static String ipAddress;
-    private static String operation;
-    private static Throwable error;
+    public static String userName;
+    public static String ipAddress;
+    public static String operation;
+    public static Throwable error;

     public void run(HookContext hookContext) {
       try {
@@ -83,6 +92,41 @@ public class TestHs2Hooks {
     }
   }

+  public static class SemanticAnalysisHook implements HiveSemanticAnalyzerHook {
+    public static String userName;
+    public static String command;
+    public static String ipAddress;
+    public static Throwable preAnalyzeError;
+    public static Throwable postAnalyzeError;
+
+    @Override
+    public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context,
+        ASTNode ast) throws SemanticException {
+      try {
+        userName = context.getUserName();
+        ipAddress = context.getIpAddress();
+        command = context.getCommand();
+      } catch (Throwable t) {
+        LOG.error("Error in semantic analysis hook preAnalyze: " + t, t);
+        preAnalyzeError = t;
+      }
+      return ast;
+    }
+
+    @Override
+    public void postAnalyze(HiveSemanticAnalyzerHookContext context,
+        List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+      try {
+        userName = context.getUserName();
+        ipAddress = context.getIpAddress();
+        command = context.getCommand();
+      } catch (Throwable t) {
+        LOG.error("Error in semantic analysis hook postAnalyze: " + t, t);
+        postAnalyzeError = t;
+      }
+    }
+  }
+
   /**
    * @throws java.lang.Exception
    */
@@ -93,6 +137,8 @@ public class TestHs2Hooks {
         PreExecHook.class.getName());
     hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS,
         PostExecHook.class.getName());
+    hiveConf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK,
+        SemanticAnalysisHook.class.getName());

     hiveServer2 = new HiveServer2();
     hiveServer2.init(hiveConf);
@@ -107,16 +153,32 @@ public class TestHs2Hooks {
     }
   }

+  @Before
+  public void setUpTest() throws Exception {
+    PreExecHook.userName = null;
+    PreExecHook.ipAddress = null;
+    PreExecHook.operation = null;
+    PreExecHook.error = null;
+    PostExecHook.userName = null;
+    PostExecHook.ipAddress = null;
+    PostExecHook.operation = null;
+    PostExecHook.error = null;
+    SemanticAnalysisHook.userName = null;
+    SemanticAnalysisHook.ipAddress = null;
+    SemanticAnalysisHook.command = null;
+    SemanticAnalysisHook.preAnalyzeError = null;
+    SemanticAnalysisHook.postAnalyzeError = null;
+  }
+
   /**
-   * Test get IpAddress and username from hook.
+   * Test that hook context properties are correctly set.
    */
   @Test
-  public void testIpUserName() throws Throwable {
+  public void testHookContexts() throws Throwable {
     Properties connProp = new Properties();
     connProp.setProperty("user", System.getProperty("user.name"));
     connProp.setProperty("password", "");
     HiveConnection connection = new HiveConnection("jdbc:hive2://localhost:10000/default", connProp);
-
     Statement stmt = connection.createStatement();
     stmt.executeQuery("show databases");
     stmt.executeQuery("show tables");
@@ -142,6 +204,24 @@
     Assert.assertNotNull(PreExecHook.operation , "operation is null");
     Assert.assertTrue(PreExecHook.ipAddress, PreExecHook.ipAddress.contains("127.0.0.1"));
     Assert.assertEquals("SHOWTABLES", PreExecHook.operation);
+
+    error = SemanticAnalysisHook.preAnalyzeError;
+    if (error != null) {
+      throw error;
+    }
+    error = SemanticAnalysisHook.postAnalyzeError;
+    if (error != null) {
+      throw error;
+    }
+
+    Assert.assertNotNull(SemanticAnalysisHook.ipAddress,
+        "semantic hook context ipaddress is null");
+    Assert.assertNotNull(SemanticAnalysisHook.userName,
+        "semantic hook context userName is null");
+    Assert.assertNotNull(SemanticAnalysisHook.command ,
+        "semantic hook context command is null");
+    Assert.assertTrue(SemanticAnalysisHook.ipAddress,
+        SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
+    Assert.assertEquals("show tables", SemanticAnalysisHook.command);
   }
 }
-

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1631005&r1=1631004&r2=1631005&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Fri Oct 10 23:20:01 2014
@@ -409,6 +409,8 @@ public class Driver implements CommandPr
         HiveSemanticAnalyzerHookContext hookCtx = new HiveSemanticAnalyzerHookContextImpl();
         hookCtx.setConf(conf);
         hookCtx.setUserName(userName);
+        hookCtx.setIpAddress(SessionState.get().getUserIpAddress());
+        hookCtx.setCommand(command);
         for (HiveSemanticAnalyzerHook hook : saHooks) {
           tree = hook.preAnalyze(hookCtx, tree);
         }
       }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java?rev=1631005&r1=1631004&r2=1631005&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java Fri Oct 10 23:20:01 2014
@@ -57,4 +57,12 @@ public interface HiveSemanticAnalyzerHoo
   public String getUserName();

   public void setUserName(String userName);
+
+  public String getIpAddress();
+
+  public void setIpAddress(String ipAddress);
+
+  public String getCommand();
+
+  public void setCommand(String command);
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java?rev=1631005&r1=1631004&r2=1631005&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java Fri Oct 10 23:20:01 2014
@@ -33,6 +33,8 @@ public class HiveSemanticAnalyzerHookCon
   Set<ReadEntity> inputs = null;
   Set<WriteEntity> outputs = null;
   private String userName;
+  private String ipAddress;
+  private String command;

   @Override
   public Hive getHive() throws HiveException {
@@ -73,4 +75,24 @@ public class HiveSemanticAnalyzerHookCon
   public void setUserName(String userName) {
     this.userName = userName;
   }
+
+  @Override
+  public String getIpAddress() {
+    return ipAddress;
+  }
+
+  @Override
+  public void setIpAddress(String ipAddress) {
+    this.ipAddress = ipAddress;
+  }
+
+  @Override
+  public String getCommand() {
+    return command;
+  }
+
+  @Override
+  public void setCommand(String command) {
+    this.command = command;
+  }
 }
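
A note on wiring: as the updated tests show, the semantic analyzer hook is registered through HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK (the hive.semantic.analyzer.hook property in hive-site.xml). A minimal sketch mirroring what TestHs2Hooks does, assuming the hypothetical AuditSemanticAnalyzerHook above and an embedded HiveServer2 started via its init/start lifecycle:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.service.server.HiveServer2;

// Hypothetical launcher showing how the hook is registered programmatically.
public class StartHs2WithAuditHook {
  public static void main(String[] args) throws Exception {
    HiveConf hiveConf = new HiveConf();
    // Register the semantic analyzer hook, as TestHs2Hooks.setUpBeforeClass does.
    hiveConf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK,
        AuditSemanticAnalyzerHook.class.getName());

    HiveServer2 hiveServer2 = new HiveServer2();
    hiveServer2.init(hiveConf);
    hiveServer2.start();
  }
}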