Repository: hive
Updated Branches:
  refs/heads/master 3e0d87f81 -> ee2d3189f


HIVE-11755: Incorrect method called with Kerberos enabled in AccumuloStorageHandler (Josh Elser via Brock Noland)

Signed-off-by: Ashutosh Chauhan <hashut...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ee2d3189
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ee2d3189
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ee2d3189

Branch: refs/heads/master
Commit: ee2d3189ff9a7e8bd604b2e036d53632a9b4e616
Parents: 3e0d87f
Author: Josh Elser <josh.el...@gmail.com>
Authored: Tue Sep 8 14:46:00 2015 -0800
Committer: Ashutosh Chauhan <hashut...@apache.org>
Committed: Sat Oct 24 14:30:14 2015 -0700

----------------------------------------------------------------------
 .../hive/accumulo/HiveAccumuloHelper.java       | 55 ++++++++++---
 .../mr/HiveAccumuloTableOutputFormat.java       | 50 ++++++++----
 .../hive/accumulo/TestHiveAccumuloHelper.java   | 69 +++++++++++++++-
 .../mr/TestHiveAccumuloTableOutputFormat.java   | 86 +++++++++++++++++++-
 4 files changed, 229 insertions(+), 31 deletions(-)
----------------------------------------------------------------------
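
For context before the per-file diffs: HiveAccumuloHelper.setZooKeeperInstance() calls the
AccumuloOutputFormat/AccumuloInputFormat setZooKeeperInstance(JobConf, ClientConfiguration)
overload through reflection for SASL support. Reflection wraps the IllegalStateException that
Accumulo throws when connector or instance information is already set on the JobConf in an
InvocationTargetException, so callers that catch the ISE to tolerate that case never saw it
once Kerberos was enabled. The patch unwraps the cause and re-throws it, and splits the
reflective call into a package-private method so tests can stub it. A minimal, stand-alone
sketch of the unwrap pattern (Target, configure() and invokeReflectively() are illustrative
placeholders, not Hive or Accumulo code):

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

public class UnwrapSketch {
  static class Target {
    // Hypothetical stand-in for the reflectively-invoked Accumulo setter.
    public static void configure(String instance) {
      throw new IllegalStateException("already configured: " + instance);
    }
  }

  static void invokeReflectively(String instance) throws Exception {
    Method m = Target.class.getMethod("configure", String.class);
    try {
      m.invoke(null, instance);
    } catch (InvocationTargetException e) {
      Throwable cause = e.getCause();
      if (cause instanceof IllegalStateException) {
        // Re-throw the original ISE so the caller can work around it, as the fixed
        // HiveAccumuloHelper.setZooKeeperInstance() now does.
        throw (IllegalStateException) cause;
      }
      throw new Exception("Failed to invoke configure method", e);
    }
  }

  public static void main(String[] args) {
    try {
      invokeReflectively("accumulo_instance");
    } catch (IllegalStateException expected) {
      System.out.println("caller saw the original ISE: " + expected.getMessage());
    } catch (Exception other) {
      System.out.println("unexpected: " + other);
    }
  }
}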


http://git-wip-us.apache.org/repos/asf/hive/blob/ee2d3189/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/HiveAccumuloHelper.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/HiveAccumuloHelper.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/HiveAccumuloHelper.java
index dfc5d03..71b8b77 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/HiveAccumuloHelper.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/HiveAccumuloHelper.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hive.accumulo;
 import static com.google.common.base.Preconditions.checkNotNull;
 
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.util.Collection;
 
@@ -238,22 +239,56 @@ public class HiveAccumuloHelper {
   public void setZooKeeperInstance(JobConf jobConf, Class<?> inputOrOutputFormatClass, String
       zookeepers, String instanceName, boolean useSasl) throws IOException {
     try {
-      Class<?> clientConfigClass = JavaUtils.loadClass(CLIENT_CONFIGURATION_CLASS_NAME);
-
-      // get the ClientConfiguration
-      Object clientConfig = getClientConfiguration(zookeepers, instanceName, useSasl);
-
-      // AccumuloOutputFormat.setZooKeeperInstance(JobConf, ClientConfiguration) or
-      // AccumuloInputFormat.setZooKeeperInstance(JobConf, ClientConfiguration)
-      Method setZooKeeperMethod = inputOrOutputFormatClass.getMethod(
-          SET_ZOOKEEPER_INSTANCE_METHOD_NAME, JobConf.class, clientConfigClass);
-      setZooKeeperMethod.invoke(null, jobConf, clientConfig);
+      setZooKeeperInstanceWithReflection(jobConf, inputOrOutputFormatClass, zookeepers,
+          instanceName, useSasl);
+    } catch (InvocationTargetException e) {
+      Throwable cause = e.getCause();
+      if (null != cause && cause instanceof IllegalStateException) {
+        throw (IllegalStateException) cause;
+      }
+      throw new IOException("Failed to invoke setZooKeeperInstance method", e);
+    } catch (IllegalStateException e) {
+      // re-throw the ISE so the caller can work around the silly impl that throws this in the
+      // first place.
+      throw e;
     } catch (Exception e) {
       throw new IOException("Failed to invoke setZooKeeperInstance method", e);
     }
   }
 
   /**
+   * Wrap the setZooKeeperInstance reflected-call into its own method for testing
+   *
+   * @param jobConf
+   *          The JobConf
+   * @param inputOrOutputFormatClass
+   *          The InputFormat or OutputFormat class
+   * @param zookeepers
+   *          ZooKeeper hosts
+   * @param instanceName
+   *          Accumulo instance name
+   * @param useSasl
+   *          Is SASL enabled
+   * @throws IOException
+   *           When invocation of the method fails
+   */
+  void setZooKeeperInstanceWithReflection(JobConf jobConf, Class<?> inputOrOutputFormatClass, String
+      zookeepers, String instanceName, boolean useSasl) throws IOException, ClassNotFoundException,
+      NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException,
+      InvocationTargetException {
+    Class<?> clientConfigClass = JavaUtils.loadClass(CLIENT_CONFIGURATION_CLASS_NAME);
+
+    // get the ClientConfiguration
+    Object clientConfig = getClientConfiguration(zookeepers, instanceName, useSasl);
+
+    // AccumuloOutputFormat.setZooKeeperInstance(JobConf, ClientConfiguration) or
+    // AccumuloInputFormat.setZooKeeperInstance(JobConf, ClientConfiguration)
+    Method setZooKeeperMethod = inputOrOutputFormatClass.getMethod(
+        SET_ZOOKEEPER_INSTANCE_METHOD_NAME, JobConf.class, clientConfigClass);
+    setZooKeeperMethod.invoke(null, jobConf, clientConfig);
+  }
+      
+  /**
   * Wrapper around <code>ConfiguratorBase.unwrapAuthenticationToken</code> which only exists in
    * 1.7.0 and new. Uses reflection to not break compat.
    *
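
As the comments above note, the ClientConfiguration overload of setZooKeeperInstance() and
ConfiguratorBase.unwrapAuthenticationToken only exist in newer Accumulo releases, so the helper
resolves them by name at runtime instead of linking against them. A rough, hypothetical sketch
of that lookup-and-invoke compatibility pattern (NewerApi and invokeIfPresent are placeholders,
not the real Accumulo API):

import java.lang.reflect.Method;

public class ReflectiveCompatSketch {

  // Stand-in for a class whose newer releases add a static configure(String) method.
  static class NewerApi {
    public static void configure(String value) {
      System.out.println("configured with " + value);
    }
  }

  static boolean invokeIfPresent(Class<?> target, String methodName, String arg)
      throws Exception {
    try {
      Method m = target.getMethod(methodName, String.class);
      m.invoke(null, arg); // static method, so no receiver instance
      return true;
    } catch (NoSuchMethodException e) {
      // Older release: the method simply is not there; fall back or skip.
      return false;
    }
  }

  public static void main(String[] args) throws Exception {
    System.out.println(invokeIfPresent(NewerApi.class, "configure", "zk1:2181"));      // true
    System.out.println(invokeIfPresent(NewerApi.class, "somethingNewer", "zk1:2181")); // false
  }
}

The price of this flexibility is the InvocationTargetException wrapping handled in the hunk above.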

http://git-wip-us.apache.org/repos/asf/hive/blob/ee2d3189/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableOutputFormat.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableOutputFormat.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableOutputFormat.java
index 0189c07..3ae5431 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableOutputFormat.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableOutputFormat.java
@@ -61,7 +61,7 @@ public class HiveAccumuloTableOutputFormat extends AccumuloOutputFormat {
   }
 
   protected void configureAccumuloOutputFormat(JobConf job) throws IOException {
-    AccumuloConnectionParameters cnxnParams = new AccumuloConnectionParameters(job);
+    AccumuloConnectionParameters cnxnParams = getConnectionParams(job);
 
     final String tableName = job.get(AccumuloSerDeParameters.TABLE_NAME);
 
@@ -72,35 +72,35 @@ public class HiveAccumuloTableOutputFormat extends AccumuloOutputFormat {
     // Set the necessary Accumulo information
     try {
       if (cnxnParams.useMockInstance()) {
-        setAccumuloMockInstance(job, cnxnParams.getAccumuloInstanceName());
+        setMockInstanceWithErrorChecking(job, cnxnParams.getAccumuloInstanceName());
       } else {
         // Accumulo instance name with ZK quorum
-        setAccumuloZooKeeperInstance(job, cnxnParams.getAccumuloInstanceName(),
+        setZooKeeperInstanceWithErrorChecking(job, cnxnParams.getAccumuloInstanceName(),
             cnxnParams.getZooKeepers(), cnxnParams.useSasl());
       }
 
       // Extract the delegation Token from the UGI and add it to the job
       // The AccumuloOutputFormat will look for it there.
       if (cnxnParams.useSasl()) {
-        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
-        if (!ugi.hasKerberosCredentials()) {
-          helper.addTokenFromUserToJobConf(ugi, job);
+        UserGroupInformation ugi = getCurrentUser();
+        if (!hasKerberosCredentials(ugi)) {
+          getHelper().addTokenFromUserToJobConf(ugi, job);
         } else {
           // Still in the local JVM, can use Kerberos credentials
           try {
             Connector connector = cnxnParams.getConnector();
-            AuthenticationToken token = helper.getDelegationToken(connector);
+            AuthenticationToken token = getHelper().getDelegationToken(connector);
 
             // Send the DelegationToken down to the Configuration for Accumulo to use
-            setConnectorInfo(job, cnxnParams.getAccumuloUserName(), token);
+            setConnectorInfoWithErrorChecking(job, cnxnParams.getAccumuloUserName(), token);
 
             // Convert the Accumulo token in a Hadoop token
-            Token<? extends TokenIdentifier> accumuloToken = helper.getHadoopToken(token);
+            Token<? extends TokenIdentifier> accumuloToken = getHelper().getHadoopToken(token);
 
             log.info("Adding Hadoop Token for Accumulo to Job's Credentials");
 
             // Add the Hadoop token to the JobConf
-            helper.mergeTokenIntoJobConf(job, accumuloToken);
+            getHelper().mergeTokenIntoJobConf(job, accumuloToken);
 
             // Make sure the UGI contains the token too for good measure
             if (!ugi.addToken(accumuloToken)) {
@@ -111,7 +111,7 @@ public class HiveAccumuloTableOutputFormat extends AccumuloOutputFormat {
           }
         }
       } else {
-        setAccumuloConnectorInfo(job, cnxnParams.getAccumuloUserName(),
+        setConnectorInfoWithErrorChecking(job, cnxnParams.getAccumuloUserName(),
             new PasswordToken(cnxnParams.getAccumuloPassword()));
       }
 
@@ -125,7 +125,7 @@ public class HiveAccumuloTableOutputFormat extends AccumuloOutputFormat {
 
   // Non-static methods to wrap the static AccumuloOutputFormat methods to enable testing
 
-  protected void setAccumuloConnectorInfo(JobConf conf, String username, AuthenticationToken token)
+  protected void setConnectorInfoWithErrorChecking(JobConf conf, String username, AuthenticationToken token)
       throws AccumuloSecurityException {
     try {
       AccumuloOutputFormat.setConnectorInfo(conf, username, token);
@@ -136,14 +136,14 @@ public class HiveAccumuloTableOutputFormat extends AccumuloOutputFormat {
   }
 
   @SuppressWarnings("deprecation")
-  protected void setAccumuloZooKeeperInstance(JobConf conf, String instanceName, String zookeepers,
+  protected void setZooKeeperInstanceWithErrorChecking(JobConf conf, String instanceName, String zookeepers,
       boolean isSasl) throws IOException {
     try {
       if (isSasl) {
        // Reflection to support Accumulo 1.5. Remove when Accumulo 1.5 support is dropped
        // 1.6 works with the deprecated 1.5 method, but must use reflection for 1.7-only
        // SASL support
-        helper.setZooKeeperInstance(conf, AccumuloOutputFormat.class, zookeepers, instanceName,
+        getHelper().setZooKeeperInstance(conf, AccumuloOutputFormat.class, zookeepers, instanceName,
             isSasl);
       } else {
         AccumuloOutputFormat.setZooKeeperInstance(conf, instanceName, zookeepers);
@@ -155,7 +155,7 @@ public class HiveAccumuloTableOutputFormat extends AccumuloOutputFormat {
     }
   }
 
-  protected void setAccumuloMockInstance(JobConf conf, String instanceName) {
+  protected void setMockInstanceWithErrorChecking(JobConf conf, String instanceName) {
     try {
       AccumuloOutputFormat.setMockInstance(conf, instanceName);
     } catch (IllegalStateException e) {
@@ -167,4 +167,24 @@ public class HiveAccumuloTableOutputFormat extends AccumuloOutputFormat {
   protected void setDefaultAccumuloTableName(JobConf conf, String tableName) {
     AccumuloOutputFormat.setDefaultTableName(conf, tableName);
   }
+
+  HiveAccumuloHelper getHelper() {
+    // Allows mocking in testing.
+    return helper;
+  }
+
+  AccumuloConnectionParameters getConnectionParams(JobConf conf) {
+    // Allows mocking in testing.
+    return new AccumuloConnectionParameters(conf);
+  }
+
+  boolean hasKerberosCredentials(UserGroupInformation ugi) {
+    // Allows mocking in testing.
+    return ugi.hasKerberosCredentials();
+  }
+
+  UserGroupInformation getCurrentUser() throws IOException {
+    // Allows mocking in testing.
+    return UserGroupInformation.getCurrentUser();
+  }
 }
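
Most of the HiveAccumuloTableOutputFormat changes above are testability seams: static or
environment-bound calls (the AccumuloOutputFormat setters, UserGroupInformation.getCurrentUser(),
the helper field) are now reached through overridable instance methods so the SASL path can be
exercised with Mockito. A minimal sketch of the seam idiom with placeholder names (SeamSketch is
not Hive code):

import java.io.IOException;

import org.apache.hadoop.security.UserGroupInformation;
import org.mockito.Mockito;

public class SeamSketch {

  public void doWork() throws IOException {
    // Production code calls the seam, never the static API directly.
    UserGroupInformation ugi = getCurrentUser();
    System.out.println("running as " + ugi.getShortUserName());
  }

  UserGroupInformation getCurrentUser() throws IOException {
    // Allows mocking in testing, mirroring HiveAccumuloTableOutputFormat.getCurrentUser().
    return UserGroupInformation.getCurrentUser();
  }

  public static void main(String[] args) throws IOException {
    SeamSketch mock = Mockito.mock(SeamSketch.class);
    Mockito.doCallRealMethod().when(mock).doWork();
    Mockito.when(mock.getCurrentUser())
        .thenReturn(UserGroupInformation.createRemoteUser("fake-user"));
    mock.doWork(); // prints "running as fake-user" without touching the real login user
  }
}

The new tests below use the same shape: doCallRealMethod() on the entry point and stubs on the
seams (getHelper(), getConnectionParams(), getCurrentUser(), hasKerberosCredentials()).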

http://git-wip-us.apache.org/repos/asf/hive/blob/ee2d3189/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/TestHiveAccumuloHelper.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/TestHiveAccumuloHelper.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/TestHiveAccumuloHelper.java
index 88544f0..406768a 100644
--- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/TestHiveAccumuloHelper.java
+++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/TestHiveAccumuloHelper.java
@@ -18,18 +18,23 @@ package org.apache.hadoop.hive.accumulo;
 
 import static org.junit.Assert.assertEquals;
 
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
 import java.util.Collection;
 
+import org.apache.accumulo.core.client.mapred.AccumuloInputFormat;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
+import org.apache.log4j.Logger;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
 
 public class TestHiveAccumuloHelper {
+  private static final Logger log = Logger.getLogger(TestHiveAccumuloHelper.class);
 
   private HiveAccumuloHelper helper;
 
@@ -46,7 +51,13 @@ public class TestHiveAccumuloHelper {
 
     Mockito.when(token.getService()).thenReturn(service);
 
-    helper.mergeTokenIntoJobConf(jobConf, token);
+    try {
+      helper.mergeTokenIntoJobConf(jobConf, token);
+    } catch (IOException e) {
+      // Hadoop 1 doesn't support credential merging, so this will fail.
+      log.info("Ignoring exception, likely coming from Hadoop 1", e);
+      return;
+    }
 
     Collection<Token<?>> tokens = jobConf.getCredentials().getAllTokens();
     assertEquals(1, tokens.size());
@@ -66,10 +77,64 @@ public class TestHiveAccumuloHelper {
     Mockito.when(token.getKind()).thenReturn(HiveAccumuloHelper.ACCUMULO_SERVICE);
     Mockito.when(token.getService()).thenReturn(service);
 
-    helper.addTokenFromUserToJobConf(ugi, jobConf);
+    try {
+      helper.addTokenFromUserToJobConf(ugi, jobConf);
+    } catch (IOException e) {
+      // Hadoop 1 doesn't support credential merging, so this will fail.
+      log.info("Ignoring exception, likely coming from Hadoop 1", e);
+      return;
+    }
 
     Collection<Token<?>> credTokens = jobConf.getCredentials().getAllTokens();
     assertEquals(1, credTokens.size());
     assertEquals(service, credTokens.iterator().next().getService());
   }
+
+  @Test(expected = IllegalStateException.class)
+  public void testISEIsPropagated() throws Exception {
+    final HiveAccumuloHelper helper = Mockito.mock(HiveAccumuloHelper.class);
+
+    final JobConf jobConf = Mockito.mock(JobConf.class);
+    final Class<?> inputOrOutputFormatClass = AccumuloInputFormat.class;
+    final String zookeepers = "localhost:2181";
+    final String instanceName = "accumulo_instance";
+    final boolean useSasl = false;
+
+    // Call the real "public" method
+    Mockito.doCallRealMethod().when(helper).setZooKeeperInstance(jobConf, inputOrOutputFormatClass,
+        zookeepers, instanceName, useSasl);
+
+    // Mock the private one to throw the ISE
+    Mockito.doThrow(new IllegalStateException()).when(helper).
+        setZooKeeperInstanceWithReflection(jobConf, inputOrOutputFormatClass, zookeepers,
+            instanceName, useSasl);
+
+    // Should throw an IllegalStateException
+    helper.setZooKeeperInstance(jobConf, inputOrOutputFormatClass, zookeepers, instanceName,
+        useSasl);
+  }
+
+  @Test(expected = IllegalStateException.class)
+  public void testISEIsPropagatedWithReflection() throws Exception {
+    final HiveAccumuloHelper helper = Mockito.mock(HiveAccumuloHelper.class);
+
+    final JobConf jobConf = Mockito.mock(JobConf.class);
+    final Class<?> inputOrOutputFormatClass = AccumuloInputFormat.class;
+    final String zookeepers = "localhost:2181";
+    final String instanceName = "accumulo_instance";
+    final boolean useSasl = false;
+
+    // Call the real "public" method
+    Mockito.doCallRealMethod().when(helper).setZooKeeperInstance(jobConf, inputOrOutputFormatClass,
+        zookeepers, instanceName, useSasl);
+
+    // Mock the private one to throw an InvocationTargetException wrapping an ISE
+    Mockito.doThrow(new InvocationTargetException(new IllegalStateException())).when(helper).
+        setZooKeeperInstanceWithReflection(jobConf, inputOrOutputFormatClass, zookeepers,
+            instanceName, useSasl);
+
+    // Should throw an IllegalStateException
+    helper.setZooKeeperInstance(jobConf, inputOrOutputFormatClass, zookeepers, instanceName,
+        useSasl);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/ee2d3189/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableOutputFormat.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableOutputFormat.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableOutputFormat.java
index 5d3f15b..5fdab28 100644
--- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableOutputFormat.java
+++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableOutputFormat.java
@@ -18,15 +18,18 @@ package org.apache.hadoop.hive.accumulo.mr;
 
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map.Entry;
 import java.util.Properties;
 
+import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.mock.MockInstance;
+import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
@@ -36,6 +39,7 @@ import org.apache.accumulo.core.security.ColumnVisibility;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.accumulo.AccumuloConnectionParameters;
+import org.apache.hadoop.hive.accumulo.HiveAccumuloHelper;
 import org.apache.hadoop.hive.accumulo.columns.ColumnEncoding;
 import org.apache.hadoop.hive.accumulo.serde.AccumuloRowSerializer;
 import org.apache.hadoop.hive.accumulo.serde.AccumuloSerDe;
@@ -54,6 +58,10 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordWriter;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
@@ -87,6 +95,15 @@ public class TestHiveAccumuloTableOutputFormat {
     conf.set(AccumuloConnectionParameters.INSTANCE_NAME, instanceName);
     conf.set(AccumuloConnectionParameters.ZOOKEEPERS, zookeepers);
     conf.set(AccumuloConnectionParameters.TABLE_NAME, outputTable);
+
+    System.setProperty("java.security.krb5.realm", "accumulo");
+    System.setProperty("java.security.krb5.kdc", "fake");
+  }
+
+  @After
+  public void cleanup() {
+    System.setProperty("java.security.krb5.realm", "");
+    System.setProperty("java.security.krb5.kdc", "");
   }
 
   @Test
@@ -94,12 +111,72 @@ public class TestHiveAccumuloTableOutputFormat {
     HiveAccumuloTableOutputFormat outputFormat = Mockito.mock(HiveAccumuloTableOutputFormat.class);
 
     Mockito.doCallRealMethod().when(outputFormat).configureAccumuloOutputFormat(conf);
+    Mockito.doCallRealMethod().when(outputFormat).getConnectionParams(conf);
+
+    outputFormat.configureAccumuloOutputFormat(conf);
+
+    Mockito.verify(outputFormat).setConnectorInfoWithErrorChecking(conf, user, new PasswordToken(password));
+    Mockito.verify(outputFormat).setZooKeeperInstanceWithErrorChecking(conf, instanceName, zookeepers, false);
+    Mockito.verify(outputFormat).setDefaultAccumuloTableName(conf, outputTable);
+  }
+
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  @Test
+  public void testSaslConfiguration() throws IOException, AccumuloException, AccumuloSecurityException {
+    final HiveAccumuloTableOutputFormat outputFormat = Mockito.mock(HiveAccumuloTableOutputFormat.class);
+    final AuthenticationToken authToken = Mockito.mock(AuthenticationToken.class);
+    final Token hadoopToken = Mockito.mock(Token.class);
+    final HiveAccumuloHelper helper = Mockito.mock(HiveAccumuloHelper.class);
+    final AccumuloConnectionParameters cnxnParams = Mockito.mock(AccumuloConnectionParameters.class);
+    final Connector connector = Mockito.mock(Connector.class);
+
+    // Set UGI to use Kerberos
+    // Have to use the string constant to support hadoop 1
+    conf.set("hadoop.security.authentication", "kerberos");
+    UserGroupInformation.setConfiguration(conf);
+
+    // Set the current UGI to a fake user
+    UserGroupInformation user1 = UserGroupInformation.createUserForTesting(user, new String[0]);
+    // Use that as the "current user"
+    Mockito.when(outputFormat.getCurrentUser()).thenReturn(user1);
+
+    // Turn off password-based auth; SASL is enabled via the mocked AccumuloConnectionParameters below
+    conf.unset(AccumuloConnectionParameters.USER_PASS);
+
+    // Call the real method instead of the mock
+    Mockito.doCallRealMethod().when(outputFormat).configureAccumuloOutputFormat(conf);
+
+    // Return our mocked objects
+    Mockito.when(outputFormat.getHelper()).thenReturn(helper);
+    Mockito.when(outputFormat.getConnectionParams(conf)).thenReturn(cnxnParams);
+    Mockito.when(cnxnParams.getConnector()).thenReturn(connector);
+    Mockito.when(helper.getDelegationToken(connector)).thenReturn(authToken);
+    Mockito.when(helper.getHadoopToken(authToken)).thenReturn(hadoopToken);
 
+    // Stub AccumuloConnectionParameters actions
+    Mockito.when(cnxnParams.useSasl()).thenReturn(true);
+    Mockito.when(cnxnParams.getAccumuloUserName()).thenReturn(user);
+    Mockito.when(cnxnParams.getAccumuloInstanceName()).thenReturn(instanceName);
+    Mockito.when(cnxnParams.getZooKeepers()).thenReturn(zookeepers);
+
+    // Stub OutputFormat actions
+    Mockito.when(outputFormat.hasKerberosCredentials(user1)).thenReturn(true);
+
+    // Invoke the method
     outputFormat.configureAccumuloOutputFormat(conf);
 
-    Mockito.verify(outputFormat).setAccumuloConnectorInfo(conf, user, new PasswordToken(password));
-    Mockito.verify(outputFormat).setAccumuloZooKeeperInstance(conf, instanceName, zookeepers, false);
+    // The AccumuloOutputFormat methods
+    Mockito.verify(outputFormat).setZooKeeperInstanceWithErrorChecking(conf, instanceName, zookeepers, true);
+    Mockito.verify(outputFormat).setConnectorInfoWithErrorChecking(conf, user, authToken);
     Mockito.verify(outputFormat).setDefaultAccumuloTableName(conf, outputTable);
+
+    // Other methods we expect
+    Mockito.verify(helper).mergeTokenIntoJobConf(conf, hadoopToken);
+
+    // Make sure the token made it into the UGI
+    Collection<Token<? extends TokenIdentifier>> tokens = user1.getTokens();
+    Assert.assertEquals(1, tokens.size());
+    Assert.assertEquals(hadoopToken, tokens.iterator().next());
   }
 
   @Test
@@ -109,11 +186,12 @@ public class TestHiveAccumuloTableOutputFormat {
     conf.unset(AccumuloConnectionParameters.ZOOKEEPERS);
 
     Mockito.doCallRealMethod().when(outputFormat).configureAccumuloOutputFormat(conf);
+    Mockito.doCallRealMethod().when(outputFormat).getConnectionParams(conf);
 
     outputFormat.configureAccumuloOutputFormat(conf);
 
-    Mockito.verify(outputFormat).setAccumuloConnectorInfo(conf, user, new PasswordToken(password));
-    Mockito.verify(outputFormat).setAccumuloMockInstance(conf, instanceName);
+    Mockito.verify(outputFormat).setConnectorInfoWithErrorChecking(conf, user, new PasswordToken(password));
+    Mockito.verify(outputFormat).setMockInstanceWithErrorChecking(conf, instanceName);
     Mockito.verify(outputFormat).setDefaultAccumuloTableName(conf, outputTable);
   }
 

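One more note on the TestHiveAccumuloTableOutputFormat setup()/cleanup() pair above: pointing
java.security.krb5.realm and java.security.krb5.kdc at dummy values, together with
hadoop.security.authentication=kerberos, appears to be enough for UserGroupInformation to report
a Kerberos security configuration without a reachable KDC, which is what lets the SASL test run
on an ordinary build machine. A stand-alone sketch of that bootstrap (the class name and the
"hiveuser" principal are placeholders):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class FakeKerberosSetupSketch {
  public static void main(String[] args) throws Exception {
    System.setProperty("java.security.krb5.realm", "accumulo"); // placeholder realm, as in the test
    System.setProperty("java.security.krb5.kdc", "fake");       // no real KDC is ever contacted

    Configuration conf = new Configuration();
    conf.set("hadoop.security.authentication", "kerberos");
    UserGroupInformation.setConfiguration(conf);

    UserGroupInformation user =
        UserGroupInformation.createUserForTesting("hiveuser", new String[0]);
    System.out.println("security enabled: " + UserGroupInformation.isSecurityEnabled());
    System.out.println("test user: " + user.getUserName());
  }
}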