This is an automated email from the ASF dual-hosted git repository.
mwalch pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/accumulo.git
The following commit(s) were added to refs/heads/master by this push:
new bdd54ed #408 - Removed uses of deprecated ClientConfiguration (#430)
bdd54ed is described below
commit bdd54edae805ed842973720a4e15aef92e8e8579
Author: Mike Walch <[email protected]>
AuthorDate: Tue Apr 24 15:37:11 2018 -0400
#408 - Removed uses of deprecated ClientConfiguration (#430)
---
.../core/client/mapred/AbstractInputFormat.java | 4 +-
.../mapred/AccumuloMultiTableInputFormat.java | 3 -
.../core/client/mapred/AccumuloOutputFormat.java | 1 -
.../core/client/mapreduce/AbstractInputFormat.java | 1 -
.../mapreduce/AccumuloMultiTableInputFormat.java | 6 +-
.../client/mapreduce/AccumuloOutputFormat.java | 1 -
.../mapreduce/lib/impl/ConfiguratorBase.java | 1 -
.../accumulo/core/client/ZooKeeperInstance.java | 18 +-
.../core/client/impl/ClientConfConverter.java | 170 ++++++++++++++++-
.../accumulo/core/client/impl/ClientContext.java | 170 +++--------------
.../accumulo/core/rpc/SaslConnectionParams.java | 56 ++----
.../accumulo/core/trace/DistributedTrace.java | 4 +-
.../core/client/ClientConfigurationTest.java | 6 +-
.../core/client/ZooKeeperInstanceTest.java | 35 +++-
.../core/client/impl/ClientContextTest.java | 6 +-
.../core/client/impl/TableOperationsImplTest.java | 4 +-
.../core/client/impl/TabletLocatorImplTest.java | 5 +-
.../core/client/impl/ThriftTransportKeyTest.java | 65 ++-----
.../core/rpc/SaslConnectionParamsTest.java | 204 ++++++---------------
.../main/java/org/apache/accumulo/proxy/Proxy.java | 7 +-
.../server/rpc/SaslServerConnectionParams.java | 11 +-
.../server/util/RemoveEntriesForMissingFiles.java | 3 +-
.../server/util/VerifyTabletAssignments.java | 4 +-
.../accumulo/server/AccumuloServerContextTest.java | 115 ++++++------
.../server/rpc/SaslServerConnectionParamsTest.java | 62 +++----
.../replication/ReplicationProcessorTest.java | 6 +-
.../test/mapreduce/AccumuloInputFormatIT.java | 31 +---
27 files changed, 433 insertions(+), 566 deletions(-)
diff --git
a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
index f58a8a3..36792a7 100644
---
a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
+++
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
@@ -271,7 +271,6 @@ public abstract class AbstractInputFormat<K,V> implements
InputFormat<K,V> {
* the Hadoop context for the configured job
* @return an Accumulo instance
* @since 1.5.0
- * @see #setZooKeeperInstance(JobConf, ClientConfiguration)
*/
protected static Instance getInstance(JobConf job) {
return InputConfigurator.getInstance(CLASS, job);
@@ -487,8 +486,7 @@ public abstract class AbstractInputFormat<K,V> implements
InputFormat<K,V> {
String table = baseSplit.getTableName();
// in case the table name changed, we can still use the previous name
for terms of
- // configuration,
- // but the scanner will use the table id resolved at job setup time
+ // configuration, but the scanner will use the table id resolved at job
setup time
InputTableConfig tableConfig = getInputTableConfig(job,
baseSplit.getTableName());
log.debug("Creating connector with user: " + principal);
diff --git
a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormat.java
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormat.java
index 871efa8..12b4dbb 100644
---
a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormat.java
+++
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormat.java
@@ -19,7 +19,6 @@ package org.apache.accumulo.core.client.mapred;
import java.io.IOException;
import java.util.Map;
-import org.apache.accumulo.core.client.ClientConfiguration;
import org.apache.accumulo.core.client.ConnectionInfo;
import org.apache.accumulo.core.client.mapred.InputFormatBase.RecordReaderBase;
import org.apache.accumulo.core.client.mapreduce.InputTableConfig;
@@ -41,9 +40,7 @@ import org.apache.hadoop.mapred.Reporter;
*
* <ul>
* <li>{@link AccumuloInputFormat#setConnectionInfo(JobConf, ConnectionInfo)}
- * <li>{@link AccumuloInputFormat#setConnectorInfo(JobConf, String, String)}
* <li>{@link AccumuloInputFormat#setScanAuthorizations(JobConf,
org.apache.accumulo.core.security.Authorizations)}
- * <li>{@link AccumuloInputFormat#setZooKeeperInstance(JobConf,
ClientConfiguration)}
* <li>{@link
AccumuloMultiTableInputFormat#setInputTableConfigs(org.apache.hadoop.mapred.JobConf,
java.util.Map)}
* </ul>
*
diff --git
a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
index 1daf9e3..c273892 100644
---
a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
+++
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
@@ -236,7 +236,6 @@ public class AccumuloOutputFormat implements
OutputFormat<Text,Mutation> {
* the Hadoop context for the configured job
* @return an Accumulo instance
* @since 1.5.0
- * @see #setZooKeeperInstance(JobConf, ClientConfiguration)
*/
protected static Instance getInstance(JobConf job) {
return OutputConfigurator.getInstance(CLASS, job);
diff --git
a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
index 2b54efe..b7adbbb 100644
---
a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
+++
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
@@ -274,7 +274,6 @@ public abstract class AbstractInputFormat<K,V> extends
InputFormat<K,V> {
* the Hadoop context for the configured job
* @return an Accumulo instance
* @since 1.5.0
- * @see #setZooKeeperInstance(Job, ClientConfiguration)
*/
protected static Instance getInstance(JobContext context) {
return InputConfigurator.getInstance(CLASS, context.getConfiguration());
diff --git
a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java
index 9d73a55..8c94a78 100644
---
a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java
+++
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java
@@ -22,10 +22,9 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;
-import org.apache.accumulo.core.client.ClientConfiguration;
+import org.apache.accumulo.core.client.ConnectionInfo;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator;
-import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
@@ -43,9 +42,8 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
* The user must specify the following via static configurator methods:
*
* <ul>
- * <li>{@link AccumuloMultiTableInputFormat#setConnectorInfo(Job, String,
AuthenticationToken)}
+ * <li>{@link AccumuloMultiTableInputFormat#setConnectionInfo(Job,
ConnectionInfo)}
* <li>{@link AccumuloMultiTableInputFormat#setScanAuthorizations(Job,
Authorizations)}
- * <li>{@link AccumuloMultiTableInputFormat#setZooKeeperInstance(Job,
ClientConfiguration)}
* <li>{@link AccumuloMultiTableInputFormat#setInputTableConfigs(Job, Map)}
* </ul>
*
diff --git
a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
index ae099c6..90a0a62 100644
---
a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
+++
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
@@ -238,7 +238,6 @@ public class AccumuloOutputFormat extends
OutputFormat<Text,Mutation> {
* the Hadoop context for the configured job
* @return an Accumulo instance
* @since 1.5.0
- * @see #setZooKeeperInstance(Job, ClientConfiguration)
*/
protected static Instance getInstance(JobContext context) {
return OutputConfigurator.getInstance(CLASS, context.getConfiguration());
diff --git
a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
index 7bfd4f9..9223b5a 100644
---
a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
+++
b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
@@ -350,7 +350,6 @@ public class ConfiguratorBase {
* the Hadoop configuration object to configure
* @return an Accumulo instance
* @since 1.6.0
- * @see #setZooKeeperInstance(Class, Configuration, ClientConfiguration)
*/
public static Instance getInstance(Class<?> implementingClass, Configuration
conf) {
String instanceType = conf.get(enumToConfKey(implementingClass,
InstanceOpts.TYPE), "");
diff --git
a/core/src/main/java/org/apache/accumulo/core/client/ZooKeeperInstance.java
b/core/src/main/java/org/apache/accumulo/core/client/ZooKeeperInstance.java
index 27c5fea..d7e97cf 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/ZooKeeperInstance.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/ZooKeeperInstance.java
@@ -26,7 +26,7 @@ import java.util.UUID;
import java.util.concurrent.TimeUnit;
import org.apache.accumulo.core.Constants;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
+import org.apache.accumulo.core.client.impl.ClientConfConverter;
import org.apache.accumulo.core.client.impl.ClientContext;
import org.apache.accumulo.core.client.impl.ConnectorImpl;
import org.apache.accumulo.core.client.impl.Credentials;
@@ -75,6 +75,7 @@ public class ZooKeeperInstance implements Instance {
private final int zooKeepersSessionTimeOut;
+ @SuppressWarnings("deprecation")
private ClientConfiguration clientConf;
/**
@@ -85,6 +86,7 @@ public class ZooKeeperInstance implements Instance {
* A comma separated list of zoo keeper server locations. Each
location can contain an
* optional port, of the format host:port.
*/
+ @SuppressWarnings("deprecation")
public ZooKeeperInstance(String instanceName, String zooKeepers) {
this(ClientConfiguration.loadDefault().withInstance(instanceName).withZkHosts(zooKeepers));
}
@@ -137,17 +139,18 @@ public class ZooKeeperInstance implements Instance {
.withZkTimeout(sessionTimeout));
}
+ @SuppressWarnings("deprecation")
ZooKeeperInstance(ClientConfiguration config, ZooCacheFactory zcf) {
checkArgument(config != null, "config is null");
this.clientConf = config;
- this.instanceId = clientConf.get(ClientProperty.INSTANCE_ID);
- this.instanceName = clientConf.get(ClientProperty.INSTANCE_NAME);
+ this.instanceId =
clientConf.get(ClientConfiguration.ClientProperty.INSTANCE_ID);
+ this.instanceName =
clientConf.get(ClientConfiguration.ClientProperty.INSTANCE_NAME);
if ((instanceId == null) == (instanceName == null))
throw new IllegalArgumentException(
"Expected exactly one of instanceName and instanceId to be set");
- this.zooKeepers = clientConf.get(ClientProperty.INSTANCE_ZK_HOST);
+ this.zooKeepers =
clientConf.get(ClientConfiguration.ClientProperty.INSTANCE_ZK_HOST);
this.zooKeepersSessionTimeOut = (int) ConfigurationTypeHelper
- .getTimeInMillis(clientConf.get(ClientProperty.INSTANCE_ZK_TIMEOUT));
+
.getTimeInMillis(clientConf.get(ClientConfiguration.ClientProperty.INSTANCE_ZK_TIMEOUT));
zooCache = zcf.getZooCache(zooKeepers, zooKeepersSessionTimeOut);
if (null != instanceName) {
// Validates that the provided instanceName actually exists
@@ -162,6 +165,7 @@ public class ZooKeeperInstance implements Instance {
* specific to Accumulo.
* @since 1.9.0
*/
+ @SuppressWarnings("deprecation")
public ZooKeeperInstance(ClientConfiguration config) {
this(config, new ZooCacheFactory());
}
@@ -282,8 +286,8 @@ public class ZooKeeperInstance implements Instance {
@Override
public Connector getConnector(String principal, AuthenticationToken token)
throws AccumuloException, AccumuloSecurityException {
- return new ConnectorImpl(
- new ClientContext(this, new Credentials(principal, token),
clientConf));
+ return new ConnectorImpl(new ClientContext(this, new
Credentials(principal, token),
+ ClientConfConverter.toProperties(clientConf)));
}
@Override
diff --git
a/core/src/main/java/org/apache/accumulo/core/client/impl/ClientConfConverter.java
b/core/src/main/java/org/apache/accumulo/core/client/impl/ClientConfConverter.java
index d5080f0..16cb286 100644
---
a/core/src/main/java/org/apache/accumulo/core/client/impl/ClientConfConverter.java
+++
b/core/src/main/java/org/apache/accumulo/core/client/impl/ClientConfConverter.java
@@ -16,17 +16,31 @@
*/
package org.apache.accumulo.core.client.impl;
+import static
org.apache.accumulo.core.client.ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY;
+
+import java.io.IOException;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
+import java.util.function.Predicate;
import org.apache.accumulo.core.client.ClientConfiguration;
+import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.ClientProperty;
+import org.apache.accumulo.core.conf.CredentialProviderFactoryShim;
+import org.apache.accumulo.core.conf.DefaultConfiguration;
+import org.apache.accumulo.core.conf.Property;
+import org.apache.accumulo.core.rpc.SaslConnectionParams;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@SuppressWarnings("deprecation")
public class ClientConfConverter {
+ private static final Logger log =
LoggerFactory.getLogger(ClientConfConverter.class);
private static Map<String,String> confProps = new HashMap<>();
private static Map<String,String> propsConf = new HashMap<>();
@@ -56,7 +70,7 @@ public class ClientConfConverter {
propsConf.put(ClientProperty.SASL_QOP.getKey(),
ClientConfiguration.ClientProperty.RPC_SASL_QOP.getKey());
propsConf.put(ClientProperty.SASL_KERBEROS_SERVER_PRIMARY.getKey(),
- ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY.getKey());
+ KERBEROS_SERVER_PRIMARY.getKey());
for (Map.Entry<String,String> entry : propsConf.entrySet()) {
confProps.put(entry.getValue(), entry.getKey());
@@ -99,4 +113,158 @@ public class ClientConfConverter {
}
return props;
}
+
+ public static Properties toProperties(AccumuloConfiguration config) {
+ return toProperties(toClientConf(config));
+ }
+
+ public static AccumuloConfiguration toAccumuloConf(Properties properties) {
+ return toAccumuloConf(toClientConf(properties));
+ }
+
+ /**
+ * A utility method for converting client configuration to a standard
configuration object for use
+ * internally.
+ *
+ * @param config
+ * the original {@link ClientConfiguration}
+ * @return the client configuration presented in the form of an {@link
AccumuloConfiguration}
+ */
+ public static AccumuloConfiguration toAccumuloConf(final ClientConfiguration
config) {
+
+ final AccumuloConfiguration defaults = DefaultConfiguration.getInstance();
+
+ return new AccumuloConfiguration() {
+
+ @Override
+ public String get(Property property) {
+ final String key = property.getKey();
+
+ // Attempt to load sensitive properties from a CredentialProvider, if
configured
+ if (property.isSensitive()) {
+ org.apache.hadoop.conf.Configuration hadoopConf =
getHadoopConfiguration();
+ if (null != hadoopConf) {
+ try {
+ char[] value = CredentialProviderFactoryShim
+ .getValueFromCredentialProvider(hadoopConf, key);
+ if (null != value) {
+ log.trace("Loaded sensitive value for {} from
CredentialProvider", key);
+ return new String(value);
+ } else {
+ log.trace("Tried to load sensitive value for {} from
CredentialProvider, "
+ + "but none was found", key);
+ }
+ } catch (IOException e) {
+ log.warn("Failed to extract sensitive property ({}) from Hadoop
CredentialProvider,"
+ + " falling back to base AccumuloConfiguration", key, e);
+ }
+ }
+ }
+
+ if (config.containsKey(key))
+ return config.getString(key);
+ else {
+ // Reconstitute the server kerberos property from the client config
+ if (Property.GENERAL_KERBEROS_PRINCIPAL == property) {
+ if (config.containsKey(KERBEROS_SERVER_PRIMARY.getKey())) {
+ // Avoid providing a realm since we don't know what it is...
+ return config.getString(KERBEROS_SERVER_PRIMARY.getKey()) +
"/_HOST@"
+ + SaslConnectionParams.getDefaultRealm();
+ }
+ }
+ return defaults.get(property);
+ }
+ }
+
+ @Override
+ public void getProperties(Map<String,String> props, Predicate<String>
filter) {
+ defaults.getProperties(props, filter);
+
+ Iterator<String> keyIter = config.getKeys();
+ while (keyIter.hasNext()) {
+ String key = keyIter.next();
+ if (filter.test(key))
+ props.put(key, config.getString(key));
+ }
+
+ // Two client props that don't exist on the server config. Client
doesn't need to know about
+ // the Kerberos instance from the principal, but servers do
+ // Automatically reconstruct the server property when converting a
client config.
+ if (props.containsKey(KERBEROS_SERVER_PRIMARY.getKey())) {
+ final String serverPrimary =
props.remove(KERBEROS_SERVER_PRIMARY.getKey());
+ if (filter.test(Property.GENERAL_KERBEROS_PRINCIPAL.getKey())) {
+ // Use the _HOST expansion. It should be unnecessary in "client
land".
+ props.put(Property.GENERAL_KERBEROS_PRINCIPAL.getKey(),
+ serverPrimary + "/_HOST@" +
SaslConnectionParams.getDefaultRealm());
+ }
+ }
+
+ // Attempt to load sensitive properties from a CredentialProvider, if
configured
+ org.apache.hadoop.conf.Configuration hadoopConf =
getHadoopConfiguration();
+ if (null != hadoopConf) {
+ try {
+ for (String key :
CredentialProviderFactoryShim.getKeys(hadoopConf)) {
+ if (!Property.isValidPropertyKey(key) ||
!Property.isSensitive(key)) {
+ continue;
+ }
+
+ if (filter.test(key)) {
+ char[] value = CredentialProviderFactoryShim
+ .getValueFromCredentialProvider(hadoopConf, key);
+ if (null != value) {
+ props.put(key, new String(value));
+ }
+ }
+ }
+ } catch (IOException e) {
+ log.warn("Failed to extract sensitive properties from Hadoop
CredentialProvider, "
+ + "falling back to accumulo-site.xml", e);
+ }
+ }
+ }
+
+ private org.apache.hadoop.conf.Configuration getHadoopConfiguration() {
+ String credProviderPaths = config
+
.getString(Property.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS.getKey());
+ if (null != credProviderPaths && !credProviderPaths.isEmpty()) {
+ org.apache.hadoop.conf.Configuration hConf = new
org.apache.hadoop.conf.Configuration();
+ hConf.set(CredentialProviderFactoryShim.CREDENTIAL_PROVIDER_PATH,
credProviderPaths);
+ return hConf;
+ }
+
+ log.trace("Did not find credential provider configuration in
ClientConfiguration");
+
+ return null;
+ }
+ };
+ }
+
+ public static ClientConfiguration toClientConf(AccumuloConfiguration conf) {
+ ClientConfiguration clientConf = ClientConfiguration.create();
+
+ // Servers will only have the full principal in their configuration --
parse the
+ // primary and realm from it.
+ final String serverPrincipal =
conf.get(Property.GENERAL_KERBEROS_PRINCIPAL);
+
+ final KerberosName krbName;
+ if (serverPrincipal != null && !serverPrincipal.isEmpty()) {
+ krbName = new KerberosName(serverPrincipal);
+ clientConf.setProperty(KERBEROS_SERVER_PRIMARY,
krbName.getServiceName());
+ }
+
+ HashSet<String> clientKeys = new HashSet<>();
+ for (ClientConfiguration.ClientProperty prop :
ClientConfiguration.ClientProperty.values()) {
+ clientKeys.add(prop.getKey());
+ }
+
+ String key;
+ for (Map.Entry<String,String> entry : conf) {
+ key = entry.getKey();
+ if (clientKeys.contains(key)) {
+ clientConf.setProperty(key, entry.getValue());
+ }
+ }
+ return clientConf;
+ }
+
}
diff --git
a/core/src/main/java/org/apache/accumulo/core/client/impl/ClientContext.java
b/core/src/main/java/org/apache/accumulo/core/client/impl/ClientContext.java
index 13880ab..028480a 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/impl/ClientContext.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/impl/ClientContext.java
@@ -19,11 +19,8 @@ package org.apache.accumulo.core.client.impl;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.Map;
+import java.util.Properties;
import java.util.concurrent.TimeUnit;
-import java.util.function.Predicate;
import java.util.function.Supplier;
import org.apache.accumulo.core.client.AccumuloException;
@@ -35,14 +32,10 @@ import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Instance;
import org.apache.accumulo.core.client.ZooKeeperInstance;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
-import org.apache.accumulo.core.conf.CredentialProviderFactoryShim;
-import org.apache.accumulo.core.conf.DefaultConfiguration;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.rpc.SaslConnectionParams;
import org.apache.accumulo.core.rpc.SslConnectionParams;
import org.apache.accumulo.core.security.thrift.TCredentials;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import com.google.common.base.Suppliers;
@@ -57,8 +50,6 @@ import com.google.common.base.Suppliers;
*/
public class ClientContext {
- private static final Logger log =
LoggerFactory.getLogger(ClientContext.class);
-
protected final Instance inst;
private Credentials creds;
private ClientConfiguration clientConf;
@@ -85,9 +76,10 @@ public class ClientContext {
ConnectionInfoFactory.getBatchWriterConfig(connectionInfo));
}
- /**
- * Instantiate a client context
- */
+ public ClientContext(Instance instance, Credentials credentials, Properties
clientProps) {
+ this(instance, credentials, ClientConfConverter.toClientConf(clientProps));
+ }
+
public ClientContext(Instance instance, Credentials credentials,
ClientConfiguration clientConf) {
this(instance, credentials, clientConf, new BatchWriterConfig());
}
@@ -95,7 +87,7 @@ public class ClientContext {
public ClientContext(Instance instance, Credentials credentials,
ClientConfiguration clientConf,
BatchWriterConfig batchWriterConfig) {
this(instance, credentials,
- convertClientConfig(requireNonNull(clientConf, "clientConf is null")));
+ ClientConfConverter.toAccumuloConf(requireNonNull(clientConf,
"clientConf is null")));
this.clientConf = clientConf;
this.batchWriterConfig = batchWriterConfig;
}
@@ -111,22 +103,20 @@ public class ClientContext {
rpcConf = requireNonNull(serverConf, "serverConf is null");
clientConf = null;
- saslSupplier = new Supplier<SaslConnectionParams>() {
- @Override
- public SaslConnectionParams get() {
- // Use the clientConf if we have it
- if (null != clientConf) {
- if (!clientConf.hasSasl()) {
- return null;
- }
- return new SaslConnectionParams(clientConf,
getCredentials().getToken());
- }
- AccumuloConfiguration conf = getConfiguration();
- if (!conf.getBoolean(Property.INSTANCE_RPC_SASL_ENABLED)) {
+ saslSupplier = () -> {
+ // Use the clientConf if we have it
+ if (null != clientConf) {
+ if (!clientConf.hasSasl()) {
return null;
}
- return new SaslConnectionParams(conf, getCredentials().getToken());
+ return new
SaslConnectionParams(ClientConfConverter.toProperties(clientConf),
+ getCredentials().getToken());
+ }
+ AccumuloConfiguration conf = getConfiguration();
+ if (!conf.getBoolean(Property.INSTANCE_RPC_SASL_ENABLED)) {
+ return null;
}
+ return new SaslConnectionParams(conf, getCredentials().getToken());
};
timeoutSupplier = memoizeWithExpiration(
@@ -143,6 +133,10 @@ public class ClientContext {
return inst;
}
+ public ConnectionInfo getConnectionInfo() {
+ return new
ConnectionInfoImpl(ClientConfConverter.toProperties(clientConf),
creds.getToken());
+ }
+
/**
* Retrieve the credentials used to construct this context
*/
@@ -224,126 +218,4 @@ public class ClientContext {
return rpcCreds;
}
- /**
- * A utility method for converting client configuration to a standard
configuration object for use
- * internally.
- *
- * @param config
- * the original {@link ClientConfiguration}
- * @return the client configuration presented in the form of an {@link
AccumuloConfiguration}
- */
- public static AccumuloConfiguration convertClientConfig(final
ClientConfiguration config) {
-
- final AccumuloConfiguration defaults = DefaultConfiguration.getInstance();
-
- return new AccumuloConfiguration() {
-
- @Override
- public String get(Property property) {
- final String key = property.getKey();
-
- // Attempt to load sensitive properties from a CredentialProvider, if
configured
- if (property.isSensitive()) {
- org.apache.hadoop.conf.Configuration hadoopConf =
getHadoopConfiguration();
- if (null != hadoopConf) {
- try {
- char[] value = CredentialProviderFactoryShim
- .getValueFromCredentialProvider(hadoopConf, key);
- if (null != value) {
- log.trace("Loaded sensitive value for {} from
CredentialProvider", key);
- return new String(value);
- } else {
- log.trace("Tried to load sensitive value for {} from
CredentialProvider, "
- + "but none was found", key);
- }
- } catch (IOException e) {
- log.warn("Failed to extract sensitive property ({}) from Hadoop
CredentialProvider,"
- + " falling back to base AccumuloConfiguration", key, e);
- }
- }
- }
-
- if (config.containsKey(key))
- return config.getString(key);
- else {
- // Reconstitute the server kerberos property from the client config
- if (Property.GENERAL_KERBEROS_PRINCIPAL == property) {
- if (config
-
.containsKey(ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY.getKey()))
{
- // Avoid providing a realm since we don't know what it is...
- return config
-
.getString(ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY.getKey())
- + "/_HOST@" + SaslConnectionParams.getDefaultRealm();
- }
- }
- return defaults.get(property);
- }
- }
-
- @Override
- public void getProperties(Map<String,String> props, Predicate<String>
filter) {
- defaults.getProperties(props, filter);
-
- Iterator<String> keyIter = config.getKeys();
- while (keyIter.hasNext()) {
- String key = keyIter.next().toString();
- if (filter.test(key))
- props.put(key, config.getString(key));
- }
-
- // Two client props that don't exist on the server config. Client
doesn't need to know about
- // the Kerberos instance from the principal, but servers do
- // Automatically reconstruct the server property when converting a
client config.
- if (props
-
.containsKey(ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY.getKey()))
{
- final String serverPrimary = props
-
.remove(ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY.getKey());
- if (filter.test(Property.GENERAL_KERBEROS_PRINCIPAL.getKey())) {
- // Use the _HOST expansion. It should be unnecessary in "client
land".
- props.put(Property.GENERAL_KERBEROS_PRINCIPAL.getKey(),
- serverPrimary + "/_HOST@" +
SaslConnectionParams.getDefaultRealm());
- }
- }
-
- // Attempt to load sensitive properties from a CredentialProvider, if
configured
- org.apache.hadoop.conf.Configuration hadoopConf =
getHadoopConfiguration();
- if (null != hadoopConf) {
- try {
- for (String key :
CredentialProviderFactoryShim.getKeys(hadoopConf)) {
- if (!Property.isValidPropertyKey(key) ||
!Property.isSensitive(key)) {
- continue;
- }
-
- if (filter.test(key)) {
- char[] value = CredentialProviderFactoryShim
- .getValueFromCredentialProvider(hadoopConf, key);
- if (null != value) {
- props.put(key, new String(value));
- }
- }
- }
- } catch (IOException e) {
- log.warn("Failed to extract sensitive properties from Hadoop
CredentialProvider, "
- + "falling back to accumulo-site.xml", e);
- }
- }
- }
-
- private org.apache.hadoop.conf.Configuration getHadoopConfiguration() {
- String credProviderPaths = config
-
.getString(Property.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS.getKey());
- if (null != credProviderPaths && !credProviderPaths.isEmpty()) {
- org.apache.hadoop.conf.Configuration hConf = new
org.apache.hadoop.conf.Configuration();
- hConf.set(CredentialProviderFactoryShim.CREDENTIAL_PROVIDER_PATH,
credProviderPaths);
- return hConf;
- }
-
- log.trace("Did not find credential provider configuration in
ClientConfiguration");
-
- return null;
- }
- };
-
- }
-
}
diff --git
a/core/src/main/java/org/apache/accumulo/core/rpc/SaslConnectionParams.java
b/core/src/main/java/org/apache/accumulo/core/rpc/SaslConnectionParams.java
index 253af4a..3d81e0c 100644
--- a/core/src/main/java/org/apache/accumulo/core/rpc/SaslConnectionParams.java
+++ b/core/src/main/java/org/apache/accumulo/core/rpc/SaslConnectionParams.java
@@ -21,23 +21,20 @@ import static java.util.Objects.requireNonNull;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
-import java.util.HashSet;
import java.util.Map;
-import java.util.Map.Entry;
+import java.util.Properties;
import javax.security.auth.callback.CallbackHandler;
import javax.security.sasl.Sasl;
-import org.apache.accumulo.core.client.ClientConfiguration;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
+import org.apache.accumulo.core.client.impl.ClientConfConverter;
import org.apache.accumulo.core.client.impl.DelegationTokenImpl;
import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
import org.apache.accumulo.core.client.security.tokens.KerberosToken;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
-import org.apache.accumulo.core.conf.Property;
+import org.apache.accumulo.core.conf.ClientProperty;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.authentication.util.KerberosUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -129,16 +126,16 @@ public class SaslConnectionParams {
protected final Map<String,String> saslProperties;
public SaslConnectionParams(AccumuloConfiguration conf, AuthenticationToken
token) {
- this(ClientConfiguration.fromMap(getProperties(conf)), token);
+ this(ClientConfConverter.toProperties(conf), token);
}
- public SaslConnectionParams(ClientConfiguration conf, AuthenticationToken
token) {
- requireNonNull(conf, "Configuration was null");
+ public SaslConnectionParams(Properties properties, AuthenticationToken
token) {
+ requireNonNull(properties, "Properties was null");
requireNonNull(token, "AuthenticationToken was null");
saslProperties = new HashMap<>();
updatePrincipalFromUgi();
- updateFromConfiguration(conf);
+ updateFromConfiguration(properties);
updateFromToken(token);
}
@@ -156,38 +153,6 @@ public class SaslConnectionParams {
}
}
- protected static Map<String,String> getProperties(AccumuloConfiguration
conf) {
- final Map<String,String> clientProperties = new HashMap<>();
-
- // Servers will only have the full principal in their configuration --
parse the
- // primary and realm from it.
- final String serverPrincipal =
conf.get(Property.GENERAL_KERBEROS_PRINCIPAL);
-
- final KerberosName krbName;
- try {
- krbName = new KerberosName(serverPrincipal);
- clientProperties.put(ClientProperty.KERBEROS_SERVER_PRIMARY.getKey(),
- krbName.getServiceName());
- } catch (Exception e) {
- // bad value or empty, assume we're not using kerberos
- }
-
- HashSet<String> clientKeys = new HashSet<>();
- for (ClientProperty prop : ClientProperty.values()) {
- clientKeys.add(prop.getKey());
- }
-
- String key;
- for (Entry<String,String> entry : conf) {
- key = entry.getKey();
- if (clientKeys.contains(key)) {
- clientProperties.put(key, entry.getValue());
- }
- }
-
- return clientProperties;
- }
-
protected void updatePrincipalFromUgi() {
// Ensure we're using Kerberos auth for Hadoop UGI
if (!UserGroupInformation.isSecurityEnabled()) {
@@ -210,16 +175,17 @@ public class SaslConnectionParams {
}
- protected void updateFromConfiguration(ClientConfiguration conf) {
+ protected void updateFromConfiguration(Properties properties) {
// Get the quality of protection to use
- final String qopValue = conf.get(ClientProperty.RPC_SASL_QOP);
+ final String qopValue = ClientProperty.SASL_QOP.getValue(properties);
this.qop = QualityOfProtection.get(qopValue);
// Add in the SASL properties to a map so we don't have to repeatedly
construct this map
this.saslProperties.put(Sasl.QOP, this.qop.getQuality());
// The primary from the KRB principal on each server (e.g.
primary/instance@realm)
- this.kerberosServerPrimary =
conf.get(ClientProperty.KERBEROS_SERVER_PRIMARY);
+ this.kerberosServerPrimary = properties
+ .getProperty(ClientProperty.SASL_KERBEROS_SERVER_PRIMARY.getKey());
}
public Map<String,String> getSaslProperties() {
diff --git
a/core/src/main/java/org/apache/accumulo/core/trace/DistributedTrace.java
b/core/src/main/java/org/apache/accumulo/core/trace/DistributedTrace.java
index 4df139f..6153728 100644
--- a/core/src/main/java/org/apache/accumulo/core/trace/DistributedTrace.java
+++ b/core/src/main/java/org/apache/accumulo/core/trace/DistributedTrace.java
@@ -91,8 +91,8 @@ public class DistributedTrace {
* Enable tracing by setting up SpanReceivers for the current process. If
host name is null, it
* will be determined. If service name is null, the simple name of the class
will be used.
* Properties required in the client configuration include
- * {@link
org.apache.accumulo.core.client.ClientConfiguration.ClientProperty#TRACE_SPAN_RECEIVERS}
- * and any properties specific to the span receiver.
+ * {@link org.apache.accumulo.core.conf.ClientProperty#TRACE_SPAN_RECEIVERS}
and any properties
+ * specific to the span receiver.
*/
public static void enable(String hostname, String service, Properties
properties) {
String spanReceivers =
ClientProperty.TRACE_SPAN_RECEIVERS.getValue(properties);
diff --git
a/core/src/test/java/org/apache/accumulo/core/client/ClientConfigurationTest.java
b/core/src/test/java/org/apache/accumulo/core/client/ClientConfigurationTest.java
index 7d10ec8..82c1760 100644
---
a/core/src/test/java/org/apache/accumulo/core/client/ClientConfigurationTest.java
+++
b/core/src/test/java/org/apache/accumulo/core/client/ClientConfigurationTest.java
@@ -26,15 +26,17 @@ import java.io.IOException;
import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
import org.junit.Test;
+@Deprecated
public class ClientConfigurationTest {
+
@Test
- public void testOverrides() throws Exception {
+ public void testOverrides() {
ClientConfiguration clientConfig = createConfig();
assertExpectedConfig(clientConfig);
}
@Test
- public void testSerialization() throws Exception {
+ public void testSerialization() {
ClientConfiguration clientConfig = createConfig();
// sanity check that we're starting with what we're expecting
assertExpectedConfig(clientConfig);
diff --git
a/core/src/test/java/org/apache/accumulo/core/client/ZooKeeperInstanceTest.java
b/core/src/test/java/org/apache/accumulo/core/client/ZooKeeperInstanceTest.java
index f35098f..467a4a9 100644
---
a/core/src/test/java/org/apache/accumulo/core/client/ZooKeeperInstanceTest.java
+++
b/core/src/test/java/org/apache/accumulo/core/client/ZooKeeperInstanceTest.java
@@ -36,19 +36,18 @@ import org.junit.Test;
public class ZooKeeperInstanceTest {
private static final UUID IID = UUID.randomUUID();
private static final String IID_STRING = IID.toString();
- private ClientConfiguration config;
private ZooCacheFactory zcf;
private ZooCache zc;
private ZooKeeperInstance zki;
- private void mockIdConstruction(ClientConfiguration config) {
+ private void mockIdConstruction(@SuppressWarnings("deprecation")
ClientConfiguration config) {
expect(config.get(ClientProperty.INSTANCE_ID)).andReturn(IID_STRING);
expect(config.get(ClientProperty.INSTANCE_NAME)).andReturn(null);
expect(config.get(ClientProperty.INSTANCE_ZK_HOST)).andReturn("zk1");
expect(config.get(ClientProperty.INSTANCE_ZK_TIMEOUT)).andReturn("30");
}
- private void mockNameConstruction(ClientConfiguration config) {
+ private void mockNameConstruction(@SuppressWarnings("deprecation")
ClientConfiguration config) {
expect(config.get(ClientProperty.INSTANCE_ID)).andReturn(null);
expect(config.get(ClientProperty.INSTANCE_NAME)).andReturn("instance");
expect(config.get(ClientProperty.INSTANCE_ZK_HOST)).andReturn("zk1");
@@ -57,7 +56,8 @@ public class ZooKeeperInstanceTest {
@Before
public void setUp() {
- config = createMock(ClientConfiguration.class);
+ @SuppressWarnings("deprecation")
+ ClientConfiguration config = createMock(ClientConfiguration.class);
mockNameConstruction(config);
replay(config);
zcf = createMock(ZooCacheFactory.class);
@@ -73,13 +73,26 @@ public class ZooKeeperInstanceTest {
@Test(expected = IllegalArgumentException.class)
public void testInvalidConstruction() {
- config = createMock(ClientConfiguration.class);
+ @SuppressWarnings("deprecation")
+ ClientConfiguration config = createMock(ClientConfiguration.class);
expect(config.get(ClientProperty.INSTANCE_ID)).andReturn(IID_STRING);
mockNameConstruction(config);
replay(config);
new ZooKeeperInstance(config);
}
+ @Test(expected = IllegalArgumentException.class)
+ public void testInvalidConstruction2() {
+ @SuppressWarnings("deprecation")
+ ClientConfiguration config = createMock(ClientConfiguration.class);
+ expect(config.get(ClientProperty.INSTANCE_ID)).andReturn(null);
+ expect(config.get(ClientProperty.INSTANCE_NAME)).andReturn(null);
+ expect(config.get(ClientProperty.INSTANCE_ZK_HOST)).andReturn("zk1");
+ expect(config.get(ClientProperty.INSTANCE_ZK_TIMEOUT)).andReturn("30");
+ replay(config);
+ new ZooKeeperInstance(config);
+ }
+
@Test
public void testSimpleGetters() {
assertEquals("instance", zki.getInstanceName());
@@ -98,7 +111,8 @@ public class ZooKeeperInstanceTest {
@Test
public void testGetInstanceID_Direct() {
- config = createMock(ClientConfiguration.class);
+ @SuppressWarnings("deprecation")
+ ClientConfiguration config = createMock(ClientConfiguration.class);
mockIdConstruction(config);
replay(config);
zki = new ZooKeeperInstance(config, zcf);
@@ -109,6 +123,8 @@ public class ZooKeeperInstanceTest {
@Test(expected = RuntimeException.class)
public void testGetInstanceID_NoMapping() {
+ @SuppressWarnings("deprecation")
+ ClientConfiguration config = createMock(ClientConfiguration.class);
expect(zc.get(Constants.ZROOT + Constants.ZINSTANCES +
"/instance")).andReturn(null);
replay(zc);
EasyMock.reset(config, zcf);
@@ -126,7 +142,8 @@ public class ZooKeeperInstanceTest {
@Test(expected = RuntimeException.class)
public void testGetInstanceID_IDMissingForID() {
- config = createMock(ClientConfiguration.class);
+ @SuppressWarnings("deprecation")
+ ClientConfiguration config = createMock(ClientConfiguration.class);
mockIdConstruction(config);
replay(config);
zki = new ZooKeeperInstance(config, zcf);
@@ -137,7 +154,8 @@ public class ZooKeeperInstanceTest {
@Test
public void testGetInstanceName() {
- config = createMock(ClientConfiguration.class);
+ @SuppressWarnings("deprecation")
+ ClientConfiguration config = createMock(ClientConfiguration.class);
mockIdConstruction(config);
replay(config);
zki = new ZooKeeperInstance(config, zcf);
@@ -164,6 +182,7 @@ public class ZooKeeperInstanceTest {
.andReturn(IID_STRING.getBytes(UTF_8));
expect(zc.get(Constants.ZROOT + "/" +
IID_STRING)).andReturn("yup".getBytes());
replay(zc, factory);
+ @SuppressWarnings("deprecation")
ClientConfiguration cfg =
ClientConfiguration.loadDefault().withInstance(instanceName)
.withZkHosts(zookeepers);
ZooKeeperInstance zki = new ZooKeeperInstance(cfg, factory);
diff --git
a/core/src/test/java/org/apache/accumulo/core/client/impl/ClientContextTest.java
b/core/src/test/java/org/apache/accumulo/core/client/impl/ClientContextTest.java
index b15f9b8..648bfb2 100644
---
a/core/src/test/java/org/apache/accumulo/core/client/impl/ClientContextTest.java
+++
b/core/src/test/java/org/apache/accumulo/core/client/impl/ClientContextTest.java
@@ -70,7 +70,7 @@ public class ClientContextTest {
ClientConfiguration clientConf = ClientConfiguration.create()
.with(Property.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS.getKey(),
absPath);
- AccumuloConfiguration accClientConf =
ClientContext.convertClientConfig(clientConf);
+ AccumuloConfiguration accClientConf =
ClientConfConverter.toAccumuloConf(clientConf);
Assert.assertEquals("mysecret",
accClientConf.get(Property.INSTANCE_SECRET));
}
@@ -82,7 +82,7 @@ public class ClientContextTest {
ClientConfiguration clientConf = ClientConfiguration.create();
- AccumuloConfiguration accClientConf =
ClientContext.convertClientConfig(clientConf);
+ AccumuloConfiguration accClientConf =
ClientConfConverter.toAccumuloConf(clientConf);
Assert.assertEquals(Property.INSTANCE_SECRET.getDefaultValue(),
accClientConf.get(Property.INSTANCE_SECRET));
}
@@ -97,7 +97,7 @@ public class ClientContextTest {
ClientConfiguration clientConf = ClientConfiguration.create()
.with(Property.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS.getKey(),
absPath);
- AccumuloConfiguration accClientConf =
ClientContext.convertClientConfig(clientConf);
+ AccumuloConfiguration accClientConf =
ClientConfConverter.toAccumuloConf(clientConf);
Map<String,String> props = new HashMap<>();
accClientConf.getProperties(props, x -> true);
diff --git
a/core/src/test/java/org/apache/accumulo/core/client/impl/TableOperationsImplTest.java
b/core/src/test/java/org/apache/accumulo/core/client/impl/TableOperationsImplTest.java
index 47b9093..129ed30 100644
---
a/core/src/test/java/org/apache/accumulo/core/client/impl/TableOperationsImplTest.java
+++
b/core/src/test/java/org/apache/accumulo/core/client/impl/TableOperationsImplTest.java
@@ -16,9 +16,9 @@
*/
package org.apache.accumulo.core.client.impl;
+import java.util.Properties;
import java.util.concurrent.TimeUnit;
-import org.apache.accumulo.core.client.ClientConfiguration;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Instance;
import org.apache.accumulo.core.client.Scanner;
@@ -38,7 +38,7 @@ public class TableOperationsImplTest {
Instance instance = EasyMock.createMock(Instance.class);
Credentials credentials = EasyMock.createMock(Credentials.class);
- ClientContext context = new ClientContext(instance, credentials,
ClientConfiguration.create());
+ ClientContext context = new ClientContext(instance, credentials, new
Properties());
TableOperationsImpl topsImpl = new TableOperationsImpl(context);
Connector connector = EasyMock.createMock(Connector.class);
diff --git
a/core/src/test/java/org/apache/accumulo/core/client/impl/TabletLocatorImplTest.java
b/core/src/test/java/org/apache/accumulo/core/client/impl/TabletLocatorImplTest.java
index 5dc1205..6d3a133 100644
---
a/core/src/test/java/org/apache/accumulo/core/client/impl/TabletLocatorImplTest.java
+++
b/core/src/test/java/org/apache/accumulo/core/client/impl/TabletLocatorImplTest.java
@@ -30,13 +30,13 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.Properties;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.ClientConfiguration;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Instance;
import org.apache.accumulo.core.client.impl.TabletLocator.TabletLocation;
@@ -175,8 +175,7 @@ public class TabletLocatorImplTest {
@Before
public void setUp() {
testInstance = new TestInstance("instance1", "tserver1");
- context = new ClientContext(testInstance, new Credentials("test", null),
- ClientConfiguration.create());
+ context = new ClientContext(testInstance, new Credentials("test", null),
new Properties());
}
private void runTest(Text tableName, List<Range> ranges, TabletLocatorImpl
tab1TabletCache,
diff --git
a/core/src/test/java/org/apache/accumulo/core/client/impl/ThriftTransportKeyTest.java
b/core/src/test/java/org/apache/accumulo/core/client/impl/ThriftTransportKeyTest.java
index fe0ce45..57fa8cb 100644
---
a/core/src/test/java/org/apache/accumulo/core/client/impl/ThriftTransportKeyTest.java
+++
b/core/src/test/java/org/apache/accumulo/core/client/impl/ThriftTransportKeyTest.java
@@ -26,10 +26,11 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
+import java.util.Properties;
-import org.apache.accumulo.core.client.ClientConfiguration;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
+import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
import org.apache.accumulo.core.client.security.tokens.KerberosToken;
+import org.apache.accumulo.core.conf.ClientProperty;
import org.apache.accumulo.core.rpc.SaslConnectionParams;
import org.apache.accumulo.core.rpc.SslConnectionParams;
import org.apache.accumulo.core.util.HostAndPort;
@@ -42,8 +43,10 @@ import org.junit.Test;
public class ThriftTransportKeyTest {
+ private static final String primary = "accumulo";
+
@Before
- public void setup() throws Exception {
+ public void setup() {
System.setProperty("java.security.krb5.realm", "accumulo");
System.setProperty("java.security.krb5.kdc", "fake");
Configuration conf = new Configuration(false);
@@ -51,6 +54,13 @@ public class ThriftTransportKeyTest {
UserGroupInformation.setConfiguration(conf);
}
+ private static SaslConnectionParams createSaslParams(AuthenticationToken
token) {
+ Properties props = new Properties();
+ props.setProperty(ClientProperty.SASL_KERBEROS_SERVER_PRIMARY.getKey(),
primary);
+ props.setProperty(ClientProperty.SASL_ENABLED.getKey(), "true");
+ return new SaslConnectionParams(props, token);
+ }
+
@Test(expected = RuntimeException.class)
public void testSslAndSaslErrors() {
ClientContext clientCtx = createMock(ClientContext.class);
@@ -74,28 +84,14 @@ public class ThriftTransportKeyTest {
public void testConnectionCaching() throws IOException, InterruptedException
{
UserGroupInformation user1 =
UserGroupInformation.createUserForTesting("user1", new String[0]);
final KerberosToken token = EasyMock.createMock(KerberosToken.class);
- final ClientConfiguration clientConf = ClientConfiguration.loadDefault();
- // The primary is the first component of the principal
- final String primary = "accumulo";
- clientConf.withSasl(true, primary);
// A first instance of the SASL cnxn params
SaslConnectionParams saslParams1 = user1
- .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
- @Override
- public SaslConnectionParams run() throws Exception {
- return new SaslConnectionParams(clientConf, token);
- }
- });
+ .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () ->
createSaslParams(token));
// A second instance of what should be the same SaslConnectionParams
SaslConnectionParams saslParams2 = user1
- .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
- @Override
- public SaslConnectionParams run() throws Exception {
- return new SaslConnectionParams(clientConf, token);
- }
- });
+ .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () ->
createSaslParams(token));
ThriftTransportKey ttk1 = new
ThriftTransportKey(HostAndPort.fromParts("localhost", 9997), 1L,
null, saslParams1),
@@ -112,37 +108,11 @@ public class ThriftTransportKeyTest {
UserGroupInformation user1 =
UserGroupInformation.createUserForTesting("user1", new String[0]);
final KerberosToken token = EasyMock.createMock(KerberosToken.class);
SaslConnectionParams saslParams1 = user1
- .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
- @Override
- public SaslConnectionParams run() throws Exception {
- final ClientConfiguration clientConf =
ClientConfiguration.loadDefault();
-
- // The primary is the first component of the principal
- final String primary = "accumulo";
- clientConf.withSasl(true, primary);
-
- assertEquals("true",
clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
- return new SaslConnectionParams(clientConf, token);
- }
- });
+ .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () ->
createSaslParams(token));
UserGroupInformation user2 =
UserGroupInformation.createUserForTesting("user2", new String[0]);
SaslConnectionParams saslParams2 = user2
- .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
- @Override
- public SaslConnectionParams run() throws Exception {
- final ClientConfiguration clientConf =
ClientConfiguration.loadDefault();
-
- // The primary is the first component of the principal
- final String primary = "accumulo";
- clientConf.withSasl(true, primary);
-
- assertEquals("true",
clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
- return new SaslConnectionParams(clientConf, token);
- }
- });
+ .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () ->
createSaslParams(token));
ThriftTransportKey ttk1 = new
ThriftTransportKey(HostAndPort.fromParts("localhost", 9997), 1L,
null, saslParams1),
@@ -167,5 +137,4 @@ public class ThriftTransportKeyTest {
assertTrue("Normal ThriftTransportKey doesn't equal itself",
ttk.equals(ttk));
}
-
}
diff --git
a/core/src/test/java/org/apache/accumulo/core/rpc/SaslConnectionParamsTest.java
b/core/src/test/java/org/apache/accumulo/core/rpc/SaslConnectionParamsTest.java
index a93f80f..0028448 100644
---
a/core/src/test/java/org/apache/accumulo/core/rpc/SaslConnectionParamsTest.java
+++
b/core/src/test/java/org/apache/accumulo/core/rpc/SaslConnectionParamsTest.java
@@ -22,16 +22,15 @@ import static org.junit.Assert.assertNotNull;
import java.security.PrivilegedExceptionAction;
import java.util.Map;
+import java.util.Properties;
import javax.security.sasl.Sasl;
-import org.apache.accumulo.core.client.ClientConfiguration;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
import org.apache.accumulo.core.client.impl.AuthenticationTokenIdentifier;
-import org.apache.accumulo.core.client.impl.ClientContext;
import org.apache.accumulo.core.client.impl.DelegationTokenImpl;
+import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
import org.apache.accumulo.core.client.security.tokens.KerberosToken;
-import org.apache.accumulo.core.conf.AccumuloConfiguration;
+import org.apache.accumulo.core.conf.ClientProperty;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.rpc.SaslConnectionParams.QualityOfProtection;
import org.apache.accumulo.core.rpc.SaslConnectionParams.SaslMechanism;
@@ -46,9 +45,10 @@ public class SaslConnectionParamsTest {
private UserGroupInformation testUser;
private String username;
+ private static final String primary = "accumulo";
@Before
- public void setup() throws Exception {
+ public void setup() {
System.setProperty("java.security.krb5.realm", "accumulo");
System.setProperty("java.security.krb5.kdc", "fake");
Configuration conf = new Configuration(false);
@@ -58,64 +58,48 @@ public class SaslConnectionParamsTest {
username = testUser.getUserName();
}
+ private static SaslConnectionParams createSaslParams(AuthenticationToken
token) {
+ Properties props = new Properties();
+ props.setProperty(ClientProperty.SASL_KERBEROS_SERVER_PRIMARY.getKey(),
primary);
+ props.setProperty(ClientProperty.SASL_ENABLED.getKey(), "true");
+ return new SaslConnectionParams(props, token);
+ }
+
@Test
public void testDefaultParamsAsClient() throws Exception {
final KerberosToken token = EasyMock.createMock(KerberosToken.class);
- testUser.doAs(new PrivilegedExceptionAction<Void>() {
- @Override
- public Void run() throws Exception {
- final ClientConfiguration clientConf =
ClientConfiguration.loadDefault();
-
- // The primary is the first component of the principal
- final String primary = "accumulo";
- clientConf.withSasl(true, primary);
-
- assertEquals("true",
clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
- final SaslConnectionParams saslParams = new
SaslConnectionParams(clientConf, token);
- assertEquals(primary, saslParams.getKerberosServerPrimary());
-
- final QualityOfProtection defaultQop = QualityOfProtection
- .get(Property.RPC_SASL_QOP.getDefaultValue());
- assertEquals(defaultQop, saslParams.getQualityOfProtection());
-
- Map<String,String> properties = saslParams.getSaslProperties();
- assertEquals(1, properties.size());
- assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
- assertEquals(username, saslParams.getPrincipal());
- return null;
- }
+ testUser.doAs((PrivilegedExceptionAction<Void>) () -> {
+ final SaslConnectionParams saslParams = createSaslParams(token);
+ assertEquals(primary, saslParams.getKerberosServerPrimary());
+
+ final QualityOfProtection defaultQop = QualityOfProtection
+ .get(Property.RPC_SASL_QOP.getDefaultValue());
+ assertEquals(defaultQop, saslParams.getQualityOfProtection());
+
+ Map<String,String> properties = saslParams.getSaslProperties();
+ assertEquals(1, properties.size());
+ assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
+ assertEquals(username, saslParams.getPrincipal());
+ return null;
});
}
@Test
public void testDefaultParams() throws Exception {
final KerberosToken token = EasyMock.createMock(KerberosToken.class);
- testUser.doAs(new PrivilegedExceptionAction<Void>() {
- @Override
- public Void run() throws Exception {
- final ClientConfiguration clientConf =
ClientConfiguration.loadDefault();
-
- // The primary is the first component of the principal
- final String primary = "accumulo";
- clientConf.withSasl(true, primary);
-
- final AccumuloConfiguration rpcConf =
ClientContext.convertClientConfig(clientConf);
- assertEquals("true",
clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
- final SaslConnectionParams saslParams = new
SaslConnectionParams(rpcConf, token);
- assertEquals(primary, saslParams.getKerberosServerPrimary());
-
- final QualityOfProtection defaultQop = QualityOfProtection
- .get(Property.RPC_SASL_QOP.getDefaultValue());
- assertEquals(defaultQop, saslParams.getQualityOfProtection());
-
- Map<String,String> properties = saslParams.getSaslProperties();
- assertEquals(1, properties.size());
- assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
- assertEquals(username, saslParams.getPrincipal());
- return null;
- }
+ testUser.doAs((PrivilegedExceptionAction<Void>) () -> {
+ final SaslConnectionParams saslParams = createSaslParams(token);
+ assertEquals(primary, saslParams.getKerberosServerPrimary());
+
+ final QualityOfProtection defaultQop = QualityOfProtection
+ .get(Property.RPC_SASL_QOP.getDefaultValue());
+ assertEquals(defaultQop, saslParams.getQualityOfProtection());
+
+ Map<String,String> properties = saslParams.getSaslProperties();
+ assertEquals(1, properties.size());
+ assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
+ assertEquals(username, saslParams.getPrincipal());
+ return null;
});
}
@@ -123,36 +107,24 @@ public class SaslConnectionParamsTest {
public void testDelegationTokenImpl() throws Exception {
final DelegationTokenImpl token = new DelegationTokenImpl(new byte[0],
new AuthenticationTokenIdentifier("user", 1, 10L, 20L, "instanceid"));
- testUser.doAs(new PrivilegedExceptionAction<Void>() {
- @Override
- public Void run() throws Exception {
- final ClientConfiguration clientConf =
ClientConfiguration.loadDefault();
-
- // The primary is the first component of the principal
- final String primary = "accumulo";
- clientConf.withSasl(true, primary);
-
- final AccumuloConfiguration rpcConf =
ClientContext.convertClientConfig(clientConf);
- assertEquals("true",
clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
- final SaslConnectionParams saslParams = new
SaslConnectionParams(rpcConf, token);
- assertEquals(primary, saslParams.getKerberosServerPrimary());
-
- final QualityOfProtection defaultQop = QualityOfProtection
- .get(Property.RPC_SASL_QOP.getDefaultValue());
- assertEquals(defaultQop, saslParams.getQualityOfProtection());
-
- assertEquals(SaslMechanism.DIGEST_MD5, saslParams.getMechanism());
- assertNotNull(saslParams.getCallbackHandler());
- assertEquals(SaslClientDigestCallbackHandler.class,
- saslParams.getCallbackHandler().getClass());
-
- Map<String,String> properties = saslParams.getSaslProperties();
- assertEquals(1, properties.size());
- assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
- assertEquals(username, saslParams.getPrincipal());
- return null;
- }
+ testUser.doAs((PrivilegedExceptionAction<Void>) () -> {
+ final SaslConnectionParams saslParams = createSaslParams(token);
+ assertEquals(primary, saslParams.getKerberosServerPrimary());
+
+ final QualityOfProtection defaultQop = QualityOfProtection
+ .get(Property.RPC_SASL_QOP.getDefaultValue());
+ assertEquals(defaultQop, saslParams.getQualityOfProtection());
+
+ assertEquals(SaslMechanism.DIGEST_MD5, saslParams.getMechanism());
+ assertNotNull(saslParams.getCallbackHandler());
+ assertEquals(SaslClientDigestCallbackHandler.class,
+ saslParams.getCallbackHandler().getClass());
+
+ Map<String,String> properties = saslParams.getSaslProperties();
+ assertEquals(1, properties.size());
+ assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
+ assertEquals(username, saslParams.getPrincipal());
+ return null;
});
}
@@ -160,38 +132,10 @@ public class SaslConnectionParamsTest {
public void testEquality() throws Exception {
final KerberosToken token = EasyMock.createMock(KerberosToken.class);
SaslConnectionParams params1 = testUser
- .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
- @Override
- public SaslConnectionParams run() throws Exception {
- final ClientConfiguration clientConf =
ClientConfiguration.loadDefault();
-
- // The primary is the first component of the principal
- final String primary = "accumulo";
- clientConf.withSasl(true, primary);
-
- final AccumuloConfiguration rpcConf =
ClientContext.convertClientConfig(clientConf);
- assertEquals("true",
clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
- return new SaslConnectionParams(rpcConf, token);
- }
- });
+ .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () ->
createSaslParams(token));
SaslConnectionParams params2 = testUser
- .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
- @Override
- public SaslConnectionParams run() throws Exception {
- final ClientConfiguration clientConf =
ClientConfiguration.loadDefault();
-
- // The primary is the first component of the principal
- final String primary = "accumulo";
- clientConf.withSasl(true, primary);
-
- final AccumuloConfiguration rpcConf =
ClientContext.convertClientConfig(clientConf);
- assertEquals("true",
clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
- return new SaslConnectionParams(rpcConf, token);
- }
- });
+ .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () ->
createSaslParams(token));
assertEquals(params1, params2);
assertEquals(params1.hashCode(), params2.hashCode());
@@ -199,21 +143,7 @@ public class SaslConnectionParamsTest {
final DelegationTokenImpl delToken1 = new DelegationTokenImpl(new byte[0],
new AuthenticationTokenIdentifier("user", 1, 10L, 20L, "instanceid"));
SaslConnectionParams params3 = testUser
- .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
- @Override
- public SaslConnectionParams run() throws Exception {
- final ClientConfiguration clientConf =
ClientConfiguration.loadDefault();
-
- // The primary is the first component of the principal
- final String primary = "accumulo";
- clientConf.withSasl(true, primary);
-
- final AccumuloConfiguration rpcConf =
ClientContext.convertClientConfig(clientConf);
- assertEquals("true",
clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
- return new SaslConnectionParams(rpcConf, delToken1);
- }
- });
+ .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () ->
createSaslParams(delToken1));
assertNotEquals(params1, params3);
assertNotEquals(params1.hashCode(), params3.hashCode());
@@ -223,21 +153,7 @@ public class SaslConnectionParamsTest {
final DelegationTokenImpl delToken2 = new DelegationTokenImpl(new byte[0],
new AuthenticationTokenIdentifier("user", 1, 10L, 20L, "instanceid"));
SaslConnectionParams params4 = testUser
- .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
- @Override
- public SaslConnectionParams run() throws Exception {
- final ClientConfiguration clientConf =
ClientConfiguration.loadDefault();
-
- // The primary is the first component of the principal
- final String primary = "accumulo";
- clientConf.withSasl(true, primary);
-
- final AccumuloConfiguration rpcConf =
ClientContext.convertClientConfig(clientConf);
- assertEquals("true",
clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
- return new SaslConnectionParams(rpcConf, delToken2);
- }
- });
+ .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () ->
createSaslParams(delToken2));
assertNotEquals(params1, params4);
assertNotEquals(params1.hashCode(), params4.hashCode());
diff --git a/proxy/src/main/java/org/apache/accumulo/proxy/Proxy.java
b/proxy/src/main/java/org/apache/accumulo/proxy/Proxy.java
index ef84f79..898ea0b 100644
--- a/proxy/src/main/java/org/apache/accumulo/proxy/Proxy.java
+++ b/proxy/src/main/java/org/apache/accumulo/proxy/Proxy.java
@@ -26,7 +26,7 @@ import java.util.Properties;
import org.apache.accumulo.core.cli.Help;
import org.apache.accumulo.core.client.ClientConfiguration;
import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
-import org.apache.accumulo.core.client.impl.ClientContext;
+import org.apache.accumulo.core.client.impl.ClientConfConverter;
import org.apache.accumulo.core.client.security.tokens.KerberosToken;
import org.apache.accumulo.core.conf.ConfigurationTypeHelper;
import org.apache.accumulo.core.conf.Property;
@@ -255,7 +255,7 @@ public class Proxy implements KeywordExecutable {
SaslServerConnectionParams saslParams = null;
switch (serverType) {
case SSL:
- sslParams =
SslConnectionParams.forClient(ClientContext.convertClientConfig(clientConf));
+ sslParams =
SslConnectionParams.forClient(ClientConfConverter.toAccumuloConf(clientConf));
break;
case SASL:
if (!clientConf.hasSasl()) {
@@ -291,7 +291,8 @@ public class Proxy implements KeywordExecutable {
clientConf.setProperty(ClientProperty.KERBEROS_SERVER_PRIMARY,
shortName);
KerberosToken token = new KerberosToken();
- saslParams = new SaslServerConnectionParams(clientConf, token, null);
+ saslParams = new
SaslServerConnectionParams(ClientConfConverter.toProperties(clientConf),
+ token, null);
processor = new UGIAssumingProcessor(processor);
diff --git
a/server/base/src/main/java/org/apache/accumulo/server/rpc/SaslServerConnectionParams.java
b/server/base/src/main/java/org/apache/accumulo/server/rpc/SaslServerConnectionParams.java
index d49ce14..d09c40a 100644
---
a/server/base/src/main/java/org/apache/accumulo/server/rpc/SaslServerConnectionParams.java
+++
b/server/base/src/main/java/org/apache/accumulo/server/rpc/SaslServerConnectionParams.java
@@ -16,7 +16,8 @@
*/
package org.apache.accumulo.server.rpc;
-import org.apache.accumulo.core.client.ClientConfiguration;
+import java.util.Properties;
+
import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
import org.apache.accumulo.core.client.security.tokens.KerberosToken;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
@@ -41,13 +42,13 @@ public class SaslServerConnectionParams extends
SaslConnectionParams {
setSecretManager(secretManager);
}
- public SaslServerConnectionParams(ClientConfiguration conf,
AuthenticationToken token) {
- this(conf, token, null);
+ public SaslServerConnectionParams(Properties clientProps,
AuthenticationToken token) {
+ this(clientProps, token, null);
}
- public SaslServerConnectionParams(ClientConfiguration conf,
AuthenticationToken token,
+ public SaslServerConnectionParams(Properties clientProps,
AuthenticationToken token,
AuthenticationTokenSecretManager secretManager) {
- super(conf, token);
+ super(clientProps, token);
setSecretManager(secretManager);
}
diff --git
a/server/base/src/main/java/org/apache/accumulo/server/util/RemoveEntriesForMissingFiles.java
b/server/base/src/main/java/org/apache/accumulo/server/util/RemoveEntriesForMissingFiles.java
index 6d04cbf..5b0122c 100644
---
a/server/base/src/main/java/org/apache/accumulo/server/util/RemoveEntriesForMissingFiles.java
+++
b/server/base/src/main/java/org/apache/accumulo/server/util/RemoveEntriesForMissingFiles.java
@@ -30,7 +30,6 @@ import org.apache.accumulo.core.cli.ScannerOpts;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.client.ClientConfiguration;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.impl.ClientContext;
@@ -214,7 +213,7 @@ public class RemoveEntriesForMissingFiles {
opts.parseArgs(RemoveEntriesForMissingFiles.class.getName(), args,
scanOpts, bwOpts);
checkAllTables(new ClientContext(opts.getInstance(),
- new Credentials(opts.getPrincipal(), opts.getToken()),
ClientConfiguration.loadDefault()),
+ new Credentials(opts.getPrincipal(), opts.getToken()),
opts.getClientProperties()),
opts.fix);
}
}
diff --git
a/server/base/src/main/java/org/apache/accumulo/server/util/VerifyTabletAssignments.java
b/server/base/src/main/java/org/apache/accumulo/server/util/VerifyTabletAssignments.java
index 94f214f..d8df68b 100644
---
a/server/base/src/main/java/org/apache/accumulo/server/util/VerifyTabletAssignments.java
+++
b/server/base/src/main/java/org/apache/accumulo/server/util/VerifyTabletAssignments.java
@@ -31,7 +31,6 @@ import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.accumulo.core.client.impl.ClientConfConverter;
import org.apache.accumulo.core.client.impl.ClientContext;
import org.apache.accumulo.core.client.impl.Credentials;
import org.apache.accumulo.core.client.impl.Table;
@@ -76,8 +75,7 @@ public class VerifyTabletAssignments {
opts.parseArgs(VerifyTabletAssignments.class.getName(), args);
ClientContext context = new ClientContext(opts.getInstance(),
- new Credentials(opts.getPrincipal(), opts.getToken()),
- ClientConfConverter.toClientConf(opts.getClientProperties()));
+ new Credentials(opts.getPrincipal(), opts.getToken()),
opts.getClientProperties());
Connector conn = opts.getConnector();
for (String table : conn.tableOperations().list())
checkTable(context, opts, table, null);
diff --git
a/server/base/src/test/java/org/apache/accumulo/server/AccumuloServerContextTest.java
b/server/base/src/test/java/org/apache/accumulo/server/AccumuloServerContextTest.java
index 3946341..b2855c6 100644
---
a/server/base/src/test/java/org/apache/accumulo/server/AccumuloServerContextTest.java
+++
b/server/base/src/test/java/org/apache/accumulo/server/AccumuloServerContextTest.java
@@ -21,13 +21,13 @@ import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.security.PrivilegedExceptionAction;
+import java.util.Properties;
-import org.apache.accumulo.core.client.ClientConfiguration;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
-import org.apache.accumulo.core.client.impl.ClientContext;
+import org.apache.accumulo.core.client.impl.ClientConfConverter;
import org.apache.accumulo.core.client.impl.Credentials;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
+import org.apache.accumulo.core.conf.ClientProperty;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.conf.SiteConfiguration;
import org.apache.accumulo.server.conf.ServerConfigurationFactory;
@@ -48,7 +48,7 @@ public class AccumuloServerContextTest {
private String username;
@Before
- public void setup() throws Exception {
+ public void setup() {
System.setProperty("java.security.krb5.realm", "accumulo");
System.setProperty("java.security.krb5.kdc", "fake");
Configuration conf = new Configuration(false);
@@ -61,61 +61,58 @@ public class AccumuloServerContextTest {
@Test
public void testSasl() throws Exception {
- testUser.doAs(new PrivilegedExceptionAction<Void>() {
- @Override
- public Void run() throws Exception {
-
- ClientConfiguration clientConf = ClientConfiguration.loadDefault();
- clientConf.setProperty(ClientProperty.INSTANCE_RPC_SASL_ENABLED,
"true");
- clientConf.setProperty(ClientProperty.KERBEROS_SERVER_PRIMARY,
"accumulo");
- final AccumuloConfiguration conf =
ClientContext.convertClientConfig(clientConf);
- SiteConfiguration siteConfig =
EasyMock.createMock(SiteConfiguration.class);
-
-
EasyMock.expect(siteConfig.getBoolean(Property.INSTANCE_RPC_SASL_ENABLED)).andReturn(true);
-
- // Deal with SystemToken being private
- PasswordToken pw = new PasswordToken("fake");
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- pw.write(new DataOutputStream(baos));
- SystemToken token = new SystemToken();
- token.readFields(new DataInputStream(new
ByteArrayInputStream(baos.toByteArray())));
-
- ServerConfigurationFactory factory =
EasyMock.createMock(ServerConfigurationFactory.class);
-
EasyMock.expect(factory.getSystemConfiguration()).andReturn(conf).anyTimes();
-
EasyMock.expect(factory.getSiteConfiguration()).andReturn(siteConfig).anyTimes();
-
- AccumuloServerContext context =
EasyMock.createMockBuilder(AccumuloServerContext.class)
-
.addMockedMethod("enforceKerberosLogin").addMockedMethod("getConfiguration")
-
.addMockedMethod("getServerConfigurationFactory").addMockedMethod("getCredentials")
- .createMock();
- context.enforceKerberosLogin();
- EasyMock.expectLastCall().anyTimes();
- EasyMock.expect(context.getConfiguration()).andReturn(conf).anyTimes();
-
EasyMock.expect(context.getServerConfigurationFactory()).andReturn(factory).anyTimes();
- EasyMock.expect(context.getCredentials())
- .andReturn(new Credentials("accumulo/[email protected]",
token)).once();
-
- // Just make the SiteConfiguration delegate to our ClientConfiguration
(by way of the
- // AccumuloConfiguration)
- // Presently, we only need get(Property) and iterator().
-
EasyMock.expect(siteConfig.get(EasyMock.anyObject(Property.class))).andAnswer(()
-> {
- Object[] args = EasyMock.getCurrentArguments();
- return conf.get((Property) args[0]);
- }).anyTimes();
-
- EasyMock.expect(siteConfig.iterator()).andAnswer(() ->
conf.iterator()).anyTimes();
-
- EasyMock.replay(factory, context, siteConfig);
-
- Assert.assertEquals(ThriftServerType.SASL,
context.getThriftServerType());
- SaslServerConnectionParams saslParams = context.getSaslParams();
- Assert.assertEquals(new SaslServerConnectionParams(conf, token),
saslParams);
- Assert.assertEquals(username, saslParams.getPrincipal());
-
- EasyMock.verify(factory, context, siteConfig);
-
- return null;
- }
+ testUser.doAs((PrivilegedExceptionAction<Void>) () -> {
+
+ Properties clientProps = new Properties();
+ clientProps.setProperty(ClientProperty.SASL_ENABLED.getKey(), "true");
+
clientProps.setProperty(ClientProperty.SASL_KERBEROS_SERVER_PRIMARY.getKey(),
"accumulo");
+ final AccumuloConfiguration conf =
ClientConfConverter.toAccumuloConf(clientProps);
+ SiteConfiguration siteConfig =
EasyMock.createMock(SiteConfiguration.class);
+
+
EasyMock.expect(siteConfig.getBoolean(Property.INSTANCE_RPC_SASL_ENABLED)).andReturn(true);
+
+ // Deal with SystemToken being private
+ PasswordToken pw = new PasswordToken("fake");
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ pw.write(new DataOutputStream(baos));
+ SystemToken token = new SystemToken();
+ token.readFields(new DataInputStream(new
ByteArrayInputStream(baos.toByteArray())));
+
+ ServerConfigurationFactory factory =
EasyMock.createMock(ServerConfigurationFactory.class);
+
EasyMock.expect(factory.getSystemConfiguration()).andReturn(conf).anyTimes();
+
EasyMock.expect(factory.getSiteConfiguration()).andReturn(siteConfig).anyTimes();
+
+ AccumuloServerContext context =
EasyMock.createMockBuilder(AccumuloServerContext.class)
+
.addMockedMethod("enforceKerberosLogin").addMockedMethod("getConfiguration")
+
.addMockedMethod("getServerConfigurationFactory").addMockedMethod("getCredentials")
+ .createMock();
+ context.enforceKerberosLogin();
+ EasyMock.expectLastCall().anyTimes();
+ EasyMock.expect(context.getConfiguration()).andReturn(conf).anyTimes();
+
EasyMock.expect(context.getServerConfigurationFactory()).andReturn(factory).anyTimes();
+ EasyMock.expect(context.getCredentials())
+ .andReturn(new Credentials("accumulo/[email protected]",
token)).once();
+
+ // Just make the SiteConfiguration delegate to our ClientConfiguration
(by way of the
+ // AccumuloConfiguration)
+ // Presently, we only need get(Property) and iterator().
+
EasyMock.expect(siteConfig.get(EasyMock.anyObject(Property.class))).andAnswer(()
-> {
+ Object[] args = EasyMock.getCurrentArguments();
+ return conf.get((Property) args[0]);
+ }).anyTimes();
+
+
EasyMock.expect(siteConfig.iterator()).andAnswer(conf::iterator).anyTimes();
+
+ EasyMock.replay(factory, context, siteConfig);
+
+ Assert.assertEquals(ThriftServerType.SASL,
context.getThriftServerType());
+ SaslServerConnectionParams saslParams = context.getSaslParams();
+ Assert.assertEquals(new SaslServerConnectionParams(conf, token),
saslParams);
+ Assert.assertEquals(username, saslParams.getPrincipal());
+
+ EasyMock.verify(factory, context, siteConfig);
+
+ return null;
});
}
diff --git
a/server/base/src/test/java/org/apache/accumulo/server/rpc/SaslServerConnectionParamsTest.java
b/server/base/src/test/java/org/apache/accumulo/server/rpc/SaslServerConnectionParamsTest.java
index 3535473..c19f60a 100644
---
a/server/base/src/test/java/org/apache/accumulo/server/rpc/SaslServerConnectionParamsTest.java
+++
b/server/base/src/test/java/org/apache/accumulo/server/rpc/SaslServerConnectionParamsTest.java
@@ -25,14 +25,14 @@ import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.security.PrivilegedExceptionAction;
import java.util.Map;
+import java.util.Properties;
import javax.security.sasl.Sasl;
-import org.apache.accumulo.core.client.ClientConfiguration;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
-import org.apache.accumulo.core.client.impl.ClientContext;
+import org.apache.accumulo.core.client.impl.ClientConfConverter;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
+import org.apache.accumulo.core.conf.ClientProperty;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.rpc.SaslConnectionParams;
import org.apache.accumulo.core.rpc.SaslConnectionParams.QualityOfProtection;
@@ -50,7 +50,7 @@ public class SaslServerConnectionParamsTest {
private String username;
@Before
- public void setup() throws Exception {
+ public void setup() {
System.setProperty("java.security.krb5.realm", "accumulo");
System.setProperty("java.security.krb5.kdc", "fake");
Configuration conf = new Configuration(false);
@@ -62,40 +62,36 @@ public class SaslServerConnectionParamsTest {
@Test
public void testDefaultParamsAsServer() throws Exception {
- testUser.doAs(new PrivilegedExceptionAction<Void>() {
- @Override
- public Void run() throws Exception {
- final ClientConfiguration clientConf =
ClientConfiguration.loadDefault();
+ testUser.doAs((PrivilegedExceptionAction<Void>) () -> {
+ Properties clientProps = new Properties();
+ clientProps.setProperty(ClientProperty.SASL_ENABLED.getKey(), "true");
+ final String primary = "accumulo";
+
clientProps.setProperty(ClientProperty.SASL_KERBEROS_SERVER_PRIMARY.getKey(),
primary);
- // The primary is the first component of the principal
- final String primary = "accumulo";
- clientConf.withSasl(true, primary);
+ final AccumuloConfiguration rpcConf =
ClientConfConverter.toAccumuloConf(clientProps);
+ assertEquals("true", rpcConf.get(Property.INSTANCE_RPC_SASL_ENABLED));
- final AccumuloConfiguration rpcConf =
ClientContext.convertClientConfig(clientConf);
- assertEquals("true",
clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
+ // Deal with SystemToken being private
+ PasswordToken pw = new PasswordToken("fake");
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ pw.write(new DataOutputStream(baos));
+ SystemToken token = new SystemToken();
+ token.readFields(new DataInputStream(new
ByteArrayInputStream(baos.toByteArray())));
- // Deal with SystemToken being private
- PasswordToken pw = new PasswordToken("fake");
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- pw.write(new DataOutputStream(baos));
- SystemToken token = new SystemToken();
- token.readFields(new DataInputStream(new
ByteArrayInputStream(baos.toByteArray())));
+ final SaslConnectionParams saslParams = new
SaslServerConnectionParams(rpcConf, token);
+ assertEquals(primary, saslParams.getKerberosServerPrimary());
+ assertEquals(SaslMechanism.GSSAPI, saslParams.getMechanism());
+ assertNull(saslParams.getCallbackHandler());
- final SaslConnectionParams saslParams = new
SaslServerConnectionParams(rpcConf, token);
- assertEquals(primary, saslParams.getKerberosServerPrimary());
- assertEquals(SaslMechanism.GSSAPI, saslParams.getMechanism());
- assertNull(saslParams.getCallbackHandler());
+ final QualityOfProtection defaultQop = QualityOfProtection
+ .get(Property.RPC_SASL_QOP.getDefaultValue());
+ assertEquals(defaultQop, saslParams.getQualityOfProtection());
- final QualityOfProtection defaultQop = QualityOfProtection
- .get(Property.RPC_SASL_QOP.getDefaultValue());
- assertEquals(defaultQop, saslParams.getQualityOfProtection());
-
- Map<String,String> properties = saslParams.getSaslProperties();
- assertEquals(1, properties.size());
- assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
- assertEquals(username, saslParams.getPrincipal());
- return null;
- }
+ Map<String,String> properties = saslParams.getSaslProperties();
+ assertEquals(1, properties.size());
+ assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
+ assertEquals(username, saslParams.getPrincipal());
+ return null;
});
}
diff --git
a/server/tserver/src/test/java/org/apache/accumulo/tserver/replication/ReplicationProcessorTest.java
b/server/tserver/src/test/java/org/apache/accumulo/tserver/replication/ReplicationProcessorTest.java
index 1277e0c..1aff864 100644
---
a/server/tserver/src/test/java/org/apache/accumulo/tserver/replication/ReplicationProcessorTest.java
+++
b/server/tserver/src/test/java/org/apache/accumulo/tserver/replication/ReplicationProcessorTest.java
@@ -20,8 +20,8 @@ import static java.nio.charset.StandardCharsets.UTF_8;
import java.util.HashMap;
import java.util.Map;
+import java.util.Properties;
-import org.apache.accumulo.core.client.ClientConfiguration;
import org.apache.accumulo.core.client.Instance;
import org.apache.accumulo.core.client.impl.ClientContext;
import org.apache.accumulo.core.client.impl.Credentials;
@@ -47,7 +47,7 @@ public class ReplicationProcessorTest {
Instance inst = EasyMock.createMock(Instance.class);
VolumeManager fs = EasyMock.createMock(VolumeManager.class);
Credentials creds = new Credentials("foo", new PasswordToken("bar"));
- ClientContext context = new ClientContext(inst, creds,
ClientConfiguration.create());
+ ClientContext context = new ClientContext(inst, creds, new Properties());
Map<String,String> data = new HashMap<>();
@@ -66,7 +66,7 @@ public class ReplicationProcessorTest {
Instance inst = EasyMock.createMock(Instance.class);
VolumeManager fs = EasyMock.createMock(VolumeManager.class);
Credentials creds = new Credentials("foo", new PasswordToken("bar"));
- ClientContext context = new ClientContext(inst, creds,
ClientConfiguration.create());
+ ClientContext context = new ClientContext(inst, creds, new Properties());
Map<String,String> data = new HashMap<>();
ConfigurationCopy conf = new ConfigurationCopy(data);
diff --git
a/test/src/main/java/org/apache/accumulo/test/mapreduce/AccumuloInputFormatIT.java
b/test/src/main/java/org/apache/accumulo/test/mapreduce/AccumuloInputFormatIT.java
index d9d6bf6..1e0c2a7 100644
---
a/test/src/main/java/org/apache/accumulo/test/mapreduce/AccumuloInputFormatIT.java
+++
b/test/src/main/java/org/apache/accumulo/test/mapreduce/AccumuloInputFormatIT.java
@@ -35,8 +35,6 @@ import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.client.ClientConfiguration;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.admin.NewTableConfiguration;
@@ -46,10 +44,6 @@ import
org.apache.accumulo.core.client.mapreduce.impl.BatchInputSplit;
import org.apache.accumulo.core.client.sample.RowSampler;
import org.apache.accumulo.core.client.sample.SamplerConfiguration;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
-import org.apache.accumulo.core.conf.AccumuloConfiguration;
-import org.apache.accumulo.core.conf.ConfigurationCopy;
-import org.apache.accumulo.core.conf.DefaultConfiguration;
-import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Range;
@@ -107,32 +101,9 @@ public class AccumuloInputFormatIT extends
AccumuloClusterHarness {
conn.tableOperations().create(table);
insertData(table, currentTimeMillis());
- ClientConfiguration clientConf = cluster.getClientConfig();
- AccumuloConfiguration clusterClientConf = new ConfigurationCopy(
- DefaultConfiguration.getInstance());
-
- // Pass SSL and CredentialProvider options into the ClientConfiguration
given to
- // AccumuloInputFormat
- boolean sslEnabled =
Boolean.valueOf(clusterClientConf.get(Property.INSTANCE_RPC_SSL_ENABLED));
- if (sslEnabled) {
- ClientProperty[] sslProperties = new ClientProperty[] {
- ClientProperty.INSTANCE_RPC_SSL_ENABLED,
ClientProperty.INSTANCE_RPC_SSL_CLIENT_AUTH,
- ClientProperty.RPC_SSL_KEYSTORE_PATH,
ClientProperty.RPC_SSL_KEYSTORE_TYPE,
- ClientProperty.RPC_SSL_KEYSTORE_PASSWORD,
ClientProperty.RPC_SSL_TRUSTSTORE_PATH,
- ClientProperty.RPC_SSL_TRUSTSTORE_TYPE,
ClientProperty.RPC_SSL_TRUSTSTORE_PASSWORD,
- ClientProperty.RPC_USE_JSSE,
ClientProperty.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS};
-
- for (ClientProperty prop : sslProperties) {
- // The default property is returned if it's not in the
ClientConfiguration so we don't have
- // to check if the value is actually defined
- clientConf.setProperty(prop, clusterClientConf.get(prop.getKey()));
- }
- }
-
Job job = Job.getInstance();
AccumuloInputFormat.setInputTableName(job, table);
- AccumuloInputFormat.setZooKeeperInstance(job, clientConf);
- AccumuloInputFormat.setConnectorInfo(job, getAdminPrincipal(),
getAdminToken());
+ AccumuloInputFormat.setConnectionInfo(job, getConnectionInfo());
// split table
TreeSet<Text> splitsToAdd = new TreeSet<>();
--
To stop receiving notification emails like this one, please contact
[email protected].