This is an automated email from the ASF dual-hosted git repository.
arina pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git
The following commit(s) were added to refs/heads/master by this push:
new c346859 DRILL-6526: Refactor FileSystemConfig to disallow direct
access from the code to its variables
c346859 is described below
commit c346859735e4eab3ad12e755e5643fceb9536f74
Author: Arina Ielchiieva <[email protected]>
AuthorDate: Fri Jun 22 19:36:41 2018 +0300
DRILL-6526: Refactor FileSystemConfig to disallow direct access from the
code to its variables
---
.../drill/exec/store/dfs/FileSystemConfig.java | 44 ++++++++++++++++++---
.../drill/exec/store/dfs/FileSystemPlugin.java | 31 +++++++--------
.../apache/drill/exec/store/dfs/FormatCreator.java | 11 +++---
.../exec/store/easy/json/JSONFormatPlugin.java | 14 ++++---
.../exec/store/easy/text/TextFormatPlugin.java | 26 ++++++------
.../exec/store/parquet/ParquetFormatPlugin.java | 10 ++---
.../drill/exec/util/StoragePluginTestUtils.java | 46 +++++++++++++++-------
.../exec/impersonation/BaseTestImpersonation.java | 21 ++++------
.../java/org/apache/drill/exec/sql/TestCTTAS.java | 17 +++++++-
.../org/apache/drill/exec/sql/TestInfoSchema.java | 2 +-
.../java/org/apache/drill/test/ClusterFixture.java | 24 ++++++++---
11 files changed, 158 insertions(+), 88 deletions(-)
diff --git
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java
index 3c8f3a7..4eda955 100644
---
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java
+++
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java
@@ -19,6 +19,8 @@ package org.apache.drill.exec.store.dfs;
import java.util.Map;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.common.logical.StoragePluginConfig;
@@ -26,12 +28,44 @@ import com.fasterxml.jackson.annotation.JsonTypeName;
@JsonTypeName(FileSystemConfig.NAME)
public class FileSystemConfig extends StoragePluginConfig {
- static final org.slf4j.Logger logger =
org.slf4j.LoggerFactory.getLogger(FileSystemConfig.class);
+
public static final String NAME = "file";
- public String connection;
- public Map<String, String> config;
- public Map<String, WorkspaceConfig> workspaces;
- public Map<String, FormatPluginConfig> formats;
+
+ private final String connection;
+ private final Map<String, String> config;
+ private final Map<String, WorkspaceConfig> workspaces;
+ private final Map<String, FormatPluginConfig> formats;
+
+ @JsonCreator
+ public FileSystemConfig(@JsonProperty("connection") String connection,
+ @JsonProperty("config") Map<String, String> config,
+ @JsonProperty("workspaces") Map<String,
WorkspaceConfig> workspaces,
+ @JsonProperty("formats") Map<String,
FormatPluginConfig> formats) {
+ this.connection = connection;
+ this.config = config;
+ this.workspaces = workspaces;
+ this.formats = formats;
+ }
+
+ @JsonProperty
+ public String getConnection() {
+ return connection;
+ }
+
+ @JsonProperty
+ public Map<String, String> getConfig() {
+ return config;
+ }
+
+ @JsonProperty
+ public Map<String, WorkspaceConfig> getWorkspaces() {
+ return workspaces;
+ }
+
+ @JsonProperty
+ public Map<String, FormatPluginConfig> getFormats() {
+ return formats;
+ }
@Override
public int hashCode() {
diff --git
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
index 734ab73..e71e7e1 100644
---
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
+++
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
@@ -20,8 +20,11 @@ package org.apache.drill.exec.store.dfs;
import static
org.apache.drill.exec.store.dfs.FileSystemSchemaFactory.DEFAULT_WS_NAME;
import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import java.util.Set;
import org.apache.calcite.schema.SchemaPlus;
@@ -44,8 +47,6 @@ import org.apache.hadoop.fs.FileSystem;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSet.Builder;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
/**
* A Storage engine associated with a Hadoop FileSystem Implementation.
Examples include HDFS, MapRFS, QuantcastFileSystem,
@@ -62,42 +63,38 @@ public class FileSystemPlugin extends AbstractStoragePlugin
{
private final Configuration fsConf;
private final LogicalPlanPersistence lpPersistance;
- public FileSystemPlugin(FileSystemConfig config, DrillbitContext context,
String name)
- throws ExecutionSetupException{
+ public FileSystemPlugin(FileSystemConfig config, DrillbitContext context,
String name) throws ExecutionSetupException {
super(context, name);
this.config = config;
this.lpPersistance = context.getLpPersistence();
try {
-
fsConf = new Configuration();
- if (config.config != null) {
- for (String s : config.config.keySet()) {
- fsConf.set(s, config.config.get(s));
- }
- }
- fsConf.set(FileSystem.FS_DEFAULT_NAME_KEY, config.connection);
+ Optional.ofNullable(config.getConfig())
+ .ifPresent(c -> c.forEach(fsConf::set));
+
+ fsConf.set(FileSystem.FS_DEFAULT_NAME_KEY, config.getConnection());
fsConf.set("fs.classpath.impl", ClassPathFileSystem.class.getName());
fsConf.set("fs.drill-local.impl",
LocalSyncableFileSystem.class.getName());
formatCreator = newFormatCreator(config, context, fsConf);
- List<FormatMatcher> matchers = Lists.newArrayList();
- formatPluginsByConfig = Maps.newHashMap();
+ List<FormatMatcher> matchers = new ArrayList<>();
+ formatPluginsByConfig = new HashMap<>();
for (FormatPlugin p : formatCreator.getConfiguredFormatPlugins()) {
matchers.add(p.getMatcher());
formatPluginsByConfig.put(p.getConfig(), p);
}
- final boolean noWorkspace = config.workspaces == null ||
config.workspaces.isEmpty();
- List<WorkspaceSchemaFactory> factories = Lists.newArrayList();
+ boolean noWorkspace = config.getWorkspaces() == null ||
config.getWorkspaces().isEmpty();
+ List<WorkspaceSchemaFactory> factories = new ArrayList<>();
if (!noWorkspace) {
- for (Map.Entry<String, WorkspaceConfig> space :
config.workspaces.entrySet()) {
+ for (Map.Entry<String, WorkspaceConfig> space :
config.getWorkspaces().entrySet()) {
factories.add(new WorkspaceSchemaFactory(this, space.getKey(), name,
space.getValue(), matchers, context.getLpPersistence(),
context.getClasspathScan()));
}
}
// if the "default" workspace is not given add one.
- if (noWorkspace || !config.workspaces.containsKey(DEFAULT_WS_NAME)) {
+ if (noWorkspace || !config.getWorkspaces().containsKey(DEFAULT_WS_NAME))
{
factories.add(new WorkspaceSchemaFactory(this, DEFAULT_WS_NAME, name,
WorkspaceConfig.DEFAULT, matchers, context.getLpPersistence(),
context.getClasspathScan()));
}
diff --git
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
index fe9014b..b981adf 100644
---
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
+++
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatCreator.java
@@ -21,6 +21,7 @@ import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Collection;
import java.util.Collections;
+import java.util.HashMap;
import java.util.Map;
import org.apache.drill.common.exceptions.UserException;
@@ -31,8 +32,6 @@ import org.apache.drill.common.util.ConstructorChecker;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.hadoop.conf.Configuration;
-import com.google.common.collect.Maps;
-
/**
* Responsible for instantiating format plugins
*/
@@ -51,7 +50,7 @@ public class FormatCreator {
* @return a map of type to constructor that takes the config
*/
private static Map<Class<?>, Constructor<?>>
initConfigConstructors(Collection<Class<? extends FormatPlugin>> pluginClasses)
{
- Map<Class<?>, Constructor<?>> constructors = Maps.newHashMap();
+ Map<Class<?>, Constructor<?>> constructors = new HashMap<>();
for (Class<? extends FormatPlugin> pluginClass: pluginClasses) {
for (Constructor<?> c : pluginClass.getConstructors()) {
try {
@@ -91,8 +90,8 @@ public class FormatCreator {
this.pluginClasses = classpathScan.getImplementations(FormatPlugin.class);
this.configConstructors = initConfigConstructors(pluginClasses);
- Map<String, FormatPlugin> plugins = Maps.newHashMap();
- if (storageConfig.formats == null || storageConfig.formats.isEmpty()) {
+ Map<String, FormatPlugin> plugins = new HashMap<>();
+ if (storageConfig.getFormats() == null ||
storageConfig.getFormats().isEmpty()) {
for (Class<? extends FormatPlugin> pluginClass: pluginClasses) {
for (Constructor<?> c : pluginClass.getConstructors()) {
try {
@@ -107,7 +106,7 @@ public class FormatCreator {
}
}
} else {
- for (Map.Entry<String, FormatPluginConfig> e :
storageConfig.formats.entrySet()) {
+ for (Map.Entry<String, FormatPluginConfig> e :
storageConfig.getFormats().entrySet()) {
Constructor<?> c = configConstructors.get(e.getValue().getClass());
if (c == null) {
logger.warn("Unable to find constructor for storage config named
'{}' of type '{}'.", e.getKey(), e.getValue().getClass().getName());
diff --git
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
index 095e09a..5eec5cc 100644
---
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
+++
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
@@ -18,10 +18,10 @@
package org.apache.drill.exec.store.easy.json;
import java.io.IOException;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.common.logical.StoragePluginConfig;
@@ -44,7 +44,6 @@ import org.apache.hadoop.fs.FileSystem;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Maps;
public class JSONFormatPlugin extends EasyFormatPlugin<JSONFormatConfig> {
@@ -60,14 +59,17 @@ public class JSONFormatPlugin extends
EasyFormatPlugin<JSONFormatConfig> {
}
@Override
- public RecordReader getRecordReader(FragmentContext context, DrillFileSystem
dfs, FileWork fileWork,
- List<SchemaPath> columns, String userName) throws
ExecutionSetupException {
+ public RecordReader getRecordReader(FragmentContext context,
+ DrillFileSystem dfs,
+ FileWork fileWork,
+ List<SchemaPath> columns,
+ String userName) {
return new JSONRecordReader(context, fileWork.getPath(), dfs, columns);
}
@Override
public RecordWriter getRecordWriter(FragmentContext context, EasyWriter
writer) throws IOException {
- Map<String, String> options = Maps.newHashMap();
+ Map<String, String> options = new HashMap<>();
options.put("location", writer.getLocation());
@@ -76,7 +78,7 @@ public class JSONFormatPlugin extends
EasyFormatPlugin<JSONFormatConfig> {
options.put("prefix", fragmentId);
options.put("separator", " ");
- options.put(FileSystem.FS_DEFAULT_NAME_KEY,
((FileSystemConfig)writer.getStorageConfig()).connection);
+ options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig)
writer.getStorageConfig()).getConnection());
options.put("extension", "json");
options.put("extended",
Boolean.toString(context.getOptions().getOption(ExecConstants.JSON_EXTENDED_TYPES)));
diff --git
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
index 8209252..b61ce30 100644
---
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
+++
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
@@ -19,10 +19,10 @@ package org.apache.drill.exec.store.easy.text;
import java.io.IOException;
import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.common.logical.StoragePluginConfig;
@@ -60,14 +60,13 @@ import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Maps;
public class TextFormatPlugin extends
EasyFormatPlugin<TextFormatPlugin.TextFormatConfig> {
private final static String DEFAULT_NAME = "text";
public TextFormatPlugin(String name, DrillbitContext context, Configuration
fsConf, StoragePluginConfig storageConfig) {
super(name, context, fsConf, storageConfig, new TextFormatConfig(), true,
false, true, true,
- Collections.<String>emptyList(), DEFAULT_NAME);
+ Collections.emptyList(), DEFAULT_NAME);
}
public TextFormatPlugin(String name, DrillbitContext context, Configuration
fsConf, StoragePluginConfig config,
@@ -78,17 +77,20 @@ public class TextFormatPlugin extends
EasyFormatPlugin<TextFormatPlugin.TextForm
@Override
- public RecordReader getRecordReader(FragmentContext context, DrillFileSystem
dfs, FileWork fileWork,
- List<SchemaPath> columns, String userName) throws
ExecutionSetupException {
+ public RecordReader getRecordReader(FragmentContext context,
+ DrillFileSystem dfs,
+ FileWork fileWork,
+ List<SchemaPath> columns,
+ String userName) {
Path path = dfs.makeQualified(new Path(fileWork.getPath()));
FileSplit split = new FileSplit(path, fileWork.getStart(),
fileWork.getLength(), new String[]{""});
- if
(context.getOptions().getOption(ExecConstants.ENABLE_NEW_TEXT_READER_KEY).bool_val
== true) {
+ if
(context.getOptions().getBoolean(ExecConstants.ENABLE_NEW_TEXT_READER_KEY)) {
TextParsingSettings settings = new TextParsingSettings();
- settings.set((TextFormatConfig)formatConfig);
+ settings.set(formatConfig);
return new CompliantTextRecordReader(split, dfs, settings, columns);
} else {
- char delim = ((TextFormatConfig)formatConfig).getFieldDelimiter();
+ char delim = formatConfig.getFieldDelimiter();
return new DrillTextRecordReader(split, dfs.getConf(), context, delim,
columns);
}
}
@@ -112,7 +114,7 @@ public class TextFormatPlugin extends
EasyFormatPlugin<TextFormatPlugin.TextForm
@Override
public RecordWriter getRecordWriter(final FragmentContext context, final
EasyWriter writer) throws IOException {
- final Map<String, String> options = Maps.newHashMap();
+ final Map<String, String> options = new HashMap<>();
options.put("location", writer.getLocation());
@@ -120,10 +122,10 @@ public class TextFormatPlugin extends
EasyFormatPlugin<TextFormatPlugin.TextForm
String fragmentId = String.format("%d_%d", handle.getMajorFragmentId(),
handle.getMinorFragmentId());
options.put("prefix", fragmentId);
- options.put("separator",
((TextFormatConfig)getConfig()).getFieldDelimiterAsString());
- options.put(FileSystem.FS_DEFAULT_NAME_KEY,
((FileSystemConfig)writer.getStorageConfig()).connection);
+ options.put("separator", getConfig().getFieldDelimiterAsString());
+ options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig)
writer.getStorageConfig()).getConnection());
- options.put("extension",
((TextFormatConfig)getConfig()).getExtensions().get(0));
+ options.put("extension", getConfig().getExtensions().get(0));
RecordWriter recordWriter = new
DrillTextRecordWriter(context.getAllocator(), writer.getStorageStrategy());
recordWriter.init(options);
diff --git
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
index 94760da..7d5959b 100644
---
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
+++
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
@@ -18,6 +18,7 @@
package org.apache.drill.exec.store.parquet;
import java.io.IOException;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -65,7 +66,6 @@ import org.apache.parquet.hadoop.ParquetFileWriter;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
public class ParquetFormatPlugin implements FormatPlugin{
private static final org.slf4j.Logger logger =
org.slf4j.LoggerFactory.getLogger(MockStorageEngine.class);
@@ -126,12 +126,12 @@ public class ParquetFormatPlugin implements FormatPlugin{
}
@Override
- public AbstractWriter getWriter(PhysicalOperator child, String location,
List<String> partitionColumns) throws IOException {
+ public AbstractWriter getWriter(PhysicalOperator child, String location,
List<String> partitionColumns) {
return new ParquetWriter(child, location, partitionColumns, this);
}
public RecordWriter getRecordWriter(FragmentContext context, ParquetWriter
writer) throws IOException, OutOfMemoryException {
- Map<String, String> options = Maps.newHashMap();
+ Map<String, String> options = new HashMap<>();
options.put("location", writer.getLocation());
@@ -139,7 +139,7 @@ public class ParquetFormatPlugin implements FormatPlugin{
String fragmentId = String.format("%d_%d", handle.getMajorFragmentId(),
handle.getMinorFragmentId());
options.put("prefix", fragmentId);
- options.put(FileSystem.FS_DEFAULT_NAME_KEY,
((FileSystemConfig)writer.getStorageConfig()).connection);
+ options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig)
writer.getStorageConfig()).getConnection());
options.put(ExecConstants.PARQUET_BLOCK_SIZE,
context.getOptions().getOption(ExecConstants.PARQUET_BLOCK_SIZE).num_val.toString());
options.put(ExecConstants.PARQUET_WRITER_USE_SINGLE_FS_BLOCK,
@@ -215,7 +215,7 @@ public class ParquetFormatPlugin implements FormatPlugin{
private final ParquetFormatConfig formatConfig;
- public ParquetFormatMatcher(ParquetFormatPlugin plugin,
ParquetFormatConfig formatConfig) {
+ ParquetFormatMatcher(ParquetFormatPlugin plugin, ParquetFormatConfig
formatConfig) {
super(plugin, PATTERNS, MAGIC_STRINGS);
this.formatConfig = formatConfig;
}
diff --git
a/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java
b/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java
index 4a90a4e..16836c2 100644
---
a/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java
+++
b/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java
@@ -18,11 +18,13 @@
package org.apache.drill.exec.util;
import java.io.File;
+import java.util.HashMap;
import java.util.Map;
+import java.util.Optional;
import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Maps;
import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.exec.store.StoragePluginRegistry;
import org.apache.drill.exec.store.dfs.FileSystemConfig;
import org.apache.drill.exec.store.dfs.FileSystemPlugin;
@@ -68,21 +70,27 @@ public class StoragePluginTestUtils {
final FileSystemPlugin plugin = (FileSystemPlugin)
pluginRegistry.getPlugin(pluginName);
final FileSystemConfig pluginConfig = (FileSystemConfig)
plugin.getConfig();
- Map<String, WorkspaceConfig> workspaces = Maps.newHashMap();
+ Map<String, WorkspaceConfig> newWorkspaces = new HashMap<>();
+ Optional.ofNullable(pluginConfig.getWorkspaces())
+ .ifPresent(newWorkspaces::putAll);
if (schemas.length == 0) {
schemas = new String[]{TMP_SCHEMA};
}
- for (String schema: schemas) {
- WorkspaceConfig workspaceConfig = pluginConfig.workspaces.get(schema);
- String inputFormat = workspaceConfig == null ? null:
workspaceConfig.getDefaultInputFormat();
+ for (String schema : schemas) {
+ WorkspaceConfig workspaceConfig = newWorkspaces.get(schema);
+ String inputFormat = workspaceConfig == null ? null :
workspaceConfig.getDefaultInputFormat();
WorkspaceConfig newWorkspaceConfig = new
WorkspaceConfig(tmpDirPath.getAbsolutePath(), true, inputFormat, false);
- workspaces.put(schema, newWorkspaceConfig);
+ newWorkspaces.put(schema, newWorkspaceConfig);
}
- pluginConfig.workspaces.putAll(workspaces);
- pluginRegistry.createOrUpdate(pluginName, pluginConfig, true);
+ FileSystemConfig newPluginConfig = new FileSystemConfig(
+ pluginConfig.getConnection(),
+ pluginConfig.getConfig(),
+ newWorkspaces,
+ pluginConfig.getFormats());
+ pluginRegistry.createOrUpdate(pluginName, newPluginConfig, true);
}
public static void configureFormatPlugins(StoragePluginRegistry
pluginRegistry) throws ExecutionSetupException {
@@ -94,32 +102,42 @@ public class StoragePluginTestUtils {
FileSystemPlugin fileSystemPlugin = (FileSystemPlugin)
pluginRegistry.getPlugin(storagePlugin);
FileSystemConfig fileSystemConfig = (FileSystemConfig)
fileSystemPlugin.getConfig();
+ Map<String, FormatPluginConfig> newFormats = new HashMap<>();
+ Optional.ofNullable(fileSystemConfig.getFormats())
+ .ifPresent(newFormats::putAll);
+
TextFormatPlugin.TextFormatConfig textConfig = new
TextFormatPlugin.TextFormatConfig();
textConfig.extensions = ImmutableList.of("txt");
textConfig.fieldDelimiter = '\u0000';
- fileSystemConfig.formats.put("txt", textConfig);
+ newFormats.put("txt", textConfig);
TextFormatPlugin.TextFormatConfig ssvConfig = new
TextFormatPlugin.TextFormatConfig();
ssvConfig.extensions = ImmutableList.of("ssv");
ssvConfig.fieldDelimiter = ' ';
- fileSystemConfig.formats.put("ssv", ssvConfig);
+ newFormats.put("ssv", ssvConfig);
TextFormatPlugin.TextFormatConfig psvConfig = new
TextFormatPlugin.TextFormatConfig();
psvConfig.extensions = ImmutableList.of("tbl");
psvConfig.fieldDelimiter = '|';
- fileSystemConfig.formats.put("psv", psvConfig);
+ newFormats.put("psv", psvConfig);
SequenceFileFormatConfig seqConfig = new SequenceFileFormatConfig();
seqConfig.extensions = ImmutableList.of("seq");
- fileSystemConfig.formats.put("sequencefile", seqConfig);
+ newFormats.put("sequencefile", seqConfig);
TextFormatPlugin.TextFormatConfig csvhtestConfig = new
TextFormatPlugin.TextFormatConfig();
csvhtestConfig.extensions = ImmutableList.of("csvh-test");
csvhtestConfig.fieldDelimiter = ',';
csvhtestConfig.extractHeader = true;
csvhtestConfig.skipFirstLine = true;
- fileSystemConfig.formats.put("csvh-test", csvhtestConfig);
+ newFormats.put("csvh-test", csvhtestConfig);
+
+ FileSystemConfig newFileSystemConfig = new FileSystemConfig(
+ fileSystemConfig.getConnection(),
+ fileSystemConfig.getConfig(),
+ fileSystemConfig.getWorkspaces(),
+ newFormats);
- pluginRegistry.createOrUpdate(storagePlugin, fileSystemConfig, true);
+ pluginRegistry.createOrUpdate(storagePlugin, newFileSystemConfig, true);
}
}
diff --git
a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java
b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java
index de2ddc1..6a4452e 100644
---
a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java
+++
b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java
@@ -19,7 +19,6 @@ package org.apache.drill.exec.impersonation;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
-import com.google.common.collect.ImmutableMap;
import org.apache.commons.io.FileUtils;
import org.apache.drill.PlanTestBase;
import org.apache.drill.common.config.DrillConfig;
@@ -122,22 +121,16 @@ public class BaseTestImpersonation extends PlanTestBase {
updateTestCluster(1, DrillConfig.create(props));
}
- protected static void addMiniDfsBasedStorage(final Map<String,
WorkspaceConfig> workspaces)
- throws Exception {
+ protected static void addMiniDfsBasedStorage(final Map<String,
WorkspaceConfig> workspaces) throws Exception {
// Create a HDFS based storage plugin based on local storage plugin and
add it to plugin registry (connection string
// for mini dfs is varies for each run).
- final StoragePluginRegistry pluginRegistry =
getDrillbitContext().getStorage();
- final FileSystemConfig lfsPluginConfig = (FileSystemConfig)
pluginRegistry.getPlugin("dfs").getConfig();
+ StoragePluginRegistry pluginRegistry = getDrillbitContext().getStorage();
+ FileSystemConfig lfsPluginConfig = (FileSystemConfig)
pluginRegistry.getPlugin("dfs").getConfig();
- final FileSystemConfig miniDfsPluginConfig = new FileSystemConfig();
- miniDfsPluginConfig.connection =
dfsConf.get(FileSystem.FS_DEFAULT_NAME_KEY);
-
- createAndAddWorkspace("tmp", "/tmp", (short)0777, processUser,
processUser, workspaces);
-
- miniDfsPluginConfig.workspaces = workspaces;
- miniDfsPluginConfig.formats = ImmutableMap.copyOf(lfsPluginConfig.formats);
- miniDfsPluginConfig.setEnabled(true);
+ String connection = dfsConf.get(FileSystem.FS_DEFAULT_NAME_KEY);
+ createAndAddWorkspace("tmp", "/tmp", (short) 0777, processUser,
processUser, workspaces);
+ FileSystemConfig miniDfsPluginConfig = new FileSystemConfig(connection,
null, workspaces, lfsPluginConfig.getFormats());
pluginRegistry.createOrUpdate(MINIDFS_STORAGE_PLUGIN_NAME,
miniDfsPluginConfig, true);
}
@@ -150,7 +143,7 @@ public class BaseTestImpersonation extends PlanTestBase {
workspaces.put(name, ws);
}
- protected static void stopMiniDfsCluster() throws Exception {
+ protected static void stopMiniDfsCluster() {
if (dfsCluster != null) {
dfsCluster.shutdown();
dfsCluster = null;
diff --git
a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
index f2c0c82..ebf2cdd 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
@@ -32,7 +32,10 @@ import org.junit.rules.ExpectedException;
import java.io.File;
import java.nio.file.Paths;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
+import java.util.Optional;
import static
org.apache.drill.exec.util.StoragePluginTestUtils.DFS_PLUGIN_NAME;
import static org.apache.drill.exec.util.StoragePluginTestUtils.DFS_TMP_SCHEMA;
@@ -52,8 +55,18 @@ public class TestCTTAS extends BaseTestQuery {
File tmp2 = dirTestWatcher.makeSubDir(Paths.get("tmp2"));
StoragePluginRegistry pluginRegistry = getDrillbitContext().getStorage();
FileSystemConfig pluginConfig = (FileSystemConfig)
pluginRegistry.getPlugin(DFS_PLUGIN_NAME).getConfig();
- pluginConfig.workspaces.put(temp2_wk, new
WorkspaceConfig(tmp2.getAbsolutePath(), true, null, false));
- pluginRegistry.createOrUpdate(DFS_PLUGIN_NAME, pluginConfig, true);
+
+ Map<String, WorkspaceConfig> newWorkspaces = new HashMap<>();
+ Optional.ofNullable(pluginConfig.getWorkspaces())
+ .ifPresent(newWorkspaces::putAll);
+ newWorkspaces.put(temp2_wk, new WorkspaceConfig(tmp2.getAbsolutePath(),
true, null, false));
+
+ FileSystemConfig newPluginConfig = new FileSystemConfig(
+ pluginConfig.getConnection(),
+ pluginConfig.getConfig(),
+ newWorkspaces,
+ pluginConfig.getFormats());
+ pluginRegistry.createOrUpdate(DFS_PLUGIN_NAME, newPluginConfig, true);
}
@Test
diff --git
a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchema.java
b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchema.java
index e0ed2fb..3932d7e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchema.java
@@ -420,7 +420,7 @@ public class TestInfoSchema extends BaseTestQuery {
assertEquals("file", configMap.get("type"));
final FileSystemConfig testConfig = (FileSystemConfig)
bits[0].getContext().getStorage().getPlugin("dfs").getConfig();
- final String tmpSchemaLocation =
testConfig.workspaces.get("tmp").getLocation();
+ final String tmpSchemaLocation =
testConfig.getWorkspaces().get("tmp").getLocation();
assertEquals(tmpSchemaLocation, configMap.get("location"));
batch.release();
diff --git
a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
index 9ddcdb7..096c8cd 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
@@ -30,6 +30,7 @@ import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import java.util.Properties;
import org.apache.drill.test.DrillTestWrapper.TestServices;
@@ -498,13 +499,24 @@ public class ClusterFixture extends BaseFixture
implements AutoCloseable {
final FileSystemConfig pluginConfig = (FileSystemConfig)
plugin.getConfig();
final WorkspaceConfig newTmpWSConfig = new WorkspaceConfig(path, true,
defaultFormat, false);
- pluginConfig.workspaces.remove(schemaName);
- pluginConfig.workspaces.put(schemaName, newTmpWSConfig);
- if (format != null) {
- pluginConfig.formats.put(defaultFormat, format);
- }
+ Map<String, WorkspaceConfig> newWorkspaces = new HashMap<>();
+ Optional.ofNullable(pluginConfig.getWorkspaces())
+ .ifPresent(newWorkspaces::putAll);
+ newWorkspaces.put(schemaName, newTmpWSConfig);
+
+ Map<String, FormatPluginConfig> newFormats = new
HashMap<>(pluginConfig.getFormats());
+ Optional.ofNullable(pluginConfig.getFormats())
+ .ifPresent(newFormats::putAll);
+ Optional.ofNullable(format)
+ .ifPresent(f -> newFormats.put(defaultFormat, f));
+
+ FileSystemConfig newPluginConfig = new FileSystemConfig(
+ pluginConfig.getConnection(),
+ pluginConfig.getConfig(),
+ newWorkspaces,
+ newFormats);
- pluginRegistry.createOrUpdate(pluginName, pluginConfig, true);
+ pluginRegistry.createOrUpdate(pluginName, newPluginConfig, true);
}
public static final String EXPLAIN_PLAN_TEXT = "text";