paul-rogers commented on a change in pull request #2364:
URL: https://github.com/apache/drill/pull/2364#discussion_r761641766
##########
File path: exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
##########
@@ -19,64 +19,82 @@
import java.io.IOException;
import java.io.OutputStream;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Objects;
-import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.ops.QueryContext.SqlStatementType;
+import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileReaderFactory;
+import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileScanBuilder;
+import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileSchemaNegotiator;
+import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
import org.apache.drill.exec.planner.common.DrillStatsTable;
import org.apache.drill.exec.planner.common.DrillStatsTable.TableStatistics;
import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.exec.server.options.OptionSet;
import org.apache.drill.exec.store.RecordReader;
import org.apache.drill.exec.store.RecordWriter;
import org.apache.drill.exec.store.StatisticsRecordWriter;
import org.apache.drill.exec.store.dfs.DrillFileSystem;
import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
+import org.apache.drill.exec.store.dfs.easy.EasySubScan;
import org.apache.drill.exec.store.dfs.easy.EasyWriter;
import org.apache.drill.exec.store.dfs.easy.FileWork;
-import org.apache.drill.exec.store.easy.json.JSONFormatPlugin.JSONFormatConfig;
-import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
-import org.apache.drill.shaded.guava.com.google.common.collect.Maps;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
+import static org.apache.drill.exec.store.easy.json.JSONFormatConfig.PLUGIN_NAME;
+
public class JSONFormatPlugin extends EasyFormatPlugin<JSONFormatConfig> {
private static final Logger logger =
LoggerFactory.getLogger(JSONFormatPlugin.class);
- public static final String DEFAULT_NAME = "json";
-
private static final boolean IS_COMPRESSIBLE = true;
- public static final String OPERATOR_TYPE = "JSON_SUB_SCAN";
+ public static final String READER_OPERATOR_TYPE = "JSON_SUB_SCAN";
+ public static final String WRITER_OPERATOR_TYPE = "JSON_WRITER";
public JSONFormatPlugin(String name, DrillbitContext context,
Configuration fsConf, StoragePluginConfig storageConfig) {
this(name, context, fsConf, storageConfig, new JSONFormatConfig(null));
}
- public JSONFormatPlugin(String name, DrillbitContext context,
- Configuration fsConf, StoragePluginConfig config, JSONFormatConfig formatPluginConfig) {
- super(name, context, fsConf, config, formatPluginConfig, true,
- false, false, IS_COMPRESSIBLE, formatPluginConfig.getExtensions(), DEFAULT_NAME);
+ public JSONFormatPlugin(String name, DrillbitContext context, Configuration fsConf,
+ StoragePluginConfig config, JSONFormatConfig formatPluginConfig) {
+ super(name, easyConfig(fsConf, formatPluginConfig), context, config, formatPluginConfig);
+ }
+
+ private static EasyFormatConfig easyConfig(Configuration fsConf, JSONFormatConfig pluginConfig) {
+ return EasyFormatConfig.builder()
+ .readable(true)
+ .writable(true)
+ .blockSplittable(false)
+ .compressible(IS_COMPRESSIBLE)
+ .supportsProjectPushdown(true)
+ .extensions(pluginConfig.getExtensions())
+ .fsConf(fsConf)
+ .defaultName(PLUGIN_NAME)
+ .readerOperatorType(READER_OPERATOR_TYPE)
+ .writerOperatorType(WRITER_OPERATOR_TYPE)
Review comment:
Sorry, I saw this in the PR description:
> The new "V2" JSON scan is controlled by a new option:
> store.json.enable_v2_reader, which is false by default in this PR.
I thought that meant the V2 reader is not enabled by default, hence the suggested comment.
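
For reference, a minimal sketch (not code from this PR) of how that option might gate reader selection. Only the option key `store.json.enable_v2_reader` comes from the PR description; the class, constant, and method names below are made up for illustration, and `OptionSet.getBoolean(...)` is assumed to be the relevant accessor.

```java
import org.apache.drill.exec.server.options.OptionSet;

// Hypothetical helper, for illustration only; not part of the PR.
public class JsonReaderVersionGate {

  // Option key quoted from the PR description; false by default in this PR.
  public static final String ENABLE_V2_READER_KEY = "store.json.enable_v2_reader";

  // Returns true when the session/system option enables the "V2" JSON reader,
  // so the plugin can fall back to the legacy reader otherwise.
  public static boolean useV2Reader(OptionSet options) {
    return options.getBoolean(ENABLE_V2_READER_KEY);
  }
}
```

With a gate like this, the plugin could keep returning the existing RecordReader path when the option is off and only build the new FileScanBuilder-based scan when it is on.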