[ 
https://issues.apache.org/jira/browse/DRILL-6424?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16481030#comment-16481030
 ] 

ASF GitHub Bot commented on DRILL-6424:
---------------------------------------

asfgit closed pull request #1274: DRILL-6424: Updating FasterXML Jackson 
libraries
URL: https://github.com/apache/drill/pull/1274
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):

diff --git a/contrib/storage-hive/hive-exec-shade/pom.xml 
b/contrib/storage-hive/hive-exec-shade/pom.xml
index ad572efaff..6f511adf71 100644
--- a/contrib/storage-hive/hive-exec-shade/pom.xml
+++ b/contrib/storage-hive/hive-exec-shade/pom.xml
@@ -167,23 +167,4 @@
       </plugin>
     </plugins>
   </build>
-  <profiles>
-    <profile>
-      <id>mapr</id>
-      <properties>
-        <!-- TODO: MapR Hive 2.1 client version libraries use older jackson 
libraries than Hive 2.3 client.
-        It can be removed after updating onto 2.3 version -->
-        
<jackson.databind.mapr.hive.version>2.4.2</jackson.databind.mapr.hive.version>
-      </properties>
-      <dependencyManagement>
-        <dependencies>
-          <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-databind</artifactId>
-            <version>${jackson.databind.mapr.hive.version}</version>
-          </dependency>
-        </dependencies>
-      </dependencyManagement>
-    </profile>
-  </profiles>
 </project>
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonRecordWriter.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonRecordWriter.java
index b350d57783..bd81578870 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonRecordWriter.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonRecordWriter.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.store.easy.json;
 
 import java.io.IOException;
+import java.io.OutputStream;
 import java.util.List;
 import java.util.Map;
 
@@ -33,7 +34,6 @@
 import org.apache.drill.exec.vector.complex.fn.JsonWriter;
 import org.apache.drill.exec.vector.complex.reader.FieldReader;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
@@ -56,7 +56,7 @@
 
   private int index;
   private FileSystem fs = null;
-  private FSDataOutputStream stream = null;
+  private OutputStream stream = null;
 
   private final JsonFactory factory = new JsonFactory();
   private final StorageStrategy storageStrategy;
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/metadata/Metadata.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/metadata/Metadata.java
index cdf98e605b..ab655e9217 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/metadata/Metadata.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/metadata/Metadata.java
@@ -40,8 +40,6 @@
 import org.apache.drill.exec.util.DrillFileSystemUtil;
 import org.apache.drill.exec.util.ImpersonationUtil;
 import org.apache.hadoop.fs.BlockLocation;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -60,6 +58,8 @@
 
 import javax.annotation.Nullable;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Iterator;
@@ -542,7 +542,7 @@ private void writeFile(ParquetTableMetadata_v3 
parquetTableMetadata, Path p, Fil
     SimpleModule module = new SimpleModule();
     module.addSerializer(ColumnMetadata_v3.class, new 
ColumnMetadata_v3.Serializer());
     mapper.registerModule(module);
-    FSDataOutputStream os = fs.create(p);
+    OutputStream os = fs.create(p);
     mapper.writerWithDefaultPrettyPrinter().writeValue(os, 
parquetTableMetadata);
     os.flush();
     os.close();
@@ -555,7 +555,7 @@ private void writeFile(ParquetTableMetadataDirs 
parquetTableMetadataDirs, Path p
     ObjectMapper mapper = new ObjectMapper(jsonFactory);
     SimpleModule module = new SimpleModule();
     mapper.registerModule(module);
-    FSDataOutputStream os = fs.create(p);
+    OutputStream os = fs.create(p);
     mapper.writerWithDefaultPrettyPrinter().writeValue(os, 
parquetTableMetadataDirs);
     os.flush();
     os.close();
@@ -586,7 +586,7 @@ private void readBlockMeta(Path path, boolean dirsOnly, 
MetadataContext metaCont
     mapper.registerModule(serialModule);
     mapper.registerModule(module);
     mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-    try (FSDataInputStream is = fs.open(path)) {
+    try (InputStream is = fs.open(path)) {
       boolean alreadyCheckedModification;
       boolean newMetadata = false;
       alreadyCheckedModification = 
metaContext.getStatus(metadataParentDirPath);
diff --git 
a/logical/src/main/java/org/apache/drill/common/logical/data/Sequence.java 
b/logical/src/main/java/org/apache/drill/common/logical/data/Sequence.java
deleted file mode 100644
index c9d68ff0e4..0000000000
--- a/logical/src/main/java/org/apache/drill/common/logical/data/Sequence.java
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.common.logical.data;
-
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.drill.common.logical.data.Sequence.De;
-import org.apache.drill.common.logical.data.visitors.LogicalVisitor;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonTypeName;
-import com.fasterxml.jackson.annotation.ObjectIdGenerator;
-import com.fasterxml.jackson.annotation.ObjectIdGenerators;
-import com.fasterxml.jackson.core.JsonLocation;
-import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.core.JsonParser;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.core.JsonToken;
-import com.fasterxml.jackson.databind.DeserializationContext;
-import com.fasterxml.jackson.databind.JavaType;
-import com.fasterxml.jackson.databind.JsonDeserializer;
-import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
-import com.fasterxml.jackson.databind.deser.impl.ReadableObjectId;
-import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
-import com.google.common.collect.Iterators;
-
-// TODO - is this even ever used anymore? I don't believe the planner will ever
-// generate this, we might have some tests with old logical plans that use this
-// but it should probably be removed
-/**
- * Describes a list of operators where each operator only has one input and 
that
- * input is the operator that came before.
- *
- */
-@Deprecated
-@JsonDeserialize(using = De.class)
-@JsonTypeName("sequence")
-public class Sequence extends LogicalOperatorBase {
-  static final Logger logger = LoggerFactory.getLogger(Sequence.class);
-
-  private Sequence() {}
-
-  public boolean openTop;
-  public LogicalOperator input;
-  @JsonProperty("do")
-  public List<LogicalOperator> stream;
-
-    @Override
-    public <T, X, E extends Throwable> T accept(LogicalVisitor<T, X, E> 
logicalVisitor, X value) throws E {
-        return logicalVisitor.visitSequence(this, value);
-    }
-
-    @Override
-    public Iterator<LogicalOperator> iterator() {
-        return Iterators.singletonIterator(stream.get(stream.size() - 1));
-    }
-
-    public static class De extends StdDeserializer<LogicalOperator> {
-
-    protected De() {
-      super(Sequence.class);
-    }
-
-    @Override
-    public LogicalOperator deserialize(JsonParser jp, DeserializationContext 
ctxt) throws IOException,
-        JsonProcessingException {
-      ObjectIdGenerator<Integer> idGenerator = new 
ObjectIdGenerators.IntSequenceGenerator();
-      JsonLocation start = jp.getCurrentLocation();
-      JsonToken t = jp.getCurrentToken();
-      LogicalOperator parent = null;
-      LogicalOperator first = null;
-      LogicalOperator prev = null;
-      Integer id = null;
-
-      while (true) {
-        String fieldName = jp.getText();
-        t = jp.nextToken();
-        switch (fieldName) { // switch on field names.
-        case "@id":
-          id = _parseIntPrimitive(jp, ctxt);
-          break;
-        case "input":
-          JavaType tp = ctxt.constructType(LogicalOperator.class);
-          JsonDeserializer<Object> d = ctxt.findRootValueDeserializer(tp);
-          parent = (LogicalOperator) d.deserialize(jp, ctxt);
-          break;
-
-        case "do":
-          if (!jp.isExpectedStartArrayToken()) {
-            throwE(
-                jp,
-                "The do parameter of sequence should be an array of 
SimpleOperators.  Expected a JsonToken.START_ARRAY token but received a "
-                    + t.name() + "token.");
-          }
-
-          int pos = 0;
-          while ((t = jp.nextToken()) != JsonToken.END_ARRAY) {
-            // logger.debug("Reading sequence child {}.", pos);
-            JsonLocation l = jp.getCurrentLocation(); // get current location
-                                                      // first so we can
-                                                      // correctly reference 
the
-                                                      // start of the object in
-                                                      // the case that the type
-                                                      // is wrong.
-            LogicalOperator o = jp.readValueAs(LogicalOperator.class);
-
-            if (pos == 0) {
-              if (!(o instanceof SingleInputOperator) && !(o instanceof 
SourceOperator)) {
-                throwE(
-                    l,
-                    "The first operator in a sequence must be either a 
ZeroInput or SingleInput operator.  The provided first operator was not. It was 
of type "
-                        + o.getClass().getName());
-              }
-              first = o;
-            } else {
-              if (!(o instanceof SingleInputOperator)) {
-                throwE(l, "All operators after the first must be single input 
operators.  The operator at position "
-                    + pos + " was not. It was of type " + 
o.getClass().getName());
-              }
-              SingleInputOperator now = (SingleInputOperator) o;
-              now.setInput(prev);
-            }
-            prev = o;
-
-            pos++;
-          }
-          break;
-        default:
-          throwE(jp, "Unknown field name provided for Sequence: " + 
jp.getText());
-        }
-
-        t = jp.nextToken();
-        if (t == JsonToken.END_OBJECT) {
-          break;
-        }
-      }
-
-      if (first == null) {
-        throwE(start, "A sequence must include at least one operator.");
-      }
-      if ((parent == null && first instanceof SingleInputOperator)
-          || (parent != null && first instanceof SourceOperator)) {
-        throwE(start,
-            "A sequence must either start with a ZeroInputOperator or have a 
provided input. It cannot have both or neither.");
-      }
-
-      if (parent != null && first instanceof SingleInputOperator) {
-        ((SingleInputOperator) first).setInput(parent);
-      }
-
-      // set input reference.
-      if (id != null) {
-
-        ReadableObjectId rid = ctxt.findObjectId(id, idGenerator);
-        rid.bindItem(prev);
-        // logger.debug("Binding id {} to item {}.", rid.id, rid.item);
-
-      }
-
-      return first;
-    }
-
-  }
-
-  private static void throwE(JsonLocation l, String e) throws 
JsonParseException {
-    throw new JsonParseException(e, l);
-  }
-
-  private static void throwE(JsonParser jp, String e) throws 
JsonParseException {
-    throw new JsonParseException(e, jp.getCurrentLocation());
-  }
-
-}
diff --git 
a/logical/src/main/java/org/apache/drill/common/logical/data/visitors/AbstractLogicalVisitor.java
 
b/logical/src/main/java/org/apache/drill/common/logical/data/visitors/AbstractLogicalVisitor.java
index 4fd64c5206..482146fdb4 100644
--- 
a/logical/src/main/java/org/apache/drill/common/logical/data/visitors/AbstractLogicalVisitor.java
+++ 
b/logical/src/main/java/org/apache/drill/common/logical/data/visitors/AbstractLogicalVisitor.java
@@ -30,7 +30,6 @@
 import org.apache.drill.common.logical.data.Project;
 import org.apache.drill.common.logical.data.RunningAggregate;
 import org.apache.drill.common.logical.data.Scan;
-import org.apache.drill.common.logical.data.Sequence;
 import org.apache.drill.common.logical.data.Store;
 import org.apache.drill.common.logical.data.Transform;
 import org.apache.drill.common.logical.data.Union;
@@ -96,11 +95,6 @@ public T visitGroupingAggregate(GroupingAggregate groupBy, X 
value) throws E {
       return visitOp(groupBy, value);
     }
 
-    @Override
-    public T visitSequence(Sequence sequence, X value) throws E {
-        return visitOp(sequence, value);
-    }
-
     @Override
     public T visitTransform(Transform transform, X value) throws E {
         return visitOp(transform, value);
diff --git 
a/logical/src/main/java/org/apache/drill/common/logical/data/visitors/LogicalVisitor.java
 
b/logical/src/main/java/org/apache/drill/common/logical/data/visitors/LogicalVisitor.java
index 55fa83880f..9d9013e797 100644
--- 
a/logical/src/main/java/org/apache/drill/common/logical/data/visitors/LogicalVisitor.java
+++ 
b/logical/src/main/java/org/apache/drill/common/logical/data/visitors/LogicalVisitor.java
@@ -30,12 +30,10 @@
 import org.apache.drill.common.logical.data.Project;
 import org.apache.drill.common.logical.data.RunningAggregate;
 import org.apache.drill.common.logical.data.Scan;
-import org.apache.drill.common.logical.data.Sequence;
 import org.apache.drill.common.logical.data.Store;
 import org.apache.drill.common.logical.data.Transform;
 import org.apache.drill.common.logical.data.Union;
 import org.apache.drill.common.logical.data.Window;
-import org.apache.drill.common.logical.data.Window;
 import org.apache.drill.common.logical.data.Writer;
 
 /**
@@ -60,7 +58,6 @@
     public RETURN visitJoin(Join join, EXTRA value) throws EXCEP;
     public RETURN visitLimit(Limit limit, EXTRA value) throws EXCEP;
     public RETURN visitRunningAggregate(RunningAggregate runningAggregate, 
EXTRA value) throws EXCEP;
-    public RETURN visitSequence(Sequence sequence, EXTRA value) throws EXCEP;
     public RETURN visitTransform(Transform transform, EXTRA value) throws 
EXCEP;
     public RETURN visitUnion(Union union, EXTRA value) throws EXCEP;
     public RETURN visitWindow(Window window, EXTRA value) throws EXCEP;
diff --git a/pom.xml b/pom.xml
index 43207c104d..620f73c5de 100644
--- a/pom.xml
+++ b/pom.xml
@@ -49,8 +49,8 @@
     <avatica.version>1.11.0</avatica.version>
     <janino.version>2.7.6</janino.version>
     <sqlline.version>1.1.9-drill-r7</sqlline.version>
-    <jackson.version>2.7.9</jackson.version>
-    <jackson.databind.version>2.7.9.1</jackson.databind.version>
+    <jackson.version>2.9.5</jackson.version>
+    <jackson.databind.version>2.9.5</jackson.databind.version>
     <mapr.release.version>5.2.1-mapr</mapr.release.version>
     <ojai.version>1.1</ojai.version>
     <kerby.version>1.0.0-RC2</kerby.version>


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


> Updating FasterXML Jackson libraries
> ------------------------------------
>
>                 Key: DRILL-6424
>                 URL: https://issues.apache.org/jira/browse/DRILL-6424
>             Project: Apache Drill
>          Issue Type: Improvement
>          Components: Storage - Hive, Tools, Build & Test
>    Affects Versions: 1.13.0
>            Reporter: Vitalii Diravka
>            Assignee: Vitalii Diravka
>            Priority: Major
>              Labels: ready-to-commit
>             Fix For: 1.14.0
>
>
> Drill uses jackson 2.7.9 and jackson-databind 2.7.9.1 versions.
> There are newer versions of these libraries, which are more stable and 
> involve various improvements.
> hive-exec-shaded for mapr profile leverages even older 2.4.2 jackson-databind 
> library. It can be updated too.



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

Reply via email to