This is an automated email from the ASF dual-hosted git repository.

imaxon pushed a commit to branch release-0.9.5
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit 4a20fb28d9e4ac55fa1b61bdc3baadc599aac201
Author: Ian Maxon <ian@maxons.email>
AuthorDate: Mon Jun 8 17:23:07 2020 -0700

    [NO ISSUE] excise log4j 1.x
    
    Change-Id: I1d1d6c5e3e5a2f051552f3518f302f6bb8b4e83d
    Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/6684
    Integration-Tests: Jenkins <jenk...@fulliautomatix.ics.uci.edu>
    Tested-by: Jenkins <jenk...@fulliautomatix.ics.uci.edu>
    Reviewed-by: Michael Blow <mb...@apache.org>
---
 asterixdb/asterix-active/pom.xml                   |   2 +-
 asterixdb/asterix-app/pom.xml                      |   4 +
 asterixdb/asterix-external-data/pom.xml            |   8 +-
 asterixdb/asterix-hivecompat/pom.xml               |   4 +
 asterixdb/asterix-server/pom.xml                   |   8 +-
 asterixdb/pom.xml                                  |  89 ++++++---
 hyracks-fullstack/hyracks/hyracks-hdfs/pom.xml     |  50 ++---
 .../apache/hyracks/hdfs/dataflow/DataflowTest.java | 217 ---------------------
 .../hyracks/hdfs2/dataflow/DataflowTest.java       | 134 -------------
 hyracks-fullstack/pom.xml                          |  27 +++
 10 files changed, 132 insertions(+), 411 deletions(-)
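
The mechanics are the usual log4j 1.x excision pattern: drop the direct
log4j:log4j (1.2.17) dependency, exclude log4j:log4j from the Hadoop
artifacts that still pull it in transitively, and add the
org.apache.logging.log4j:log4j-1.2-api bridge so anything still compiled
against the 1.x API keeps logging through log4j 2. A minimal sketch of what
the bridge enables (illustrative only, not code from this commit; the class
name is hypothetical):

    // Compiled against the old log4j 1.x package, but with log4j-1.2-api on
    // the classpath these calls are forwarded to the log4j 2 implementation.
    import org.apache.log4j.Logger;

    public class LegacyLoggingSketch {
        // Logger.getLogger(Class) is the classic 1.x entry point; the bridge
        // supplies it without the old log4j-1.2.17 jar being present.
        private static final Logger LOGGER = Logger.getLogger(LegacyLoggingSketch.class);

        public static void main(String[] args) {
            LOGGER.info("routed through log4j-1.2-api to log4j 2");
        }
    }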

diff --git a/asterixdb/asterix-active/pom.xml b/asterixdb/asterix-active/pom.xml
index 9b5dcbf..210345f 100644
--- a/asterixdb/asterix-active/pom.xml
+++ b/asterixdb/asterix-active/pom.xml
@@ -55,7 +55,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.logging.log4j</groupId>
-      <artifactId>log4j-api</artifactId>
+      <artifactId>log4j-1.2-api</artifactId>
     </dependency>
   </dependencies>
 </project>
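
Note: log4j-1.2-api itself depends on log4j-api, so swapping the artifact as
above should still resolve the log4j 2 API transitively for this module's own
logging while also serving any 1.x callers on the classpath.
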
diff --git a/asterixdb/asterix-app/pom.xml b/asterixdb/asterix-app/pom.xml
index a26a335..67285d5 100644
--- a/asterixdb/asterix-app/pom.xml
+++ b/asterixdb/asterix-app/pom.xml
@@ -865,5 +865,9 @@
       <artifactId>akka-http-core_2.12</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/asterixdb/asterix-external-data/pom.xml b/asterixdb/asterix-external-data/pom.xml
index f6ebdba..ea8afac 100644
--- a/asterixdb/asterix-external-data/pom.xml
+++ b/asterixdb/asterix-external-data/pom.xml
@@ -413,10 +413,6 @@
       <version>4.4</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.logging.log4j</groupId>
-      <artifactId>log4j-api</artifactId>
-    </dependency>
-    <dependency>
       <groupId>com.sun.xml.bind</groupId>
       <artifactId>jaxb-core</artifactId>
     </dependency>
@@ -468,5 +464,9 @@
       <groupId>software.amazon.awssdk</groupId>
       <artifactId>auth</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/asterixdb/asterix-hivecompat/pom.xml b/asterixdb/asterix-hivecompat/pom.xml
index bd5df93..57575c5 100644
--- a/asterixdb/asterix-hivecompat/pom.xml
+++ b/asterixdb/asterix-hivecompat/pom.xml
@@ -72,5 +72,9 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-core</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/asterixdb/asterix-server/pom.xml b/asterixdb/asterix-server/pom.xml
index 0db37fc..2fea765 100644
--- a/asterixdb/asterix-server/pom.xml
+++ b/asterixdb/asterix-server/pom.xml
@@ -698,10 +698,6 @@
       <artifactId>hyracks-util</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.apache.logging.log4j</groupId>
-      <artifactId>log4j-api</artifactId>
-    </dependency>
-    <dependency>
       <groupId>org.apache.asterix</groupId>
       <artifactId>asterix-fuzzyjoin</artifactId>
       <version>${project.version}</version>
@@ -725,5 +721,9 @@
       <groupId>com.fasterxml.jackson.core</groupId>
       <artifactId>jackson-databind</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/asterixdb/pom.xml b/asterixdb/pom.xml
index bb3a018..38888f0 100644
--- a/asterixdb/pom.xml
+++ b/asterixdb/pom.xml
@@ -937,23 +937,25 @@
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-common</artifactId>
-        <version>${hadoop.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-client</artifactId>
-        <version>${hadoop.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-client</artifactId>
         <version>${hadoop.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-hdfs</artifactId>
         <version>${hadoop.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
@@ -980,18 +982,34 @@
             <groupId>jdk.tools</groupId>
             <artifactId>jdk.tools</artifactId>
           </exclusion>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-hdfs-client</artifactId>
         <version>${hadoop.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-common</artifactId>
         <version>${hadoop.version}</version>
         <classifier>tests</classifier>
+        <exclusions>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
@@ -1002,17 +1020,33 @@
             <groupId>jdk.tools</groupId>
             <artifactId>jdk.tools</artifactId>
           </exclusion>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-annotations</artifactId>
         <version>${hadoop.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-minicluster</artifactId>
         <version>${hadoop.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
@@ -1024,6 +1058,10 @@
             <groupId>commons-logging</groupId>
             <artifactId>commons-logging</artifactId>
           </exclusion>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -1035,6 +1073,10 @@
             <groupId>commons-logging</groupId>
             <artifactId>commons-logging</artifactId>
           </exclusion>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -1061,6 +1103,10 @@
             <groupId>commons-logging</groupId>
             <artifactId>commons-logging</artifactId>
           </exclusion>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -1072,6 +1118,10 @@
             <groupId>commons-logging</groupId>
             <artifactId>commons-logging</artifactId>
           </exclusion>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -1275,25 +1325,6 @@
         <version>1.7.4</version>
       </dependency>
       <dependency>
-        <groupId>log4j</groupId>
-        <artifactId>log4j</artifactId>
-        <version>1.2.17</version>
-        <exclusions>
-          <exclusion>
-            <groupId>com.sun.jmx</groupId>
-            <artifactId>jmxri</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>com.sun.jdmk</groupId>
-            <artifactId>jmxtools</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>javax.jms</groupId>
-            <artifactId>jms</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-      <dependency>
         <groupId>org.apache.zookeeper</groupId>
         <artifactId>zookeeper</artifactId>
         <version>3.4.13</version>
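
With the 1.2.17 artifact gone and the exclusions above in place, a check such
as mvn dependency:tree -Dincludes=log4j:log4j should report no remaining
log4j:log4j entries; that is the quickest way to confirm nothing still drags
the old jar in transitively.
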
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/pom.xml b/hyracks-fullstack/hyracks/hyracks-hdfs/pom.xml
index 9528eaa..49a6a93 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/pom.xml
@@ -60,7 +60,10 @@
         <configuration>
           <failOnWarning>true</failOnWarning>
           <outputXML>true</outputXML>
-          <ignoredUnusedDeclaredDependencies>org.apache.hadoop:hadoop*::</ignoredUnusedDeclaredDependencies>
+          <ignoredDependencies>
+            <ignoredDependency>org.apache.hadoop:hadoop*::</ignoredDependency>
+            <ignoredDependency>org.apache.logging.log4j:log4j*::</ignoredDependency>
+          </ignoredDependencies>
         </configuration>
         <executions>
           <execution>
@@ -87,11 +90,6 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-data-std</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-control-common</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>
@@ -108,6 +106,10 @@
           <groupId>javax.servlet</groupId>
           <artifactId>servlet-api</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -118,20 +120,25 @@
           <groupId>com.sun.jersey.jersey-test-framework</groupId>
           <artifactId>jersey-test-framework-grizzly2</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdfs</artifactId>
       <version>${hadoop.version}</version>
       <type>test-jar</type>
       <classifier>tests</classifier>
       <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -140,6 +147,12 @@
       <type>test-jar</type>
       <classifier>tests</classifier>
       <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
@@ -177,11 +190,6 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>commons-io</groupId>
-      <artifactId>commons-io</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-test-support</artifactId>
       <version>${project.version}</version>
@@ -189,12 +197,6 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-util</artifactId>
-      <version>${project.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-ipc</artifactId>
       <version>${project.version}</version>
     </dependency>
@@ -208,5 +210,9 @@
       <version>1.0.b2</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
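
The switch from ignoredUnusedDeclaredDependencies to ignoredDependencies in
the maven-dependency-plugin configuration above is presumably deliberate:
ignoredDependencies suppresses both used-undeclared and unused-declared
warnings, which covers the log4j-1.2-api jar that is declared here but only
exercised at runtime.
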
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/src/test/java/org/apache/hyracks/hdfs/dataflow/DataflowTest.java b/hyracks-fullstack/hyracks/hyracks-hdfs/src/test/java/org/apache/hyracks/hdfs/dataflow/DataflowTest.java
deleted file mode 100644
index f33f2a3..0000000
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/src/test/java/org/apache/hyracks/hdfs/dataflow/DataflowTest.java
+++ /dev/null
@@ -1,217 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.hdfs.dataflow;
-
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.FileOutputFormat;
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.hyracks.api.client.IHyracksClientConnection;
-import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
-import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.JobSpecification;
-import org.apache.hyracks.data.std.accessors.RawBinaryComparatorFactory;
-import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import org.apache.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
-import org.apache.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
-import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import org.apache.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import org.apache.hyracks.hdfs.lib.RawBinaryHashFunctionFactory;
-import org.apache.hyracks.hdfs.lib.TextKeyValueParserFactory;
-import org.apache.hyracks.hdfs.lib.TextTupleWriterFactory;
-import org.apache.hyracks.hdfs.scheduler.Scheduler;
-import org.apache.hyracks.hdfs.utils.HyracksUtils;
-import org.apache.hyracks.ipc.impl.HyracksConnection;
-import org.apache.hyracks.test.support.TestUtils;
-import org.apache.hyracks.util.file.FileUtil;
-import org.junit.Assert;
-
-import junit.framework.TestCase;
-
-/**
- * Test the org.apache.hyracks.hdfs.dataflow package,
- * the operators for the Hadoop old API.
- */
-@SuppressWarnings({ "deprecation" })
-public class DataflowTest extends TestCase {
-
-    protected static final String ACTUAL_RESULT_DIR = FileUtil.joinPath("target", "actual");
-    private static final String TEST_RESOURCES = FileUtil.joinPath("src", "test", "resources");
-    protected static final String EXPECTED_RESULT_PATH = FileUtil.joinPath(TEST_RESOURCES, "expected");
-    private static final String PATH_TO_HADOOP_CONF = FileUtil.joinPath(TEST_RESOURCES, "hadoop", "conf");
-    protected static final String BUILD_DIR = FileUtil.joinPath("target", "build");
-
-    private static final String DATA_PATH = FileUtil.joinPath(TEST_RESOURCES, "data", "customer.tbl");
-    protected static final String HDFS_INPUT_PATH = "/customer/";
-    protected static final String HDFS_OUTPUT_PATH = "/customer_result/";
-
-    private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR + File.separator + "conf.xml";
-    private static final String MINIDFS_BASEDIR = FileUtil.joinPath("target", "hdfs");
-    private MiniDFSCluster dfsCluster;
-
-    private JobConf conf = new JobConf();
-    private int numberOfNC = 2;
-
-    @Override
-    public void setUp() throws Exception {
-        cleanupStores();
-        HyracksUtils.init();
-        FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
-        FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
-        startHDFS();
-    }
-
-    private void cleanupStores() throws IOException {
-        FileUtils.forceMkdir(new File(MINIDFS_BASEDIR));
-        FileUtils.cleanDirectory(new File(MINIDFS_BASEDIR));
-    }
-
-    protected Configuration getConfiguration() {
-        return conf;
-    }
-
-    protected MiniDFSCluster getMiniDFSCluster(Configuration conf, int numberOfNC) throws IOException {
-        return new MiniDFSCluster(conf, numberOfNC, true, null);
-    }
-
-    /**
-     * Start the HDFS cluster and setup the data files
-     *
-     * @throws IOException
-     */
-    protected void startHDFS() throws IOException {
-        getConfiguration().addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
-        getConfiguration().addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
-        getConfiguration().addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
-
-        FileSystem lfs = FileSystem.getLocal(new Configuration());
-        lfs.delete(new Path(BUILD_DIR), true);
-        System.setProperty("hadoop.log.dir", FileUtil.joinPath("target", "logs"));
-        getConfiguration().set("hdfs.minidfs.basedir", MINIDFS_BASEDIR);
-        dfsCluster = getMiniDFSCluster(getConfiguration(), numberOfNC);
-        FileSystem dfs = FileSystem.get(getConfiguration());
-        Path src = new Path(DATA_PATH);
-        Path dest = new Path(HDFS_INPUT_PATH);
-        Path result = new Path(HDFS_OUTPUT_PATH);
-        dfs.mkdirs(dest);
-        dfs.mkdirs(result);
-        dfs.copyFromLocalFile(src, dest);
-
-        DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
-        getConfiguration().writeXml(confOutput);
-        confOutput.flush();
-        confOutput.close();
-    }
-
-    /**
-     * Test a job with only HDFS read and writes.
-     *
-     * @throws Exception
-     */
-    public void testHDFSReadWriteOperators() throws Exception {
-        FileInputFormat.setInputPaths(conf, HDFS_INPUT_PATH);
-        FileOutputFormat.setOutputPath(conf, new Path(HDFS_OUTPUT_PATH));
-        conf.setInputFormat(TextInputFormat.class);
-
-        Scheduler scheduler = new Scheduler(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
-        InputSplit[] splits = conf.getInputFormat().getSplits(conf, numberOfNC * 4);
-
-        String[] readSchedule = scheduler.getLocationConstraints(splits);
-        JobSpecification jobSpec = new JobSpecification();
-        RecordDescriptor recordDesc =
-                new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
-
-        String[] locations =
-                new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID, HyracksUtils.NC2_ID };
-        HDFSReadOperatorDescriptor readOperator = new HDFSReadOperatorDescriptor(jobSpec, recordDesc, conf, splits,
-                readSchedule, new TextKeyValueParserFactory());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, readOperator, locations);
-
-        ExternalSortOperatorDescriptor sortOperator = new ExternalSortOperatorDescriptor(jobSpec, 10, new int[] { 0 },
-                new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, recordDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, sortOperator, locations);
-
-        HDFSWriteOperatorDescriptor writeOperator =
-                new HDFSWriteOperatorDescriptor(jobSpec, conf, new TextTupleWriterFactory());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, writeOperator, HyracksUtils.NC1_ID);
-
-        jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), readOperator, 0, sortOperator, 0);
-        jobSpec.connect(
-                new MToNPartitioningMergingConnectorDescriptor(jobSpec,
-                        new FieldHashPartitionComputerFactory(new int[] { 0 },
-                                new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
-                        new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, null),
-                sortOperator, 0, writeOperator, 0);
-        jobSpec.addRoot(writeOperator);
-
-        IHyracksClientConnection client =
-                new HyracksConnection(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
-        JobId jobId = client.startJob(jobSpec);
-        client.waitForCompletion(jobId);
-
-        Assert.assertEquals(true, checkResults());
-    }
-
-    /**
-     * Check if the results are correct
-     *
-     * @return true if correct
-     * @throws Exception
-     */
-    protected boolean checkResults() throws Exception {
-        FileSystem dfs = FileSystem.get(getConfiguration());
-        Path result = new Path(HDFS_OUTPUT_PATH);
-        Path actual = new Path(ACTUAL_RESULT_DIR);
-        dfs.copyToLocalFile(result, actual);
-
-        TestUtils.compareWithResult(new File(FileUtil.joinPath(EXPECTED_RESULT_PATH, "part-0")),
-                new File(FileUtil.joinPath(ACTUAL_RESULT_DIR, "customer_result", "part-0")));
-        return true;
-    }
-
-    /**
-     * cleanup hdfs cluster
-     */
-    private void cleanupHDFS() throws Exception {
-        dfsCluster.shutdown();
-    }
-
-    @Override
-    public void tearDown() throws Exception {
-        HyracksUtils.deinit();
-        cleanupHDFS();
-    }
-
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/src/test/java/org/apache/hyracks/hdfs2/dataflow/DataflowTest.java b/hyracks-fullstack/hyracks/hyracks-hdfs/src/test/java/org/apache/hyracks/hdfs2/dataflow/DataflowTest.java
deleted file mode 100644
index e6f4099..0000000
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/src/test/java/org/apache/hyracks/hdfs2/dataflow/DataflowTest.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.hdfs2.dataflow;
-
-import java.util.List;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hyracks.api.client.IHyracksClientConnection;
-import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
-import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.JobSpecification;
-import org.apache.hyracks.data.std.accessors.RawBinaryComparatorFactory;
-import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import org.apache.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
-import org.apache.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
-import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import org.apache.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import org.apache.hyracks.hdfs.MiniDFSClusterFactory;
-import org.apache.hyracks.hdfs.lib.RawBinaryHashFunctionFactory;
-import org.apache.hyracks.hdfs.lib.TextKeyValueParserFactory;
-import org.apache.hyracks.hdfs.lib.TextTupleWriterFactory;
-import org.apache.hyracks.hdfs.utils.HyracksUtils;
-import org.apache.hyracks.hdfs2.scheduler.Scheduler;
-import org.apache.hyracks.ipc.impl.HyracksConnection;
-import org.junit.Assert;
-
-/**
- * Test the org.apache.hyracks.hdfs2.dataflow package,
- * the operators for the Hadoop new API.
- */
-public class DataflowTest extends org.apache.hyracks.hdfs.dataflow.DataflowTest {
-
-    private MiniDFSClusterFactory dfsClusterFactory = new MiniDFSClusterFactory();
-
-    private Job conf;
-
-    @Override
-    public void setUp() throws Exception {
-        conf = new Job();
-        super.setUp();
-    }
-
-    @Override
-    protected Configuration getConfiguration() {
-        return conf.getConfiguration();
-    }
-
-    @Override
-    protected MiniDFSCluster getMiniDFSCluster(Configuration conf, int numberOfNC) throws HyracksDataException {
-        return dfsClusterFactory.getMiniDFSCluster(conf, numberOfNC);
-    }
-
-    /**
-     * Test a job with only HDFS read and writes.
-     *
-     * @throws Exception
-     */
-    @SuppressWarnings({ "rawtypes", "unchecked" })
-    public void testHDFSReadWriteOperators() throws Exception {
-        FileInputFormat.setInputPaths(conf, HDFS_INPUT_PATH);
-        FileOutputFormat.setOutputPath(conf, new Path(HDFS_OUTPUT_PATH));
-        conf.setInputFormatClass(TextInputFormat.class);
-
-        Scheduler scheduler = new Scheduler(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
-        InputFormat inputFormat = ReflectionUtils.newInstance(conf.getInputFormatClass(), getConfiguration());
-        List<InputSplit> splits = inputFormat.getSplits(conf);
-
-        String[] readSchedule = scheduler.getLocationConstraints(splits);
-        JobSpecification jobSpec = new JobSpecification();
-        RecordDescriptor recordDesc =
-                new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
-
-        String[] locations =
-                new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID, HyracksUtils.NC2_ID };
-        HDFSReadOperatorDescriptor readOperator = new HDFSReadOperatorDescriptor(jobSpec, recordDesc, conf, splits,
-                readSchedule, new TextKeyValueParserFactory());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, readOperator, locations);
-
-        ExternalSortOperatorDescriptor sortOperator = new ExternalSortOperatorDescriptor(jobSpec, 10, new int[] { 0 },
-                new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, recordDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, sortOperator, locations);
-
-        HDFSWriteOperatorDescriptor writeOperator =
-                new HDFSWriteOperatorDescriptor(jobSpec, conf, new TextTupleWriterFactory());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, writeOperator, HyracksUtils.NC1_ID);
-
-        jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), readOperator, 0, sortOperator, 0);
-        jobSpec.connect(
-                new MToNPartitioningMergingConnectorDescriptor(jobSpec,
-                        new FieldHashPartitionComputerFactory(new int[] { 0 },
-                                new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
-                        new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, null),
-                sortOperator, 0, writeOperator, 0);
-        jobSpec.addRoot(writeOperator);
-
-        IHyracksClientConnection client =
-                new HyracksConnection(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
-        JobId jobId = client.startJob(jobSpec);
-        client.waitForCompletion(jobId);
-
-        Assert.assertEquals(true, checkResults());
-    }
-}
diff --git a/hyracks-fullstack/pom.xml b/hyracks-fullstack/pom.xml
index d07e1dd..020fec0 100644
--- a/hyracks-fullstack/pom.xml
+++ b/hyracks-fullstack/pom.xml
@@ -116,17 +116,33 @@
             <groupId>jdk.tools</groupId>
             <artifactId>jdk.tools</artifactId>
           </exclusion>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-hdfs</artifactId>
         <version>${hadoop.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-minicluster</artifactId>
         <version>${hadoop.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
@@ -139,6 +155,12 @@
         <version>${hadoop.version}</version>
         <classifier>tests</classifier>
         <scope>test</scope>
+        <exclusions>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>commons-io</groupId>
@@ -277,6 +299,11 @@
         <artifactId>maven-plugin-api</artifactId>
         <version>3.6.3</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-1.2-api</artifactId>
+        <version>2.13.1</version>
+      </dependency>
     </dependencies>
   </dependencyManagement>
   <build>
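
Pinning org.apache.logging.log4j:log4j-1.2-api at 2.13.1 in the hyracks
dependencyManagement section is what lets the module-level declarations above
omit a <version> element; the asterixdb modules presumably pick their version
up the same way from asterixdb/pom.xml.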
