OOZIE-2844 Increase stability of Oozie actions when log4j.properties is missing or not readable (andras.piros via pbacsko)


Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/cd699bf5
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/cd699bf5
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/cd699bf5

Branch: refs/heads/oya
Commit: cd699bf56d804a40d634a2976278129c8fa7ec40
Parents: 89be33b
Author: Peter Bacsko <pbac...@cloudera.com>
Authored: Thu Apr 6 16:14:14 2017 +0200
Committer: Peter Bacsko <pbac...@cloudera.com>
Committed: Thu Apr 6 16:15:50 2017 +0200

----------------------------------------------------------------------
 release-log.txt                                 |  1 +
 .../apache/oozie/action/hadoop/DistcpMain.java  | 49 +++++--------
 .../apache/oozie/action/hadoop/HiveMain.java    | 75 +++++++++-----------
 .../oozie/action/hadoop/LauncherMain.java       | 51 +++++++++++++
 .../src/main/resources/default-log4j.properties | 32 +++++++++
 .../oozie/action/hadoop/TestLauncherMain.java   | 57 +++++++++++++++
 .../org/apache/oozie/action/hadoop/PigMain.java | 62 +++++++---------
 .../oozie/action/hadoop/PigMainWithOldAPI.java  | 58 +++++++--------
 .../apache/oozie/action/hadoop/SparkMain.java   | 47 +++++-------
 .../apache/oozie/action/hadoop/SqoopMain.java   | 47 +++++-------
 10 files changed, 275 insertions(+), 204 deletions(-)
----------------------------------------------------------------------
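For readers skimming the diff: the core of the change is that LauncherMain now loads its log4j configuration itself, preferring Hadoop's log4j.properties from the classpath and falling back to the bundled default-log4j.properties when that file is missing or unreadable, so the action mains no longer break when log4j.properties is absent. Below is a condensed, self-contained sketch of that behavior; the class name Log4jFallbackSketch and its main method are illustrative only, the committed code lives in LauncherMain.setupLog4jProperties() further down.

    // Illustrative sketch only -- the real implementation is in the LauncherMain hunk below.
    import java.io.IOException;
    import java.io.InputStream;
    import java.net.URL;
    import java.util.Properties;

    public class Log4jFallbackSketch {

        static Properties loadLog4jProperties() {
            Properties props = new Properties();
            // Prefer Hadoop's log4j.properties when it is present and readable.
            if (tryLoad(props, "log4j.properties")) {
                return props;
            }
            // Otherwise fall back to the defaults bundled with the Oozie sharelib.
            tryLoad(props, "default-log4j.properties");
            return props;
        }

        static boolean tryLoad(Properties props, String resource) {
            URL url = Thread.currentThread().getContextClassLoader().getResource(resource);
            if (url == null) {
                System.out.println("WARN: " + resource + " is not present on the classpath.");
                return false;
            }
            try (InputStream in = url.openStream()) {
                props.load(in);
                return true;
            } catch (IOException e) {
                System.out.println("WARN: " + resource + " is not readable: " + e.getMessage());
                return false;
            }
        }

        public static void main(String[] args) {
            System.out.println(loadLog4jProperties());
        }
    }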


http://git-wip-us.apache.org/repos/asf/oozie/blob/cd699bf5/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 845912c..060e742 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
 -- Oozie 4.4.0 release (trunk - unreleased)
 
+OOZIE-2844 Increase stability of Oozie actions when log4j.properties is missing or not readable (andras.piros via pbacsko)
 OOZIE-2701 Oozie to support Multiple HCatalog URIs (abhishekbafna)
 OOZIE-2850 Fix default callback notifications (asasvari via gezapeti)
 OOZIE-1283 Remove the old ssh documentation (Jan Hentschel via rkanter)

http://git-wip-us.apache.org/repos/asf/oozie/blob/cd699bf5/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
----------------------------------------------------------------------
diff --git a/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java b/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
index fcaadef..65e7c5e 100644
--- a/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
+++ b/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
@@ -24,8 +24,6 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
-import java.net.URL;
-import java.util.Properties;
 import java.util.regex.Pattern;
 
 import org.apache.hadoop.conf.Configuration;
@@ -117,7 +115,7 @@ public class DistcpMain extends JavaMain {
         }
     }
 
-    public static String setUpDistcpLog4J(Configuration distcpConf) throws IOException {
+    private String setUpDistcpLog4J(Configuration distcpConf) throws IOException {
         // Logfile to capture job IDs
         String hadoopJobId = System.getProperty("oozie.launcher.job.id");
         if (hadoopJobId == null) {
@@ -126,40 +124,27 @@ public class DistcpMain extends JavaMain {
 
         String logFile = new File("distcp-oozie-" + hadoopJobId + ".log").getAbsolutePath();
 
-        Properties hadoopProps = new Properties();
-
-        // Preparing log4j configuration
-        URL log4jFile = Thread.currentThread().getContextClassLoader().getResource("log4j.properties");
-        if (log4jFile != null) {
-            // getting hadoop log4j configuration
-            hadoopProps.load(log4jFile.openStream());
-        }
-
         String logLevel = distcpConf.get("oozie.distcp.log.level", "INFO");
         String rootLogLevel = distcpConf.get("oozie.action." + LauncherMapper.ROOT_LOGGER_LEVEL, "INFO");
 
-        hadoopProps.setProperty("log4j.rootLogger", rootLogLevel + ", A");
-        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.tools", logLevel + ", A, jobid");
-        hadoopProps.setProperty("log4j.additivity.org.apache.hadoop.tools", "false");
-        hadoopProps.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
-        hadoopProps.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
-        hadoopProps.setProperty("log4j.appender.A.layout.ConversionPattern", "%-4r [%t] %-5p %c %x - %m%n");
-
-        hadoopProps.setProperty("log4j.appender.jobid", "org.apache.log4j.FileAppender");
-        hadoopProps.setProperty("log4j.appender.jobid.file", logFile);
-        hadoopProps.setProperty("log4j.appender.jobid.layout", "org.apache.log4j.PatternLayout");
-        hadoopProps.setProperty("log4j.appender.jobid.layout.ConversionPattern", "%-4r [%t] %-5p %c %x - %m%n");
-        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.mapred", "INFO, jobid");
-        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.mapreduce.Job", "INFO, jobid");
-        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "INFO, jobid");
+        log4jProperties.setProperty("log4j.rootLogger", rootLogLevel + ", A");
+        log4jProperties.setProperty("log4j.logger.org.apache.hadoop.tools", logLevel + ", A, jobid");
+        log4jProperties.setProperty("log4j.additivity.org.apache.hadoop.tools", "false");
+        log4jProperties.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
+        log4jProperties.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
+        log4jProperties.setProperty("log4j.appender.A.layout.ConversionPattern", "%-4r [%t] %-5p %c %x - %m%n");
+
+        log4jProperties.setProperty("log4j.appender.jobid", "org.apache.log4j.FileAppender");
+        log4jProperties.setProperty("log4j.appender.jobid.file", logFile);
+        log4jProperties.setProperty("log4j.appender.jobid.layout", "org.apache.log4j.PatternLayout");
+        log4jProperties.setProperty("log4j.appender.jobid.layout.ConversionPattern", "%-4r [%t] %-5p %c %x - %m%n");
+        log4jProperties.setProperty("log4j.logger.org.apache.hadoop.mapred", "INFO, jobid");
+        log4jProperties.setProperty("log4j.logger.org.apache.hadoop.mapreduce.Job", "INFO, jobid");
+        log4jProperties.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "INFO, jobid");
 
         String localProps = new File(DISTCP_LOG4J_PROPS).getAbsolutePath();
-        OutputStream os1 = new FileOutputStream(localProps);
-        try {
-            hadoopProps.store(os1, "");
-        }
-        finally {
-            os1.close();
+        try (OutputStream os1 = new FileOutputStream(localProps)) {
+            log4jProperties.store(os1, "");
         }
 
         PropertyConfigurator.configure(DISTCP_LOG4J_PROPS);

http://git-wip-us.apache.org/repos/asf/oozie/blob/cd699bf5/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
----------------------------------------------------------------------
diff --git a/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java b/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
index f314bab..6a600fa 100644
--- a/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
+++ b/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
@@ -29,7 +29,6 @@ import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map.Entry;
-import java.util.Properties;
 import java.util.Set;
 import java.util.regex.Pattern;
 
@@ -127,7 +126,7 @@ public class HiveMain extends LauncherMain {
         return hiveConf;
     }
 
-    public static String setUpHiveLog4J(Configuration hiveConf) throws IOException {
+    private String setUpHiveLog4J(Configuration hiveConf) throws IOException {
         //Logfile to capture job IDs
         String hadoopJobId = System.getProperty("oozie.launcher.job.id");
         if (hadoopJobId == null) {
@@ -136,53 +135,45 @@ public class HiveMain extends LauncherMain {
 
         String logFile = new File("hive-oozie-" + hadoopJobId + ".log").getAbsolutePath();
 
-        Properties hadoopProps = new Properties();
-
-        // Preparing log4j configuration
-        URL log4jFile = Thread.currentThread().getContextClassLoader().getResource("log4j.properties");
-        if (log4jFile != null) {
-            // getting hadoop log4j configuration
-            hadoopProps.load(log4jFile.openStream());
-        }
-
         String logLevel = hiveConf.get("oozie.hive.log.level", "INFO");
         String rootLogLevel = hiveConf.get("oozie.action." + LauncherMapper.ROOT_LOGGER_LEVEL, "INFO");
 
-        hadoopProps.setProperty("log4j.rootLogger", rootLogLevel + ", A");
-        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.hive", logLevel + ", A");
-        hadoopProps.setProperty("log4j.additivity.org.apache.hadoop.hive", "false");
-        hadoopProps.setProperty("log4j.logger.hive", logLevel + ", A");
-        hadoopProps.setProperty("log4j.additivity.hive", "false");
-        hadoopProps.setProperty("log4j.logger.DataNucleus", logLevel + ", A");
-        hadoopProps.setProperty("log4j.additivity.DataNucleus", "false");
-        hadoopProps.setProperty("log4j.logger.DataStore", logLevel + ", A");
-        hadoopProps.setProperty("log4j.additivity.DataStore", "false");
-        hadoopProps.setProperty("log4j.logger.JPOX", logLevel + ", A");
-        hadoopProps.setProperty("log4j.additivity.JPOX", "false");
-        hadoopProps.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
-        hadoopProps.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
-        hadoopProps.setProperty("log4j.appender.A.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
-
-        hadoopProps.setProperty("log4j.appender.jobid", "org.apache.log4j.FileAppender");
-        hadoopProps.setProperty("log4j.appender.jobid.file", logFile);
-        hadoopProps.setProperty("log4j.appender.jobid.layout", "org.apache.log4j.PatternLayout");
-        hadoopProps.setProperty("log4j.appender.jobid.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
-        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.hive.ql.exec", "INFO, jobid");
-        hadoopProps.setProperty("log4j.additivity.org.apache.hadoop.hive.ql.exec", "false");
-        hadoopProps.setProperty("log4j.logger.SessionState", "INFO, jobid");
-        hadoopProps.setProperty("log4j.additivity.SessionState", "false");
-        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "INFO, jobid");
-        hadoopProps.setProperty("log4j.additivity.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "false");
+        log4jProperties.setProperty("log4j.rootLogger", rootLogLevel + ", A");
+        log4jProperties.setProperty("log4j.logger.org.apache.hadoop.hive", logLevel + ", A");
+        log4jProperties.setProperty("log4j.additivity.org.apache.hadoop.hive", "false");
+        log4jProperties.setProperty("log4j.logger.hive", logLevel + ", A");
+        log4jProperties.setProperty("log4j.additivity.hive", "false");
+        log4jProperties.setProperty("log4j.logger.DataNucleus", logLevel + ", A");
+        log4jProperties.setProperty("log4j.additivity.DataNucleus", "false");
+        log4jProperties.setProperty("log4j.logger.DataStore", logLevel + ", A");
+        log4jProperties.setProperty("log4j.additivity.DataStore", "false");
+        log4jProperties.setProperty("log4j.logger.JPOX", logLevel + ", A");
+        log4jProperties.setProperty("log4j.additivity.JPOX", "false");
+        log4jProperties.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
+        log4jProperties.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
+        log4jProperties.setProperty("log4j.appender.A.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
+
+        log4jProperties.setProperty("log4j.appender.jobid", "org.apache.log4j.FileAppender");
+        log4jProperties.setProperty("log4j.appender.jobid.file", logFile);
+        log4jProperties.setProperty("log4j.appender.jobid.layout", "org.apache.log4j.PatternLayout");
+        log4jProperties.setProperty("log4j.appender.jobid.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
+        log4jProperties.setProperty("log4j.logger.org.apache.hadoop.hive.ql.exec", "INFO, jobid");
+        log4jProperties.setProperty("log4j.additivity.org.apache.hadoop.hive.ql.exec", "false");
+        log4jProperties.setProperty("log4j.logger.SessionState", "INFO, jobid");
+        log4jProperties.setProperty("log4j.additivity.SessionState", "false");
+        log4jProperties.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "INFO, jobid");
+        log4jProperties.setProperty("log4j.additivity.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "false");
 
         String localProps = new File(HIVE_L4J_PROPS).getAbsolutePath();
-        OutputStream os1 = new FileOutputStream(localProps);
-        hadoopProps.store(os1, "");
-        os1.close();
+        try (OutputStream os1 = new FileOutputStream(localProps)) {
+            log4jProperties.store(os1, "");
+        }
 
         localProps = new File(HIVE_EXEC_L4J_PROPS).getAbsolutePath();
-        os1 = new FileOutputStream(localProps);
-        hadoopProps.store(os1, "");
-        os1.close();
+        try (OutputStream os2 = new FileOutputStream(localProps)) {
+            log4jProperties.store(os2, "");
+        }
+
         return logFile;
     }
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/cd699bf5/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
index 7ae48e1..9a411ac 100644
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
@@ -27,10 +27,12 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.StringWriter;
+import java.net.URL;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.LinkedHashSet;
 import java.util.Map;
+import java.util.Properties;
 import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -59,11 +61,60 @@ public abstract class LauncherMain {
     protected static String[] HADOOP_SITE_FILES = new String[]
             {"core-site.xml", "hdfs-site.xml", "mapred-site.xml", "yarn-site.xml"};
 
+    /**
+     * Hadoop's {@code log4j.properties} found on the classpath, if readable and present.
+     */
+    private static final String HADOOP_LOG4J_LOCATION = "log4j.properties";
+
+    /**
+     * Default {@code log4j.properties}, if Hadoop's one is not present or readable.
+     * <p>
+     * Its contents are mostly from Hadoop's {@code default-log4j.properties}.
+     */
+    private static final String DEFAULT_LOG4J_LOCATION = "default-log4j.properties";
+
+    protected Properties log4jProperties = new Properties();
+
     protected static void run(Class<? extends LauncherMain> klass, String[] args) throws Exception {
         LauncherMain main = klass.newInstance();
+        main.setupLog4jProperties();
         main.run(args);
     }
 
+    @VisibleForTesting
+    protected void setupLog4jProperties() {
+        if (tryLoadLog4jPropertiesFromResource(HADOOP_LOG4J_LOCATION)) {
+            return;
+        }
+
+        tryLoadLog4jPropertiesFromResource(DEFAULT_LOG4J_LOCATION);
+    }
+
+    private boolean tryLoadLog4jPropertiesFromResource(final String log4jLocation) {
+        System.out.println(String.format("INFO: loading log4j config file %s.", log4jLocation));
+        final URL log4jUrl = Thread.currentThread().getContextClassLoader().getResource(log4jLocation);
+        if (log4jUrl != null) {
+            try (final InputStream log4jStream = log4jUrl.openStream()) {
+                log4jProperties.load(log4jStream);
+
+                System.out.println(String.format("INFO: log4j config file %s loaded successfully.", log4jLocation));
+                return true;
+            } catch (final IOException e) {
+                System.out.println(
+                        String.format("WARN: log4j config file %s is not readable. Exception message is: %s",
+                                log4jLocation,
+                                e.getMessage()));
+                e.printStackTrace(System.out);
+            }
+        }
+        else {
+            System.out.println(String.format("WARN: log4j config file %s is not present.", log4jLocation));
+        }
+
+        System.out.println(String.format("INFO: log4j config file %s could not be loaded.", log4jLocation));
+        return false;
+    }
+
     protected static String getHadoopJobIds(String logFile, Pattern[] patterns) {
         Set<String> jobIds = new LinkedHashSet<String>();
         if (!new File(logFile).exists()) {
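
With this shared field in place, the per-action refactorings below all follow the same pattern: the setUp*Log4J methods no longer re-read log4j.properties themselves but add their action-specific loggers and appenders to the inherited log4jProperties, write the merged configuration out with try-with-resources, and activate it via PropertyConfigurator. A rough sketch of that pattern follows; HypotheticalMain and the file name hypothetical-log4j.properties are placeholders, not code from this commit.

    // Placeholder subclass to illustrate the pattern; not part of the patch.
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.OutputStream;

    import org.apache.log4j.PropertyConfigurator;
    import org.apache.oozie.action.hadoop.LauncherMain;

    public class HypotheticalMain extends LauncherMain {
        @Override
        protected void run(String[] args) throws Exception {
            // log4jProperties was already populated by setupLog4jProperties() in LauncherMain.run().
            log4jProperties.setProperty("log4j.rootLogger", "INFO, A");
            log4jProperties.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
            log4jProperties.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
            log4jProperties.setProperty("log4j.appender.A.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");

            // Persist the merged configuration and activate it for this launcher JVM.
            String localProps = new File("hypothetical-log4j.properties").getAbsolutePath();
            try (OutputStream os = new FileOutputStream(localProps)) {
                log4jProperties.store(os, "");
            }
            PropertyConfigurator.configure(localProps);
        }
    }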

http://git-wip-us.apache.org/repos/asf/oozie/blob/cd699bf5/sharelib/oozie/src/main/resources/default-log4j.properties
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/main/resources/default-log4j.properties b/sharelib/oozie/src/main/resources/default-log4j.properties
new file mode 100644
index 0000000..542adf4
--- /dev/null
+++ b/sharelib/oozie/src/main/resources/default-log4j.properties
@@ -0,0 +1,32 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License. See accompanying LICENSE file.
+#
+
+log4j.rootLogger=${hadoop.root.logger}
+hadoop.root.logger=INFO,console
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n

http://git-wip-us.apache.org/repos/asf/oozie/blob/cd699bf5/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherMain.java
----------------------------------------------------------------------
diff --git a/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherMain.java b/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherMain.java
new file mode 100644
index 0000000..1e23dbd
--- /dev/null
+++ b/sharelib/oozie/src/test/java/org/apache/oozie/action/hadoop/TestLauncherMain.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.action.hadoop;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class TestLauncherMain {
+    private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
+
+    @Before
+    public void setUpStreams() {
+        System.setOut(new PrintStream(outContent));
+    }
+
+    @After
+    public void cleanUpStreams() {
+        System.setOut(null);
+    }
+
+    @Test
+    public void testLog4jPropertiesPresentAndReadable() {
+        final LauncherMain noop = new NoopLauncherMain();
+        noop.setupLog4jProperties();
+
+        assertTrue(outContent.toString().contains("INFO: log4j config file log4j.properties loaded successfully."));
+        assertEquals(noop.log4jProperties.size(), 5);
+    }
+
+    private static class NoopLauncherMain extends LauncherMain {
+        @Override
+        protected void run(String[] args) throws Exception {}
+    }
+}
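
The committed test exercises the happy path only, where a readable log4j.properties is on the test classpath. For completeness, a fallback-path check could look like the sketch below; it is hypothetical, is not part of this commit, and assumes a test classpath where log4j.properties is absent while the bundled default-log4j.properties is visible (which is not how this module's tests are currently set up).

    // Hypothetical sketch -- NOT part of this commit.
    package org.apache.oozie.action.hadoop;

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;

    import org.junit.Before;
    import org.junit.Test;

    import static org.junit.Assert.assertTrue;

    public class TestLauncherMainFallback {
        private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();

        @Before
        public void setUpStreams() {
            System.setOut(new PrintStream(outContent));
        }

        @Test
        public void testFallsBackToDefaultLog4jProperties() {
            final LauncherMain noop = new NoopMain();
            noop.setupLog4jProperties();

            // With no log4j.properties on the classpath, the launcher should report the fallback file.
            assertTrue(outContent.toString().contains(
                    "INFO: log4j config file default-log4j.properties loaded successfully."));
        }

        private static class NoopMain extends LauncherMain {
            @Override
            protected void run(String[] args) throws Exception {}
        }
    }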

http://git-wip-us.apache.org/repos/asf/oozie/blob/cd699bf5/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
----------------------------------------------------------------------
diff --git a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
index ba393ee..11cc7ee 100644
--- a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
+++ b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
@@ -49,7 +49,6 @@ import java.util.List;
 import java.util.ArrayList;
 import java.util.Properties;
 import java.util.Set;
-import java.net.URL;
 import java.util.regex.Pattern;
 
 public class PigMain extends LauncherMain {
@@ -162,43 +161,34 @@ public class PigMain extends LauncherMain {
 
         String logFile = new File("pig-oozie-" + hadoopJobId + ".log").getAbsolutePath();
 
-        URL log4jFile = Thread.currentThread().getContextClassLoader().getResource("log4j.properties");
-        if (log4jFile != null) {
-
-            String pigLogLevel = actionConf.get("oozie.pig.log.level", "INFO");
-            String rootLogLevel = actionConf.get("oozie.action." + LauncherMapper.ROOT_LOGGER_LEVEL, "INFO");
-
-            // append required PIG properties to the default hadoop log4j file
-            Properties hadoopProps = new Properties();
-            hadoopProps.load(log4jFile.openStream());
-            hadoopProps.setProperty("log4j.rootLogger", rootLogLevel + ", A, B");
-            hadoopProps.setProperty("log4j.logger.org.apache.pig", pigLogLevel + ", A, B");
-            hadoopProps.setProperty("log4j.additivity.org.apache.pig", "false");
-            hadoopProps.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
-            hadoopProps.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
-            hadoopProps.setProperty("log4j.appender.A.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
-            hadoopProps.setProperty("log4j.appender.B", "org.apache.log4j.FileAppender");
-            hadoopProps.setProperty("log4j.appender.B.file", logFile);
-            hadoopProps.setProperty("log4j.appender.B.layout", "org.apache.log4j.PatternLayout");
-            hadoopProps.setProperty("log4j.appender.B.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
-            hadoopProps.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "INFO, B");
-
-            String localProps = new File("piglog4j.properties").getAbsolutePath();
-            OutputStream os1 = new FileOutputStream(localProps);
-            hadoopProps.store(os1, "");
-            os1.close();
-
-            arguments.add("-log4jconf");
-            arguments.add(localProps);
-
-            // print out current directory
-            File localDir = new File(localProps).getParentFile();
-            System.out.println("Current (local) dir = " + localDir.getAbsolutePath());
-        }
-        else {
-            System.out.println("log4jfile is null");
+        String pigLogLevel = actionConf.get("oozie.pig.log.level", "INFO");
+        String rootLogLevel = actionConf.get("oozie.action." + LauncherMapper.ROOT_LOGGER_LEVEL, "INFO");
+
+        // append required PIG properties to the default hadoop log4j file
+        log4jProperties.setProperty("log4j.rootLogger", rootLogLevel + ", A, B");
+        log4jProperties.setProperty("log4j.logger.org.apache.pig", pigLogLevel + ", A, B");
+        log4jProperties.setProperty("log4j.additivity.org.apache.pig", "false");
+        log4jProperties.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
+        log4jProperties.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
+        log4jProperties.setProperty("log4j.appender.A.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
+        log4jProperties.setProperty("log4j.appender.B", "org.apache.log4j.FileAppender");
+        log4jProperties.setProperty("log4j.appender.B.file", logFile);
+        log4jProperties.setProperty("log4j.appender.B.layout", "org.apache.log4j.PatternLayout");
+        log4jProperties.setProperty("log4j.appender.B.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
+        log4jProperties.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "INFO, B");
+
+        String localProps = new File("piglog4j.properties").getAbsolutePath();
+        try (OutputStream os1 = new FileOutputStream(localProps)) {
+            log4jProperties.store(os1, "");
         }
 
+        arguments.add("-log4jconf");
+        arguments.add(localProps);
+
+        // print out current directory
+        File localDir = new File(localProps).getParentFile();
+        System.out.println("Current (local) dir = " + localDir.getAbsolutePath());
+
         String pigLog = "pig-" + hadoopJobId + ".log";
         arguments.add("-logfile");
         arguments.add(pigLog);

http://git-wip-us.apache.org/repos/asf/oozie/blob/cd699bf5/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMainWithOldAPI.java
----------------------------------------------------------------------
diff --git a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMainWithOldAPI.java b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMainWithOldAPI.java
index 14ab65e..503d0eb 100644
--- a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMainWithOldAPI.java
+++ b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMainWithOldAPI.java
@@ -35,7 +35,6 @@ import java.util.List;
 import java.util.ArrayList;
 import java.util.Properties;
 import java.util.Set;
-import java.net.URL;
 
 public class PigMainWithOldAPI extends LauncherMain {
     private static final Set<String> DISALLOWED_PIG_OPTIONS = new HashSet<String>();
@@ -149,41 +148,32 @@ public class PigMainWithOldAPI extends LauncherMain {
 
         String logFile = new File("pig-oozie-" + hadoopJobId + ".log").getAbsolutePath();
 
-        URL log4jFile = Thread.currentThread().getContextClassLoader().getResource("log4j.properties");
-        if (log4jFile != null) {
-
-            String pigLogLevel = actionConf.get("oozie.pig.log.level", "INFO");
-            String rootLogLevel = actionConf.get("oozie.action." + LauncherMapper.ROOT_LOGGER_LEVEL, "INFO");
-
-            // append required PIG properties to the default hadoop log4j file
-            Properties hadoopProps = new Properties();
-            hadoopProps.load(log4jFile.openStream());
-            hadoopProps.setProperty("log4j.rootLogger", rootLogLevel + ", A, B");
-            hadoopProps.setProperty("log4j.logger.org.apache.pig", pigLogLevel + ", A, B");
-            hadoopProps.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
-            hadoopProps.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
-            hadoopProps.setProperty("log4j.appender.A.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
-            hadoopProps.setProperty("log4j.appender.B", "org.apache.log4j.FileAppender");
-            hadoopProps.setProperty("log4j.appender.B.file", logFile);
-            hadoopProps.setProperty("log4j.appender.B.layout", "org.apache.log4j.PatternLayout");
-            hadoopProps.setProperty("log4j.appender.B.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
-
-            String localProps = new File("piglog4j.properties").getAbsolutePath();
-            OutputStream os1 = new FileOutputStream(localProps);
-            hadoopProps.store(os1, "");
-            os1.close();
-
-            arguments.add("-log4jconf");
-            arguments.add(localProps);
-
-            // print out current directory
-            File localDir = new File(localProps).getParentFile();
-            System.out.println("Current (local) dir = " + localDir.getAbsolutePath());
-        }
-        else {
-            System.out.println("log4jfile is null");
+        String pigLogLevel = actionConf.get("oozie.pig.log.level", "INFO");
+        String rootLogLevel = actionConf.get("oozie.action." + LauncherMapper.ROOT_LOGGER_LEVEL, "INFO");
+
+        // append required PIG properties to the default hadoop log4j file
+        log4jProperties.setProperty("log4j.rootLogger", rootLogLevel + ", A, B");
+        log4jProperties.setProperty("log4j.logger.org.apache.pig", pigLogLevel + ", A, B");
+        log4jProperties.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
+        log4jProperties.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
+        log4jProperties.setProperty("log4j.appender.A.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
+        log4jProperties.setProperty("log4j.appender.B", "org.apache.log4j.FileAppender");
+        log4jProperties.setProperty("log4j.appender.B.file", logFile);
+        log4jProperties.setProperty("log4j.appender.B.layout", "org.apache.log4j.PatternLayout");
+        log4jProperties.setProperty("log4j.appender.B.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
+
+        String localProps = new File("piglog4j.properties").getAbsolutePath();
+        try (OutputStream os1 = new FileOutputStream(localProps)) {
+            log4jProperties.store(os1, "");
         }
 
+        arguments.add("-log4jconf");
+        arguments.add(localProps);
+
+        // print out current directory
+        File localDir = new File(localProps).getParentFile();
+        System.out.println("Current (local) dir = " + localDir.getAbsolutePath());
+
         String pigLog = "pig-" + hadoopJobId + ".log";
         arguments.add("-logfile");
         arguments.add(pigLog);

http://git-wip-us.apache.org/repos/asf/oozie/blob/cd699bf5/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
----------------------------------------------------------------------
diff --git a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
index 88de5c4..ffa934a 100644
--- a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
+++ b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
@@ -24,14 +24,12 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.Properties;
 import java.util.jar.JarFile;
 import java.util.jar.Manifest;
 import java.util.regex.Pattern;
@@ -413,47 +411,36 @@ public class SparkMain extends LauncherMain {
         return result;
     }
 
-    public static String setUpSparkLog4J(Configuration distcpConf) throws IOException {
+    private String setUpSparkLog4J(Configuration distcpConf) throws IOException {
         // Logfile to capture job IDs
         String hadoopJobId = System.getProperty("oozie.launcher.job.id");
         if (hadoopJobId == null) {
             throw new RuntimeException("Launcher Hadoop Job ID system,property not set");
         }
         String logFile = new File("spark-oozie-" + hadoopJobId + ".log").getAbsolutePath();
-        Properties hadoopProps = new Properties();
-
-        // Preparing log4j configuration
-        URL log4jFile = Thread.currentThread().getContextClassLoader().getResource("log4j.properties");
-        if (log4jFile != null) {
-            // getting hadoop log4j configuration
-            hadoopProps.load(log4jFile.openStream());
-        }
 
         String logLevel = distcpConf.get("oozie.spark.log.level", "INFO");
         String rootLogLevel = distcpConf.get("oozie.action." + LauncherMapper.ROOT_LOGGER_LEVEL, "INFO");
 
-        hadoopProps.setProperty("log4j.rootLogger", rootLogLevel + ", A");
-        hadoopProps.setProperty("log4j.logger.org.apache.spark", logLevel + ", A, jobid");
-        hadoopProps.setProperty("log4j.additivity.org.apache.spark", "false");
-        hadoopProps.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
-        hadoopProps.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
-        hadoopProps.setProperty("log4j.appender.A.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
-        hadoopProps.setProperty("log4j.appender.jobid", "org.apache.log4j.FileAppender");
-        hadoopProps.setProperty("log4j.appender.jobid.file", logFile);
-        hadoopProps.setProperty("log4j.appender.jobid.layout", "org.apache.log4j.PatternLayout");
-        hadoopProps.setProperty("log4j.appender.jobid.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
-        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.mapred", "INFO, jobid");
-        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.mapreduce.Job", "INFO, jobid");
-        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "INFO, jobid");
+        log4jProperties.setProperty("log4j.rootLogger", rootLogLevel + ", A");
+        log4jProperties.setProperty("log4j.logger.org.apache.spark", logLevel + ", A, jobid");
+        log4jProperties.setProperty("log4j.additivity.org.apache.spark", "false");
+        log4jProperties.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
+        log4jProperties.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
+        log4jProperties.setProperty("log4j.appender.A.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
+        log4jProperties.setProperty("log4j.appender.jobid", "org.apache.log4j.FileAppender");
+        log4jProperties.setProperty("log4j.appender.jobid.file", logFile);
+        log4jProperties.setProperty("log4j.appender.jobid.layout", "org.apache.log4j.PatternLayout");
+        log4jProperties.setProperty("log4j.appender.jobid.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
+        log4jProperties.setProperty("log4j.logger.org.apache.hadoop.mapred", "INFO, jobid");
+        log4jProperties.setProperty("log4j.logger.org.apache.hadoop.mapreduce.Job", "INFO, jobid");
+        log4jProperties.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "INFO, jobid");
 
         String localProps = new File(SPARK_LOG4J_PROPS).getAbsolutePath();
-        OutputStream os1 = new FileOutputStream(localProps);
-        try {
-            hadoopProps.store(os1, "");
-        }
-        finally {
-            os1.close();
+        try (OutputStream os1 = new FileOutputStream(localProps)) {
+            log4jProperties.store(os1, "");
         }
+
         PropertyConfigurator.configure(SPARK_LOG4J_PROPS);
         return logFile;
     }

http://git-wip-us.apache.org/repos/asf/oozie/blob/cd699bf5/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
----------------------------------------------------------------------
diff --git a/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java b/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
index cb8f56e..6672ffb 100644
--- a/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
+++ b/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
@@ -106,7 +106,7 @@ public class SqoopMain extends LauncherMain {
         return sqoopConf;
     }
 
-    public static String setUpSqoopLog4J(Configuration sqoopConf) throws IOException {
+    private String setUpSqoopLog4J(Configuration sqoopConf) throws IOException {
         //Logfile to capture job IDs
         String hadoopJobId = System.getProperty("oozie.launcher.job.id");
         if (hadoopJobId == null) {
@@ -115,40 +115,27 @@ public class SqoopMain extends LauncherMain {
 
         String logFile = new File("sqoop-oozie-" + hadoopJobId + ".log").getAbsolutePath();
 
-        Properties hadoopProps = new Properties();
-
-        // Preparing log4j configuration
-        URL log4jFile = Thread.currentThread().getContextClassLoader().getResource("log4j.properties");
-        if (log4jFile != null) {
-            // getting hadoop log4j configuration
-            hadoopProps.load(log4jFile.openStream());
-        }
-
         String logLevel = sqoopConf.get("oozie.sqoop.log.level", "INFO");
         String rootLogLevel = sqoopConf.get("oozie.action." + LauncherMapper.ROOT_LOGGER_LEVEL, "INFO");
 
-        hadoopProps.setProperty("log4j.rootLogger", rootLogLevel + ", A");
-        hadoopProps.setProperty("log4j.logger.org.apache.sqoop", logLevel + ", A");
-        hadoopProps.setProperty("log4j.additivity.org.apache.sqoop", "false");
-        hadoopProps.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
-        hadoopProps.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
-        hadoopProps.setProperty("log4j.appender.A.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
-
-        hadoopProps.setProperty("log4j.appender.jobid", "org.apache.log4j.FileAppender");
-        hadoopProps.setProperty("log4j.appender.jobid.file", logFile);
-        hadoopProps.setProperty("log4j.appender.jobid.layout", "org.apache.log4j.PatternLayout");
-        hadoopProps.setProperty("log4j.appender.jobid.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
-        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.mapred", "INFO, jobid, A");
-        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.mapreduce.Job", "INFO, jobid, A");
-        hadoopProps.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "INFO, jobid");
+        log4jProperties.setProperty("log4j.rootLogger", rootLogLevel + ", A");
+        log4jProperties.setProperty("log4j.logger.org.apache.sqoop", logLevel + ", A");
+        log4jProperties.setProperty("log4j.additivity.org.apache.sqoop", "false");
+        log4jProperties.setProperty("log4j.appender.A", "org.apache.log4j.ConsoleAppender");
+        log4jProperties.setProperty("log4j.appender.A.layout", "org.apache.log4j.PatternLayout");
+        log4jProperties.setProperty("log4j.appender.A.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
+
+        log4jProperties.setProperty("log4j.appender.jobid", "org.apache.log4j.FileAppender");
+        log4jProperties.setProperty("log4j.appender.jobid.file", logFile);
+        log4jProperties.setProperty("log4j.appender.jobid.layout", "org.apache.log4j.PatternLayout");
+        log4jProperties.setProperty("log4j.appender.jobid.layout.ConversionPattern", "%d [%t] %-5p %c %x - %m%n");
+        log4jProperties.setProperty("log4j.logger.org.apache.hadoop.mapred", "INFO, jobid, A");
+        log4jProperties.setProperty("log4j.logger.org.apache.hadoop.mapreduce.Job", "INFO, jobid, A");
+        log4jProperties.setProperty("log4j.logger.org.apache.hadoop.yarn.client.api.impl.YarnClientImpl", "INFO, jobid");
 
         String localProps = new File(SQOOP_LOG4J_PROPS).getAbsolutePath();
-        OutputStream os1 = new FileOutputStream(localProps);
-        try {
-            hadoopProps.store(os1, "");
-        }
-        finally {
-            os1.close();
+        try (OutputStream os1 = new FileOutputStream(localProps)) {
+            log4jProperties.store(os1, "");
         }
 
         PropertyConfigurator.configure(SQOOP_LOG4J_PROPS);
