Repository: oozie
Updated Branches:
  refs/heads/master ad660cf64 -> bf5bd91b7


OOZIE-3228 amend [Spark action] Can't load properties from spark-defaults.conf (andras.piros)


Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/bf5bd91b
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/bf5bd91b
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/bf5bd91b

Branch: refs/heads/master
Commit: bf5bd91b76a4d0f61fa995c5b6cb062856bd3478
Parents: ad660cf
Author: Andras Piros <andras.pi...@cloudera.com>
Authored: Mon May 28 13:38:13 2018 +0200
Committer: Andras Piros <andras.pi...@cloudera.com>
Committed: Mon May 28 13:38:13 2018 +0200

----------------------------------------------------------------------
 release-log.txt                                 |  1 +
 .../action/hadoop/TestSparkArgsExtractor.java   | 54 ++++++++++++--------
 2 files changed, 33 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/oozie/blob/bf5bd91b/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index cc36879..211eb2f 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
 -- Oozie 5.1.0 release (trunk - unreleased)
 
+OOZIE-3228 amend [Spark action] Can't load properties from spark-defaults.conf (andras.piros)
 OOZIE-3257 TestHiveActionExecutor#testHiveAction still fails (pbacsko via andras.piros)
 OOZIE-2352 Unportable shebang in shell scripts (dbist13 via andras.piros)
 OOZIE-2211 Remove OozieCLI#validateCommandV41 (dbist13 via andras.piros)

http://git-wip-us.apache.org/repos/asf/oozie/blob/bf5bd91b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java
----------------------------------------------------------------------
diff --git a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java
index d75e727..60ab8b9 100644
--- a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java
+++ b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java
@@ -29,11 +29,14 @@ import java.io.FileWriter;
 import java.io.IOException;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
 
 import static org.apache.oozie.action.hadoop.SparkArgsExtractor.SPARK_DEFAULTS_GENERATED_PROPERTIES;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 public class TestSparkArgsExtractor {
 
@@ -318,37 +321,44 @@ public class TestSparkArgsExtractor {
         actionConf.set(SparkActionExecutor.SPARK_JOB_NAME, "Spark Copy File");
         actionConf.set(SparkActionExecutor.SPARK_JAR, "/lib/test.jar");
 
-        createTemporaryFileWithContent(SPARK_DEFAULTS_PROPERTIES, "spark.executor.extraClassPath=/etc/hbase/conf:/etc/hive/conf\n" +
+        createTemporaryFileWithContent(SPARK_DEFAULTS_PROPERTIES,
+                "spark.executor.extraClassPath=/etc/hbase/conf:/etc/hive/conf\n" +
                 "spark.driver.extraClassPath=/etc/hbase/conf:/etc/hive/conf\n" +
                 "spark.executor.extraJavaOptions=-XX:+UseG1GC -XX:+PrintGC -XX:+UnlockExperimentalVMOptions\n" +
                 "spark.driver.extraJavaOptions=-XX:+UseG1GC -XX:+PrintGC -XX:+UnlockExperimentalVMOptions");
 
         final List<String> sparkArgs = new SparkArgsExtractor(actionConf).extract(new String[0]);
 
-        assertEquals("Spark args mismatch",
-                Lists.newArrayList("--master", "yarn",
-                        "--deploy-mode", "client",
-                        "--name", "Spark Copy File",
-                        "--class", "org.apache.oozie.example.SparkFileCopy",
-                        "--conf", 
"spark.executor.extraClassPath=/etc/hbase/conf:/etc/hive/conf:$PWD/*",
-                        "--conf", 
"spark.driver.extraClassPath=/etc/hbase/conf:/etc/hive/conf:$PWD/*",
-                        "--conf", 
"spark.yarn.security.tokens.hadoopfs.enabled=false",
-                        "--conf", 
"spark.yarn.security.tokens.hive.enabled=false",
-                        "--conf", 
"spark.yarn.security.tokens.hbase.enabled=false",
-                        "--conf", 
"spark.yarn.security.credentials.hadoopfs.enabled=false",
-                        "--conf", 
"spark.yarn.security.credentials.hive.enabled=false",
-                        "--conf", 
"spark.yarn.security.credentials.hbase.enabled=false",
-                        "--conf", 
"spark.executor.extraJavaOptions=-XX:+UseG1GC -XX:+PrintGC 
-XX:+UnlockExperimentalVMOptions " +
-                                "-Dlog4j.configuration=spark-log4j.properties",
-                        "--conf", "spark.driver.extraJavaOptions=-XX:+UseG1GC 
-XX:+PrintGC -XX:+UnlockExperimentalVMOptions " +
-                                "-Dlog4j.configuration=spark-log4j.properties",
-                        "--properties-file", 
"spark-defaults-oozie-generated.properties",
-                        "--files", "spark-log4j.properties,hive-site.xml",
-                        "--conf", "spark.yarn.jar=null",
-                        "--verbose", "/lib/test.jar"),
+        assertContainsSublist(
+                Lists.newArrayList("--conf", 
"spark.executor.extraClassPath=/etc/hbase/conf:/etc/hive/conf:$PWD/*"),
+                sparkArgs);
+        assertContainsSublist(
+                Lists.newArrayList("--conf", 
"spark.driver.extraClassPath=/etc/hbase/conf:/etc/hive/conf:$PWD/*"),
+                sparkArgs);
+        assertContainsSublist(
+                Lists.newArrayList("--conf", 
"spark.executor.extraJavaOptions=-XX:+UseG1GC -XX:+PrintGC " +
+                        "-XX:+UnlockExperimentalVMOptions 
-Dlog4j.configuration=spark-log4j.properties"),
+                sparkArgs);
+        assertContainsSublist(
+                Lists.newArrayList("--conf", 
"spark.driver.extraJavaOptions=-XX:+UseG1GC -XX:+PrintGC " +
+                        "-XX:+UnlockExperimentalVMOptions 
-Dlog4j.configuration=spark-log4j.properties"),
                 sparkArgs);
     }
 
+    private void assertContainsSublist(final List<String> expected, final 
List<String> actual) {
+        final int sublistSize = expected.size();
+        assertTrue("actual size is below expected size", actual.size() >= 
sublistSize);
+
+        for (int ixActual = 0; ixActual <= actual.size() - sublistSize; 
ixActual++) {
+            final List<String> actualSublist = actual.subList(ixActual, 
ixActual + sublistSize);
+            if (Arrays.deepEquals(expected.toArray(), 
actualSublist.toArray())) {
+                return;
+            }
+        }
+
+        fail(String.format("actual:\n%s does not contain expected:\n%s", 
actual, expected));
+    }
+
     private Properties readMergedProperties() throws IOException {
         final File file = new File(SPARK_DEFAULTS_GENERATED_PROPERTIES);
         file.deleteOnExit();

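----------------------------------------------------------------------
For reference, the assertContainsSublist helper added above boils down to sliding a window of expected.size() elements across the actual argument list and comparing each window against the expected pair. Below is a minimal standalone sketch of that idea; the SublistCheckSketch class and its main() driver are illustrative only and are not part of this commit:

import java.util.Arrays;
import java.util.List;

// Illustrative only: a plain-Java version of the sliding-window sublist check.
public class SublistCheckSketch {

    // Returns true if 'expected' occurs as a contiguous run inside 'actual'.
    static boolean containsSublist(final List<String> expected, final List<String> actual) {
        final int sublistSize = expected.size();
        for (int ix = 0; ix <= actual.size() - sublistSize; ix++) {
            if (expected.equals(actual.subList(ix, ix + sublistSize))) {
                return true;
            }
        }
        return false;
    }

    public static void main(final String[] args) {
        final List<String> sparkArgs = Arrays.asList(
                "--master", "yarn",
                "--conf", "spark.executor.extraClassPath=/etc/hbase/conf:/etc/hive/conf:$PWD/*",
                "--verbose", "/lib/test.jar");

        // true: the ("--conf", classpath) pair appears back to back
        System.out.println(containsSublist(
                Arrays.asList("--conf", "spark.executor.extraClassPath=/etc/hbase/conf:/etc/hive/conf:$PWD/*"),
                sparkArgs));

        // false: both tokens are present, but not adjacent in this order
        System.out.println(containsSublist(Arrays.asList("--master", "--verbose"), sparkArgs));
    }
}

Checking only the contiguous ("--conf", value) pairs, rather than the full argument list with assertEquals, appears to be the point of the amendment: the test no longer depends on the exact order or count of the remaining arguments emitted by SparkArgsExtractor.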