seojangho closed pull request #9: [NEMO-40] Solve SonarCloud issues for Frontend and Examples
URL: https://github.com/apache/incubator-nemo/pull/9

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

diff --git a/.travis.yml b/.travis.yml
index 1fe90bcf..687880ab 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -16,9 +16,15 @@
 # .travis.yml
 # For maven builds
 language: java
+dist: trusty
+sudo: false
+install: true
+jdk:
+  - oraclejdk8
 script:
  # the following command line builds the project, runs the tests with coverage and then execute the SonarCloud analysis
-  - mvn clean org.jacoco:jacoco-maven-plugin:prepare-agent verify sonar:sonar -B -q -ff -Dsurefire.useFile=false -Dorg.slf4j.simpleLogger.defaultLogLevel=info
+  - if [ "$TRAVIS_PULL_REQUEST" == false ]; then mvn clean org.jacoco:jacoco-maven-plugin:prepare-agent verify sonar:sonar -B -q -ff -Dsurefire.useFile=false -Dorg.slf4j.simpleLogger.defaultLogLevel=info; fi
+  - if [ "$TRAVIS_PULL_REQUEST" != false ]; then mvn clean verify -B -q -ff -Dsurefire.useFile=false -Dorg.slf4j.simpleLogger.defaultLogLevel=info; fi
 
 notifications:
   slack:
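
The hunk above gates the SonarCloud step so it only runs for branch builds. On Travis CI, TRAVIS_PULL_REQUEST is the literal string "false" for non-PR builds and the PR number otherwise, and encrypted secrets such as the Sonar token are not exposed to pull-request builds from forks, which is likely the motivation. A minimal sketch of the same gate in Java terms (names hypothetical, not from this PR):

    // Sketch: run the analysis only when the build is not a pull request.
    public final class CiGateSketch {
      public static void main(final String[] args) {
        final String pr = System.getenv().getOrDefault("TRAVIS_PULL_REQUEST", "false");
        if ("false".equals(pr)) {
          System.out.println("branch build: mvn verify sonar:sonar");
        } else {
          System.out.println("PR build: mvn verify only (no Sonar token available)");
        }
      }
    }
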
diff --git a/compiler/frontend/beam/src/main/java/edu/snu/nemo/compiler/frontend/beam/NemoPipelineVisitor.java b/compiler/frontend/beam/src/main/java/edu/snu/nemo/compiler/frontend/beam/NemoPipelineVisitor.java
index e7cc9b56..f0c1fb3e 100644
--- a/compiler/frontend/beam/src/main/java/edu/snu/nemo/compiler/frontend/beam/NemoPipelineVisitor.java
+++ b/compiler/frontend/beam/src/main/java/edu/snu/nemo/compiler/frontend/beam/NemoPipelineVisitor.java
@@ -149,7 +149,8 @@ public void visitPrimitiveTransform(final TransformHierarchy.Node beamNode) {
      // Since outgoing PValues for CreateViewTransform is PCollectionView, we cannot use PCollection::getCoder to
       // obtain coders.
       final Coder beamInputCoder = beamNode.getInputs().values().stream()
-          .filter(v -> v instanceof PCollection).findFirst().map(v -> (PCollection) v).get().getCoder();
+          .filter(v -> v instanceof PCollection).map(v -> (PCollection) v).findFirst()
+          .orElseThrow(() -> new RuntimeException("No inputs provided to " + beamNode.getFullName())).getCoder();
       beamNode.getOutputs().values().stream()
          .forEach(output -> pValueToCoder.put(output, getCoderForView(view.getView().getViewFn(), beamInputCoder)));
     } else if (beamTransform instanceof Window) {
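
The hunk above replaces Optional.get() with orElseThrow(); SonarCloud flags get() calls that are not guarded by isPresent(), and orElseThrow() makes the empty-stream failure mode explicit. A minimal standalone sketch of the pattern (hypothetical values, not from this PR):

    import java.util.stream.Stream;

    public final class OrElseThrowSketch {
      public static void main(final String[] args) {
        final String first = Stream.of("a", "b")
            .filter(s -> !s.isEmpty())
            .findFirst()
            // .get() here would be flagged; orElseThrow documents the failure mode.
            .orElseThrow(() -> new RuntimeException("no inputs provided"));
        System.out.println(first);
      }
    }
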
diff --git a/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/core/java/SparkFrontendUtils.java b/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/core/java/SparkFrontendUtils.java
index c331e445..e49a2d29 100644
--- a/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/core/java/SparkFrontendUtils.java
+++ b/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/core/java/SparkFrontendUtils.java
@@ -102,14 +102,16 @@ public static Serializer deriveSerializerFrom(final SparkContext sparkContext) {
      // TODO #740: remove this part, and make it properly transfer with executor.
       File file = new File(resultFile + i);
       while (file.exists()) {
-        final FileInputStream fin = new FileInputStream(file);
-        final ObjectInputStream ois = new ObjectInputStream(fin);
-        result.addAll((List<T>) ois.readObject());
-        ois.close();
+        try (final FileInputStream fin = new FileInputStream(file)) {
+          try (final ObjectInputStream ois = new ObjectInputStream(fin)) {
+            result.addAll((List<T>) ois.readObject());
+          }
+        }
 
         // Delete temporary file
-        file.delete();
-        file = new File(resultFile + ++i);
+        if (file.delete()) {
+          file = new File(resultFile + ++i);
+        }
       }
       return result;
     } catch (Exception e) {
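
The hunk above moves the stream handling into try-with-resources, so both streams are closed even when readObject throws. The two nested try blocks could equally be written as a single try with two resources, which are closed in reverse order of declaration. A minimal sketch under that assumption (readList is a hypothetical helper, not from this PR):

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.ObjectInputStream;
    import java.util.List;

    public final class ReadListSketch {
      @SuppressWarnings("unchecked")
      static <T> List<T> readList(final String path) throws IOException, ClassNotFoundException {
        try (FileInputStream fin = new FileInputStream(path);
             ObjectInputStream ois = new ObjectInputStream(fin)) {
          return (List<T>) ois.readObject(); // both streams closed on every exit path
        }
      }
    }
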
diff --git a/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/sql/SparkSession.java b/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/sql/SparkSession.java
index b674a20a..f2b5653a 100644
--- a/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/sql/SparkSession.java
+++ b/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/sql/SparkSession.java
@@ -46,6 +46,7 @@
    * Constructor.
    *
    * @param sparkContext the spark context for the session.
+   * @param initialConf initial spark session configuration.
    */
  private SparkSession(final SparkContext sparkContext, final Map<String, String> initialConf) {
     super(sparkContext);
@@ -112,11 +113,11 @@ void appendCommand(final String cmd, final Object... args) {
       final String className = cmd[0];
       final String methodName = cmd[1];
       final Object[] args = command.getValue();
-      final Class<?>[] argTypes = Stream.of(args).map(o -> o.getClass()).toArray(Class[]::new);
+      final Class<?>[] argTypes = Stream.of(args).map(Object::getClass).toArray(Class[]::new);
 
-      if (!className.equals(SparkSession.class.getName())
-          && !className.equals(DataFrameReader.class.getName())
-          && !className.equals(Dataset.class.getName())) {
+      if (!SparkSession.class.getName().equals(className)
+          && !DataFrameReader.class.getName().equals(className)
+          && !Dataset.class.getName().equals(className)) {
        throw new OperationNotSupportedException(command + " is not yet supported.");
       }
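
The two changes above address common SonarCloud findings: a method reference (Object::getClass) replaces the equivalent lambda, and the string constants move to the left-hand side of equals() so a null className compares as unequal instead of throwing. A minimal sketch of the equals() ordering (hypothetical values, not from this PR):

    public final class EqualsOrderSketch {
      public static void main(final String[] args) {
        final String className = null;
        // Constant-first: prints "false" instead of throwing.
        System.out.println("java.lang.String".equals(className));
        // className.equals("java.lang.String") would throw NullPointerException.
      }
    }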
 
diff --git a/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/transform/CollectTransform.java b/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/transform/CollectTransform.java
index e5c16187..94bfb11c 100644
--- a/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/transform/CollectTransform.java
+++ b/compiler/frontend/spark/src/main/java/edu/snu/nemo/compiler/frontend/spark/transform/CollectTransform.java
@@ -23,7 +23,6 @@
 import java.io.ObjectOutputStream;
 import java.util.ArrayList;
 import java.util.Iterator;
-import java.util.List;
 
 /**
  * Collect transform.
@@ -49,13 +48,12 @@ public void prepare(final Context context, final OutputCollector<T> outputCollec
   public void onData(final Iterator<T> elements, final String srcVertexId) {
     // Write result to a temporary file.
    // TODO #740: remove this part, and make it properly transfer with executor.
-    try {
-      final FileOutputStream fos = new FileOutputStream(filename);
-      final ObjectOutputStream oos = new ObjectOutputStream(fos);
-      final List<T> list = new ArrayList<>();
-      elements.forEachRemaining(list::add);
-      oos.writeObject(list);
-      oos.close();
+    try (final FileOutputStream fos = new FileOutputStream(filename)) {
+      try (final ObjectOutputStream oos = new ObjectOutputStream(fos)) {
+        final ArrayList<T> list = new ArrayList<>();
+        elements.forEachRemaining(list::add);
+        oos.writeObject(list);
+      }
     } catch (Exception e) {
       throw new RuntimeException(e);
     }
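
As on the read side, the hunk above wraps the output streams in try-with-resources so the ObjectOutputStream is flushed and closed even if writeObject throws; collecting into an ArrayList works here since ArrayList is Serializable. A minimal write-side sketch (writeAll is a hypothetical helper, not from this PR):

    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.ObjectOutputStream;
    import java.util.ArrayList;
    import java.util.Iterator;

    public final class WriteListSketch {
      static <T> void writeAll(final Iterator<T> elements, final String path) throws IOException {
        final ArrayList<T> list = new ArrayList<>();   // ArrayList implements Serializable
        elements.forEachRemaining(list::add);
        try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(path))) {
          oos.writeObject(list);                       // closed (and flushed) on every exit path
        }
      }
    }
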
diff --git a/examples/beam/src/main/java/edu/snu/nemo/examples/beam/AlternatingLeastSquare.java b/examples/beam/src/main/java/edu/snu/nemo/examples/beam/AlternatingLeastSquare.java
index 5019377b..ae008498 100644
--- a/examples/beam/src/main/java/edu/snu/nemo/examples/beam/AlternatingLeastSquare.java
+++ b/examples/beam/src/main/java/edu/snu/nemo/examples/beam/AlternatingLeastSquare.java
@@ -226,7 +226,7 @@ public void processElement(final ProcessContext c) throws Exception {
         for (Integer j = 0; j < numFeatures; j++) {
 //          LOG.info("Rating index " + ratingIndex);
           if (j < fixedMatrix.get(ratingIndex).size()) {
-            conf[j] = fixedMatrix.get(ratingIndex).get(j).doubleValue();
+            conf[j] = fixedMatrix.get(ratingIndex).get(j);
           } else {
             conf[j] = 0.0;
           }
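
The one-line change above drops an explicit doubleValue() call: assigning a boxed value to a double element auto-unboxes, so the call is redundant and SonarCloud flags it. A minimal sketch:

    public final class UnboxingSketch {
      public static void main(final String[] args) {
        final Double boxed = 1.5;
        final double explicit = boxed.doubleValue(); // redundant
        final double auto = boxed;                   // auto-unboxed, equivalent
        System.out.println(explicit == auto);        // true
      }
    }
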
diff --git a/examples/spark/src/main/java/edu/snu/nemo/examples/spark/sql/JavaSparkSQLExample.java b/examples/spark/src/main/java/edu/snu/nemo/examples/spark/sql/JavaSparkSQLExample.java
index 6c9a37d4..fea35e60 100644
--- a/examples/spark/src/main/java/edu/snu/nemo/examples/spark/sql/JavaSparkSQLExample.java
+++ b/examples/spark/src/main/java/edu/snu/nemo/examples/spark/sql/JavaSparkSQLExample.java
@@ -200,7 +200,9 @@ private static void runBasicDataFrameExample(final SparkSession spark, final Str
     // +----+-------+
 
     // Global temporary view is cross-session
-    spark.newSession().sql("SELECT * FROM global_temp.people").show();
+    try (final org.apache.spark.sql.SparkSession newSession = spark.newSession()) {
+      newSession.sql("SELECT * FROM global_temp.people").show();
+    }
     // +----+-------+
     // | age|   name|
     // +----+-------+
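
The hunk above closes the extra session deterministically; Spark's SparkSession implements Closeable, so try-with-resources applies. A minimal sketch of the same pattern with a plain AutoCloseable (hypothetical Session class, no Spark dependency):

    public final class AutoCloseableSketch {
      static final class Session implements AutoCloseable {
        void show() { System.out.println("query result"); }
        @Override public void close() { System.out.println("session closed"); }
      }
      public static void main(final String[] args) {
        try (Session session = new Session()) {
          session.show();
        } // close() runs here, even if show() throws
      }
    }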
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services
