[ https://issues.apache.org/jira/browse/MAHOUT-1653?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14615545#comment-14615545 ]

ASF GitHub Bot commented on MAHOUT-1653:
----------------------------------------

Github user andrewpalumbo commented on a diff in the pull request:

    https://github.com/apache/mahout/pull/146#discussion_r33975950
  
    --- Diff: spark-shell/src/main/scala/org/apache/mahout/sparkbindings/shell/MahoutSparkILoop.scala ---
    @@ -48,55 +77,63 @@ class MahoutSparkILoop extends SparkILoop {
     
         conf.set("spark.executor.memory", "1g")
     
    -    sparkContext = mahoutSparkContext(
    +    _interp.sparkContext = mahoutSparkContext(
           masterUrl = master,
           appName = "Mahout Spark Shell",
           customJars = jars,
           sparkConf = conf
         )
     
    -    echo("Created spark context..")
    +    echoToShell("Created spark context..")
         sparkContext
       }
     
    +  // need to change our SparkDistributedContext name to 'sc' since we cannot override the
    +  // private sparkCleanUp() method.
    +  // this is technically not part of Spark's explicitly defined Developer API, though
    +  // nothing in the SparkILoopInit.scala file is marked as such.
       override def initializeSpark() {
    -    intp.beQuietDuring {
    -      command("""
    +    _interp.beQuietDuring {
    +      _interp.interpret("""
     
    -         @transient implicit val sdc: org.apache.mahout.math.drm.DistributedContext =
    +         @transient implicit val sc: org.apache.mahout.math.drm.DistributedContext =
    --- End diff ---
    
    The problem is that the cleanup method we were using is not part of the Spark REPL Developer API. So the method we were using to stop the SparkContext:
    ```
    def sparkCleanUp() {
      echoToShell("Stopping Spark context.")
      _interp.beQuietDuring {
        _interp.interpret("sdc.stop()")
      }
    }
    ```
    
    is no longer available for us to override. So upon exiting the shell, the private `SparkILoop.sparkCleanUp()` is called, which tries to run `sc.stop()`:
    ```
    private def sparkCleanUp() {
      echo("Stopping spark context.")
      intp.beQuietDuring {
        command("sc.stop()")
      }
    }
    ```
    
    which fails with an error and leaves our `sdc` context open.
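
    To illustrate the naming constraint with a minimal standalone sketch (the class and object names below are hypothetical, not Mahout's actual classes): Spark's private `sparkCleanUp()` interprets the literal line `sc.stop()`, so whatever value the shell binds to the name `sc` has to resolve a `stop()` call that shuts down the underlying SparkContext.

    ```
    import org.apache.spark.{SparkConf, SparkContext}

    // Stand-in for a Spark-backed distributed context wrapper (hypothetical).
    class DemoDistributedContext(val underlying: SparkContext) {
      // Delegate stop() to the wrapped SparkContext so an interpreted
      // "sc.stop()" shuts Spark down cleanly on shell exit.
      def stop(): Unit = underlying.stop()
    }

    object DemoShellInit {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf().setAppName("Mahout Spark Shell demo").setMaster("local[*]")
        val sparkContext = new SparkContext(conf)

        // Bind the wrapper under the name `sc`, mirroring what the interpreted
        // snippet in initializeSpark() does; the stock cleanup can then find it.
        val sc = new DemoDistributedContext(sparkContext)

        // On shell exit, SparkILoop.sparkCleanUp() effectively runs this line:
        sc.stop()
      }
    }
    ```
    That is the reason for renaming the implicit from `sdc` to `sc` in the diff above.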


> Spark 1.3
> ---------
>
>                 Key: MAHOUT-1653
>                 URL: https://issues.apache.org/jira/browse/MAHOUT-1653
>             Project: Mahout
>          Issue Type: Dependency upgrade
>            Reporter: Andrew Musselman
>            Assignee: Andrew Palumbo
>             Fix For: 0.11.0
>
>
> Support Spark 1.3



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)
