Github user mengxr commented on a diff in the pull request:

    https://github.com/apache/spark/pull/11919#discussion_r61708647
  
    --- Diff: mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala ---
    @@ -520,6 +488,81 @@ class ALSSuite
       }
     }
     
    +class ALSCleanerSuite extends SparkFunSuite {
    +  test("Clean shuffles") {
    +    val conf = new SparkConf()
    +    val localDir = Utils.createTempDir()
    +    val tempDir = Utils.createTempDir()
    +    def getAllFiles: Set[File] =
    +      FileUtils.listFiles(localDir, TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE).asScala.toSet
    +    try {
    +      conf.set("spark.local.dir", localDir.getAbsolutePath)
    +      conf.set("spark.shuffle.manager", "sort")
    +      val sc = new SparkContext("local[2]", "test", conf)
    +      try {
    +        sc.setCheckpointDir(tempDir.getAbsolutePath)
    +        // Test checkpoint and clean parents
    +        val filesBefore = getAllFiles
    +        val input = sc.parallelize(1 to 1000)
    +        val keyed = input.map(x => (x % 20, 1))
    +        val shuffled = keyed.reduceByKey(_ + _)
    +        val keysOnly = shuffled.map{case (x, _) => x}
    +        val deps = keysOnly.dependencies
    +        keysOnly.checkpoint()
    +        keysOnly.count()
    +        ALS.cleanShuffleDependencies(sc, deps, true)
    +        assert(keysOnly.isCheckpointed)
    +        val resultingFiles = getAllFiles -- filesBefore
    +        assert(resultingFiles === Set())
    +      } finally {
    +        sc.stop()
    +      }
    +    } finally {
    +      Utils.deleteRecursively(localDir)
    +      Utils.deleteRecursively(tempDir)
    +    }
    +  }
    +
    +  test("ALS shuffle cleanup") {
    +    val conf = new SparkConf()
    +    val localDir = Utils.createTempDir()
    +    val tempDir = Utils.createTempDir()
    +    def getAllFiles: Set[File] =
    +      FileUtils.listFiles(localDir, TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE).asScala.toSet
    +    try {
    +      conf.set("spark.local.dir", localDir.getAbsolutePath)
    +      conf.set("spark.shuffle.manager", "sort")
    +      val sc = new SparkContext("local[2]", "test", conf)
    +      try {
    +        sc.setCheckpointDir(tempDir.getAbsolutePath)
    +        // Generate test data
    +        val (training, _) = ALSSuite.genImplicitTestData(sc, 100, 10, 1, 0.2, 0)
    +        // Implicitly test the cleaning of parents during ALS training
    +        val filesBefore = getAllFiles
    +        val sqlContext = new SQLContext(sc)
    +        import sqlContext.implicits._
    +        val als = new ALS()
    +          .setRank(1)
    +          .setRegParam(1e-5)
    +          .setSeed(0)
    +          .setCheckpointInterval(1)
    +        val model = als.fit(training.toDF())
    +        val resultingFiles = getAllFiles -- filesBefore
    +        // We expect the last shuffle's files to be around, but no more
    +        val pattern = "shuffle_(\\d+)_.+\\.data".r
    +        val rddIds = resultingFiles.flatMap(f =>
    --- End diff --
    
    Use `.flatMap { f =>` here; we usually use `(f => ...)` only when the whole closure fits on one line.
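    
    To illustrate the convention (a hypothetical sketch only — the file names and closure body below are made up, since the actual body is truncated in the diff above):
    
    ```scala
    // Hypothetical illustration of the style convention, not the PR's code.
    val files = Set("shuffle_0_0_0.data", "shuffle_1_0_0.data", "merged_0.index")
    val pattern = "shuffle_(\\d+)_.+\\.data".r
    
    // Closure body spans multiple lines: prefer braces.
    val rddIds = files.flatMap { f =>
      pattern.findFirstMatchIn(f).map(_.group(1).toInt)
    }
    
    // Closure fits on one line: parentheses are fine.
    val lengths = files.map(f => f.length)
    ```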

