GitHub user xubo245 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20583#discussion_r167479724
  
    --- Diff: mllib/src/test/scala/org/apache/spark/ml/image/ImageSchemaSuite.scala ---
    @@ -65,11 +65,71 @@ class ImageSchemaSuite extends SparkFunSuite with MLlibTestSparkContext {
         assert(count50 > 0 && count50 < countTotal)
       }
     
    +  test("readImages test: recursive = false") {
    +    val df = readImages(imagePath, null, false, 3, true, 1.0, 0)
    +    assert(df.count() === 0)
    +  }
    +
    +  test("readImages test: read jpg image") {
    +    val df = readImages(imagePath + "/kittens/DP153539.jpg", null, false, 3, true, 1.0, 0)
    +    assert(df.count() === 1)
    +  }
    +
    +  test("readImages test: read png image") {
    +    val df = readImages(imagePath + "/multi-channel/BGRA.png", null, false, 3, true, 1.0, 0)
    +    assert(df.count() === 1)
    +  }
    +
    +  test("readImages test: read non image") {
    +    val df = readImages(imagePath + "/kittens/not-image.txt", null, false, 3, true, 1.0, 0)
    +    assert(df.count() === 0)
    +  }
    +
    +  test("readImages test: read non image and dropImageFailures is false") {
    +    val df = readImages(imagePath + "/kittens/not-image.txt", null, false, 3, false, 1.0, 0)
    +    assert(df.count() === 1)
    +  }
    +
    +  test("readImages test: sampleRatio > 1") {
    +    val e = intercept[IllegalArgumentException] {
    +      readImages(imagePath, null, true, 3, true, 1.1, 0)
    +    }
    +    assert(e.getMessage.equals("requirement failed: sampleRatio should be between 0 and 1"))
    +  }
    +
    +  test("readImages test: sampleRatio < 0") {
    +    val e = intercept[IllegalArgumentException] {
    +      readImages(imagePath, null, true, 3, true, -0.1, 0)
    +    }
    +    assert(e.getMessage.equals("requirement failed: sampleRatio should be between 0 and 1"))
    +  }
    +
    +  test("readImages test: sampleRatio = 0") {
    +    val df = readImages(imagePath, null, true, 3, true, 0.0, 0)
    +    assert(df.count() === 0)
    +  }
    +
    +  test("readImages test: with sparkSession") {
    +    val df = readImages(imagePath, sparkSession = spark, true, 3, true, 1.0, 0)
    +    assert(df.count() === 7)
    +  }
    +
       test("readImages partition test") {
         val df = readImages(imagePath, null, true, 3, true, 1.0, 0)
         assert(df.rdd.getNumPartitions === 3)
       }
     
    +  test("readImages partition test: < 0") {
    +    val df = readImages(imagePath, null, true, -3, true, 1.0, 0)
    +    assert(df.rdd.getNumPartitions === spark.sparkContext.defaultParallelism)
    +  }
    +
    +  test("readImages partition test: = 0") {
    +    val df = readImages(imagePath, null, true, 0, true, 1.0, 0)
    +    assert(df.rdd.getNumPartitions != 0)
    --- End diff ---
    
    Ok, done
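
For readers following the thread, the calls in the tests above are hard to parse because all seven arguments are positional. Assuming the readImages overload under test takes (path, sparkSession, recursive, numPartitions, dropImageFailures, sampleRatio, seed), a minimal named-argument sketch of the first new test's call would look like this (imagePath is supplied by the suite; the literal below is only a hypothetical stand-in):

    import org.apache.spark.ml.image.ImageSchema.readImages

    // Sketch only: named-argument form of readImages(imagePath, null, false, 3, true, 1.0, 0),
    // assuming the seven-parameter overload described above.
    val imagePath = "data/mllib/images"  // hypothetical path; the suite defines its own imagePath
    val df = readImages(
      path = imagePath,
      sparkSession = null,        // null here, exactly as passed in the tests above
      recursive = false,          // do not descend into subdirectories
      numPartitions = 3,
      dropImageFailures = true,   // skip files that cannot be decoded as images
      sampleRatio = 1.0,
      seed = 0)
    // With recursive = false the top-level directory holds no image files directly,
    // which is why the test expects df.count() === 0.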

