Github user jkbradley commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20829#discussion_r175907866
  
    --- Diff: mllib/src/test/scala/org/apache/spark/ml/feature/VectorAssemblerSuite.scala ---
    @@ -37,24 +37,26 @@ class VectorAssemblerSuite
     
       test("assemble") {
         import org.apache.spark.ml.feature.VectorAssembler.assemble
    -    assert(assemble(0.0) === Vectors.sparse(1, Array.empty, Array.empty))
    -    assert(assemble(0.0, 1.0) === Vectors.sparse(2, Array(1), Array(1.0)))
    +    assert(assemble(Seq(1), true)(0.0) === Vectors.sparse(1, Array.empty, Array.empty))
    +    assert(assemble(Seq(1, 1), true)(0.0, 1.0) === Vectors.sparse(2, Array(1), Array(1.0)))
         val dv = Vectors.dense(2.0, 0.0)
    -    assert(assemble(0.0, dv, 1.0) === Vectors.sparse(4, Array(1, 3), Array(2.0, 1.0)))
    +    assert(assemble(Seq(1, 2, 1), true)(0.0, dv, 1.0) ===
    +      Vectors.sparse(4, Array(1, 3), Array(2.0, 1.0)))
         val sv = Vectors.sparse(2, Array(0, 1), Array(3.0, 4.0))
    -    assert(assemble(0.0, dv, 1.0, sv) ===
    +    assert(assemble(Seq(1, 2, 1, 2), true)(0.0, dv, 1.0, sv) ===
           Vectors.sparse(6, Array(1, 3, 4, 5), Array(2.0, 1.0, 3.0, 4.0)))
    -    for (v <- Seq(1, "a", null)) {
    -      intercept[SparkException](assemble(v))
    -      intercept[SparkException](assemble(1.0, v))
    +    for (v <- Seq(1, "a")) {
    +      intercept[SparkException](assemble(Seq(1), true)(v))
    +      intercept[SparkException](assemble(Seq(1, 1), true)(1.0, v))
         }
       }
     
       test("assemble should compress vectors") {
         import org.apache.spark.ml.feature.VectorAssembler.assemble
    -    val v1 = assemble(0.0, 0.0, 0.0, Vectors.dense(4.0))
    +    val v1 = assemble(Seq(1, 1, 1, 4), true)(0.0, 0.0, 0.0, Vectors.dense(4.0))
    --- End diff --
    
    We probably want this to fail, right? It expects a Vector of length 4 but is given a Vector of length 1.
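    
    If the declared lengths are meant to be enforced, a sketch of what this assertion could look like (assuming assemble would throw a SparkException on a declared-length mismatch, which is the suggested behavior rather than what the current change necessarily does):
    
        // Hypothetical assertion: the last column declares length 4,
        // but Vectors.dense(4.0) only has length 1, so assembly should fail.
        intercept[SparkException] {
          assemble(Seq(1, 1, 1, 4), true)(0.0, 0.0, 0.0, Vectors.dense(4.0))
        }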

