Github user dbtsai commented on a diff in the pull request:

    https://github.com/apache/spark/pull/8563#discussion_r41109118
  
    --- Diff: mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrix.scala ---
    @@ -402,4 +445,402 @@ class BlockMatrix @Since("1.3.0") (
             s"A.colsPerBlock: $colsPerBlock, B.rowsPerBlock: 
${other.rowsPerBlock}")
         }
       }
    +
    +  /** Schur complement of a BlockMatrix. For a matrix partitioned into four blocks,
    +    * A = [a11, a12; a21, a22], the Schur complement of a11 is S = a22 - (a21 * a11^-1 * a12).
    +    * S has the block dimensions of a22, i.e. (numRowBlocks - 1) x (numColBlocks - 1) blocks.
    +    *
    +    * @return the Schur complement as a BlockMatrix
    +    * @since 1.6.0
    +  */
    +    private[mllib] def SchurComplement: BlockMatrix = {
    +     require(this.numRowBlocks == this.numColBlocks, "Block Matrix must be square.")
    +     require(this.numRowBlocks > 1, "Block Matrix must be larger than one block.")
    +     val topRange = (0, 0); val botRange = (1, this.numColBlocks - 1)
    +     val a11 = this.subBlock(topRange, topRange)
    +     val a12 = this.subBlock(topRange, botRange)
    +     val a21 = this.subBlock(botRange, topRange)
    +     val a22 = this.subBlock(botRange, botRange)
    +
    +    val a11Brz = inv(a11.toBreeze) // note that intermediate a11 calcs derive from inv(a11)
    +    val a11Mtx = Matrices.dense(a11.numRows.toInt, a11.numCols.toInt, a11Brz.toArray)
    +    val a11RDD = this.blocks.sparkContext.parallelize(Seq(((0, 0), a11Mtx)))
    +    val a11Inv = new BlockMatrix(a11RDD, this.rowsPerBlock, this.colsPerBlock)
    +
    +    val S = a22.subtract(a21.multiply(a11Inv.multiply(a12)))
    +    return S
    +  }
    +
    +  /** Returns a rectangular (sub)BlockMatrix with block ranges as specified.
    +    *
    +    * @param blockRowRange The lower and upper block-row indices (inclusive), as (Int, Int)
    +    * @param blockColRange The lower and upper block-column indices (inclusive), as (Int, Int)
    +    * @return a BlockMatrix with (0,0) as the upper leftmost block index
    +    * @since 1.6.0
    +    */
    +
    +  private [mllib] def subBlock(blockRowRange: (Int, Int), blockColRange: (Int, Int)):
    +          BlockMatrix = {
    +    //  Extracts BlockMatrix elements from a specified range of block indices
    +    //  Creating a Sub BlockMatrix of rectangular shape.
    +    //  Also reindexes so that the upper left block is always (0, 0)
    +
    +    // JNDB: Add a require statement ...rowMax<=size..
    +    val rowMin = blockRowRange._1;    val rowMax = blockRowRange._2
    +    val colMin = blockColRange._1 ;   val colMax = blockColRange._2
    +    val extractedSeq = this.blocks.filter{ case((x, y), matrix) =>
    +      x >= rowMin && x<= rowMax &&         // finding blocks
    +        y >= colMin && y<= colMax }.map{   // shifting indices
    +      case(((x, y), matrix) ) => ((x-rowMin, y-colMin), matrix)
    +    }
    +    return new BlockMatrix(extractedSeq, rowsPerBlock, colsPerBlock)
    +  }
    +
    +  /** Computes the LU decomposition of a single block of the BlockMatrix using the
    +    * Breeze LU method. The method (as written) operates only on the upper
    +    * left (0, 0) block of the BlockMatrix.
    +    *
    +    * @return List[BDM[Double]] of Breeze matrices (P, L, U) for the blockLU method.
    +    * @since 1.6.0
    +  */
    +  private [mllib] def singleBlockPLU: List[BDM[Double]] = {
    +    // returns PA = LU factorization from Breeze
    +    val PLU = LU(this.subBlock((0, 0), (0, 0)).toBreeze)
    +    val k = PLU._1.cols
    +    val L = lowerTriangular(PLU._1) - diag(diag(PLU._1)) + diag(DenseVector.fill(k){1.0})
    +    val U = upperTriangular(PLU._1);
    +    var P = diag(DenseVector.fill(k){1.0})
    +    val Pi = diag(DenseVector.fill(k){1.0})
    +    // size of square matrix
    +    for(i <- 0 to (k - 1)) { // i test populating permutation matrix
    --- End diff --
    
    Scala for loops are slow; please use a while loop here.
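
    Something along these lines should do it (an untested sketch; the existing loop body that populates the permutation matrix P is assumed to stay as it is, only the iteration construct changes). A for comprehension over a Range desugars into a foreach call with a closure, which adds per-iteration overhead in hot code:

        var i = 0
        while (i < k) {
          // same body as the current for loop, filling in row i of the permutation matrix P
          i += 1
        }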

