Github user mengxr commented on a diff in the pull request:

    https://github.com/apache/spark/pull/4274#discussion_r23876009
  
    --- Diff: mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrix.scala ---
    @@ -246,4 +248,86 @@ class BlockMatrix(
         val localMat = toLocalMatrix()
         new BDM[Double](localMat.numRows, localMat.numCols, localMat.toArray)
       }
    +
    +  /** Adds two block matrices together. The matrices must have the same size and matching
    +    * `rowsPerBlock` and `colsPerBlock` values. If one of the sub-matrices being added is an
    +    * instance of [[SparseMatrix]], the resulting sub-matrix will also be a [[SparseMatrix]],
    +    * even if it is being added to a [[DenseMatrix]]. If two dense sub-matrices are added,
    +    * the output will also be a [[DenseMatrix]].
    +    */
    +  def add(other: BlockMatrix): BlockMatrix = {
    +    require(numRows() == other.numRows(), "Both matrices must have the same number of rows. " +
    +      s"A.numRows: ${numRows()}, B.numRows: ${other.numRows()}")
    +    require(numCols() == other.numCols(), "Both matrices must have the same number of columns. " +
    +      s"A.numCols: ${numCols()}, B.numCols: ${other.numCols()}")
    +    if (rowsPerBlock == other.rowsPerBlock && colsPerBlock == other.colsPerBlock) {
    +      val addedBlocks = blocks.cogroup(other.blocks, createPartitioner())
    +        .map { case ((blockRowIndex, blockColIndex), (a, b)) =>
    +          if (a.size > 1 || b.size > 1) {
    +            throw new SparkException("There are MatrixBlocks with duplicate indices. Please " +
    +              "remove them.")
    +          }
    +          if (a.isEmpty) {
    +            new MatrixBlock((blockRowIndex, blockColIndex), b.head)
    +          } else if (b.isEmpty) {
    +            new MatrixBlock((blockRowIndex, blockColIndex), a.head)
    +          } else {
    +            val result = a.head.toBreeze + b.head.toBreeze
    +            new MatrixBlock((blockRowIndex, blockColIndex), Matrices.fromBreeze(result))
    +          }
    +      }
    +      new BlockMatrix(addedBlocks, rowsPerBlock, colsPerBlock, numRows(), numCols())
    +    } else {
    +      throw new SparkException("Cannot add matrices with different block dimensions")
    +    }
    +  }
    +
    +  /** Left multiplies this [[BlockMatrix]] to `other`, another [[BlockMatrix]], computing
    +    * `this * other`. The `colsPerBlock` of this matrix must equal the `rowsPerBlock` of
    +    * `other`. If `other` contains any [[SparseMatrix]] blocks, they will have to be converted
    +    * to [[DenseMatrix]]. The output [[BlockMatrix]] will only consist of [[DenseMatrix]]
    +    * blocks. This may cause some performance issues until support for multiplying two sparse
    +    * matrices is added.
    +    */
    +  def multiply(other: BlockMatrix): BlockMatrix = {
    +    require(numCols() == other.numRows(), "The number of columns of A and the number of rows " +
    +      s"of B must be equal. A.numCols: ${numCols()}, B.numRows: ${other.numRows()}. If you " +
    +      "think they should be equal, try setting the dimensions of A and B explicitly while " +
    +      "initializing them.")
    +    if (colsPerBlock == other.rowsPerBlock) {
    +      val resultPartitioner = GridPartitioner(numRowBlocks, other.numColBlocks,
    +        math.max(blocks.partitions.length, other.blocks.partitions.length))
    +      // Each block of A must be multiplied with the corresponding blocks in each column of B.
    +      // TODO: Optimize to send block to a partition once, similar to ALS
    +      val flatA = blocks.flatMap { case ((blockRowIndex, blockColIndex), block) =>
    +        Iterator.tabulate(other.numColBlocks)(j => ((blockRowIndex, j, blockColIndex), block))
    +      }
    +      // Each block of B must be multiplied with the corresponding blocks in each row of A.
    +      val flatB = other.blocks.flatMap { case ((blockRowIndex, blockColIndex), block) =>
    +        Iterator.tabulate(numRowBlocks)(i => ((i, blockColIndex, blockRowIndex), block))
    +      }
    +      val newBlocks: RDD[MatrixBlock] = flatA.cogroup(flatB, resultPartitioner)
    +        .flatMap { case ((blockRowIndex, blockColIndex, _), (a, b)) =>
    +          if (a.size > 1 || b.size > 1) {
    +            throw new SparkException("There are MatrixBlocks with duplicate indices. Please " +
    --- End diff --
    
    Same here.
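    
    For readers following along, a minimal usage sketch of the new `add`. This assumes an
    existing SparkContext `sc` and that `MatrixBlock` is the `((Int, Int), Matrix)` pair that
    the cogroup above pattern-matches on; the five-argument `BlockMatrix` constructor is the
    one used in this hunk, and the literal block values are illustrative only:
    
        import org.apache.spark.mllib.linalg.{Matrices, Matrix}
        import org.apache.spark.mllib.linalg.distributed.BlockMatrix
        import org.apache.spark.rdd.RDD
    
        // Two 4 x 4 matrices, each split into 2 x 2 blocks; absent blocks are
        // implicitly zero, and block (0, 0) of B is a SparseMatrix (CSC layout).
        val blocksA: RDD[((Int, Int), Matrix)] = sc.parallelize(Seq(
          ((0, 0), Matrices.dense(2, 2, Array(1.0, 2.0, 3.0, 4.0))),
          ((1, 1), Matrices.dense(2, 2, Array(5.0, 6.0, 7.0, 8.0)))))
        val blocksB: RDD[((Int, Int), Matrix)] = sc.parallelize(Seq(
          ((0, 0), Matrices.sparse(2, 2, Array(0, 1, 2), Array(0, 1), Array(9.0, 9.0))),
          ((1, 0), Matrices.dense(2, 2, Array(1.0, 0.0, 0.0, 1.0)))))
    
        val a = new BlockMatrix(blocksA, 2, 2, 4, 4)
        val b = new BlockMatrix(blocksB, 2, 2, 4, 4)
    
        // Sizes and rowsPerBlock/colsPerBlock match, so this succeeds. The blocks
        // exercise every cogroup branch: (0, 0) is present on both sides (and stays
        // sparse, per the scaladoc), (1, 1) only in A, (1, 0) only in B.
        val sum = a.add(b)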


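    Similarly, the replication done by `flatA`/`flatB` in `multiply` can be illustrated with a
    driver-side sketch in plain Scala (no Spark needed; the 2 x 2 grid sizes are made up for
    illustration):
    
        // A is a numRowBlocksA x numMidBlocks grid of blocks, B a numMidBlocks x
        // numColBlocksB grid, so A * B is a numRowBlocksA x numColBlocksB grid.
        val numRowBlocksA = 2
        val numColBlocksB = 2
        val numMidBlocks  = 2  // col blocks of A == row blocks of B
    
        // flatA: block (i, k) of A is emitted once per output column j, keyed (i, j, k).
        val flatAKeys = for {
          i <- 0 until numRowBlocksA
          k <- 0 until numMidBlocks
          j <- 0 until numColBlocksB
        } yield (i, j, k)
    
        // flatB: block (k, j) of B is emitted once per output row i, under the same key.
        val flatBKeys = for {
          k <- 0 until numMidBlocks
          j <- 0 until numColBlocksB
          i <- 0 until numRowBlocksA
        } yield (i, j, k)
    
        // cogroup on (i, j, k) therefore pairs A(i, k) with B(k, j); summing the block
        // products over k for a fixed (i, j) yields output block (i, j). Each A block is
        // shipped numColBlocksB times and each B block numRowBlocksA times, which is the
        // cost the "send block to a partition once" TODO above is about.
        assert(flatAKeys.toSet == flatBKeys.toSet)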