GitHub user brkyvz commented on a diff in the pull request:

    https://github.com/apache/spark/pull/4274#discussion_r23862043
  
    --- Diff: mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrix.scala ---
    @@ -237,4 +239,88 @@ class BlockMatrix(
         val localMat = toLocalMatrix()
         new BDM[Double](localMat.numRows, localMat.numCols, localMat.toArray)
       }
    +
    +  /** Adds two block matrices together. The matrices must have the same size and matching
    +    * `rowsPerBlock` and `colsPerBlock` values. */
    +  def add(other: BlockMatrix): BlockMatrix = {
    +    require(numRows() == other.numRows(), "Both matrices must have the same number of rows. " +
    +      s"A.numRows: ${numRows()}, B.numRows: ${other.numRows()}")
    +    require(numCols() == other.numCols(), "Both matrices must have the same number of columns. " +
    +      s"A.numCols: ${numCols()}, B.numCols: ${other.numCols()}")
    +    if (checkPartitioning(other, OperationNames.add)) {
    +      val addedBlocks = blocks.cogroup(other.blocks, partitioner).
    +        map { case ((blockRowIndex, blockColIndex), (a, b)) =>
    +          if (a.isEmpty) {
    +            new MatrixBlock((blockRowIndex, blockColIndex), b.head)
    +          } else if (b.isEmpty) {
    +            new MatrixBlock((blockRowIndex, blockColIndex), a.head)
    +          } else {
    +            val result = a.head.toBreeze + b.head.toBreeze
    +            new MatrixBlock((blockRowIndex, blockColIndex), Matrices.fromBreeze(result))
    +          }
    +      }
    +      new BlockMatrix(addedBlocks, rowsPerBlock, colsPerBlock, numRows(), numCols())
    +    } else {
    +      throw new SparkException(
    +        "Cannot add matrices with non-matching partitioners")
    +    }
    +  }
    +
    +  /** Left multiplies this [[BlockMatrix]] to `other`, another [[BlockMatrix]]. The
    +    * `colsPerBlock` of this matrix must equal the `rowsPerBlock` of `other`. If `other`
    +    * contains any [[SparseMatrix]] blocks, they will have to be converted to
    +    * [[DenseMatrix]] blocks. This may cause some performance issues until support for
    +    * multiplying two sparse matrices is added.
    +    */
    +  def multiply(other: BlockMatrix): BlockMatrix = {
    +    require(numCols() == other.numRows(), "The number of columns of A and the number of rows " +
    +      s"of B must be equal. A.numCols: ${numCols()}, B.numRows: ${other.numRows()}. If you " +
    +      "think they should be equal, try setting the dimensions of A and B explicitly while " +
    +      "initializing them.")
    +    if (checkPartitioning(other, OperationNames.multiply)) {
    +      val resultPartitioner = GridPartitioner(numRowBlocks, other.numColBlocks,
    +        math.min(partitioner.numPartitions, other.partitioner.numPartitions))
    +      // Each block of A must be multiplied with the corresponding blocks in each column of B.
    +      val flatA = blocks.flatMap { case ((blockRowIndex, blockColIndex), block) =>
    +        Array.tabulate(other.numColBlocks)(j => ((blockRowIndex, j, blockColIndex), block))
    +      }
    +      // Each block of B must be multiplied with the corresponding blocks in each row of A.
    +      val flatB = other.blocks.flatMap { case ((blockRowIndex, blockColIndex), block) =>
    +        Array.tabulate(numRowBlocks)(i => ((i, blockColIndex, blockRowIndex), block))
    +      }
    +      val newBlocks: RDD[MatrixBlock] = flatA.join(flatB, resultPartitioner).
    +        map { case ((blockRowIndex, blockColIndex, _), (mat1, mat2)) =>
    +          val C = mat2 match {
    +            case dense: DenseMatrix => mat1.multiply(dense)
    +            case sparse: SparseMatrix => mat1.multiply(sparse.toDense())
    +            case _ => throw new SparkException(s"Unrecognized matrix type ${mat2.getClass}.")
    +          }
    +          ((blockRowIndex, blockColIndex), C.toBreeze)
    +      }.reduceByKey(resultPartitioner, (a, b) => a + b).mapValues(Matrices.fromBreeze)
    --- End diff ---
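
    For context, a hypothetical usage sketch of the two methods in the diff above. It is
    written against the blocks-as-`((Int, Int), Matrix)` API shape, so it is only
    illustrative of the intended semantics, not code from this PR; `sc` is assumed to be
    an existing SparkContext:

        import org.apache.spark.mllib.linalg.Matrices
        import org.apache.spark.mllib.linalg.distributed.BlockMatrix

        // A 4 x 4 identity matrix stored as two 2 x 2 diagonal blocks.
        val blocksA = sc.parallelize(Seq(
          ((0, 0), Matrices.dense(2, 2, Array(1.0, 0.0, 0.0, 1.0))),
          ((1, 1), Matrices.dense(2, 2, Array(1.0, 0.0, 0.0, 1.0)))))
        val A = new BlockMatrix(blocksA, 2, 2)

        val sum = A.add(A)          // every entry doubled; block structure unchanged
        val product = A.multiply(A) // A * A; requires A.colsPerBlock == A.rowsPerBlock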
    
    The only problem I see there is that we need to know whether the block is on the right or bottom edge to properly initialize a `ZeroValue`.
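
    For illustration, a minimal sketch of the position-dependent zero value this refers to.
    The helper name `zeroBlock` and its parameter list are hypothetical, not part of the PR;
    the point is that blocks on the bottom or right edge may be smaller than
    `rowsPerBlock` x `colsPerBlock`, so the zero's dimensions depend on the block's indices:

        import breeze.linalg.{DenseMatrix => BDM}

        // Hypothetical helper: builds a correctly sized all-zeros block for a
        // given block position, e.g. as a zero value for a fold-style reduction.
        def zeroBlock(
            blockRowIndex: Int,
            blockColIndex: Int,
            numRows: Long,
            numCols: Long,
            rowsPerBlock: Int,
            colsPerBlock: Int): BDM[Double] = {
          // Edge blocks only cover the remainder of the matrix, so clamp the
          // block size against the rows/columns that remain past this block's offset.
          val effRows =
            math.min(numRows - blockRowIndex.toLong * rowsPerBlock, rowsPerBlock.toLong).toInt
          val effCols =
            math.min(numCols - blockColIndex.toLong * colsPerBlock, colsPerBlock.toLong).toInt
          BDM.zeros[Double](effRows, effCols)
        }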

