Github user MLnick commented on a diff in the pull request:

    https://github.com/apache/spark/pull/18902#discussion_r132939361

--- Diff: mllib/src/main/scala/org/apache/spark/ml/feature/Imputer.scala ---
@@ -133,23 +134,29 @@ class Imputer @Since("2.2.0") (@Since("2.2.0") override val uid: String)
   override def fit(dataset: Dataset[_]): ImputerModel = {
     transformSchema(dataset.schema, logging = true)
     val spark = dataset.sparkSession
-    import spark.implicits._
-    val surrogates = $(inputCols).map { inputCol =>
-      val ic = col(inputCol)
-      val filtered = dataset.select(ic.cast(DoubleType))
-        .filter(ic.isNotNull && ic =!= $(missingValue) && !ic.isNaN)
-      if(filtered.take(1).length == 0) {
-        throw new SparkException(s"surrogate cannot be computed. " +
-          s"All the values in $inputCol are Null, Nan or missingValue(${$(missingValue)})")
-      }
-      val surrogate = $(strategy) match {
-        case Imputer.mean => filtered.select(avg(inputCol)).as[Double].first()
-        case Imputer.median => filtered.stat.approxQuantile(inputCol, Array(0.5), 0.001).head
-      }
-      surrogate
+
+    val selected = dataset.select($(inputCols).map(col(_).cast("double")): _*).rdd
+
+    val summarizer = $(strategy) match {
+      case Imputer.mean =>
+        new Imputer.MeanSummarizer($(inputCols).length, $(missingValue))
+      case Imputer.median =>
+        new Imputer.MedianSummarizer($(inputCols).length, $(missingValue))
+    }
+
+    val summary = selected.treeAggregate(summarizer)(
+      seqOp = { case (sum, row) => sum.update(row) },
+      combOp = { case (sum1, sum2) => sum1.merge(sum2) }
+    )
+
+    val emptyCols = ($(inputCols) zip summary.counts).filter(_._2 == 0).map(_._1)
+    if(emptyCols.nonEmpty) {
--- End diff --

Style: space between `if` and `(`
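For context, a minimal self-contained sketch of the single-pass `treeAggregate` pattern the new code relies on. `SimpleMeanSummarizer` here is a hypothetical stand-in for the PR's `Imputer.MeanSummarizer` (whose definition is not shown in this hunk); the filtering mirrors what the removed per-column code did, i.e. it skips null, NaN, and the configured missing value:

```scala
import org.apache.spark.sql.{Row, SparkSession}

// Hypothetical summarizer: tracks, per column, the running sum and count of
// values that are not null, not NaN, and not equal to the missing value.
class SimpleMeanSummarizer(numCols: Int, missingValue: Double) extends Serializable {
  val sums = new Array[Double](numCols)
  val counts = new Array[Long](numCols)

  // seqOp: fold one row into this partial summary.
  def update(row: Row): this.type = {
    var i = 0
    while (i < numCols) {
      if (!row.isNullAt(i)) {
        val v = row.getDouble(i)
        if (!v.isNaN && v != missingValue) {
          sums(i) += v
          counts(i) += 1
        }
      }
      i += 1
    }
    this
  }

  // combOp: merge another partition's partial summary into this one.
  def merge(other: SimpleMeanSummarizer): this.type = {
    var i = 0
    while (i < numCols) {
      sums(i) += other.sums(i)
      counts(i) += other.counts(i)
      i += 1
    }
    this
  }
}

object SummarizerDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[2]").appName("demo").getOrCreate()
    import spark.implicits._

    // missingValue is -1.0 in this toy example.
    val df = Seq((1.0, Double.NaN), (3.0, 4.0), (-1.0, 6.0)).toDF("a", "b")
    val rdd = df.select(df.columns.map(c => df(c).cast("double")): _*).rdd

    // One pass over the data; partial summaries are merged in a tree pattern.
    val summary = rdd.treeAggregate(new SimpleMeanSummarizer(2, -1.0))(
      seqOp = { case (sum, row) => sum.update(row) },
      combOp = { case (sum1, sum2) => sum1.merge(sum2) }
    )

    // Prints the per-column means over valid values: 2.0, 5.0
    println(summary.sums.zip(summary.counts).map { case (s, c) => s / c }.mkString(", "))
    spark.stop()
  }
}
```

Compared with the removed code, which ran one DataFrame aggregation per input column, this pattern makes a single pass over the data regardless of how many columns are being imputed.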