Repository: spark
Updated Branches:
  refs/heads/master 7fe5cff05 -> 5e5d886a2


[SPARK-25856][SQL][MINOR] Remove AverageLike and CountLike classes

## What changes were proposed in this pull request?
These two classes were added to support the regr_ expressions (SPARK-23907). 
Those expressions have since been removed, so we can also remove these base 
classes and inline their logic in the concrete classes.
## How was this patch tested?
Existing tests.

Closes #22856 from dilipbiswal/average_cleanup.

Authored-by: Dilip Biswal <dbis...@us.ibm.com>
Signed-off-by: Sean Owen <sean.o...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5e5d886a
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5e5d886a
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5e5d886a

Branch: refs/heads/master
Commit: 5e5d886a2bc291a707cf4a6c70ecc6de6f8e990d
Parents: 7fe5cff
Author: Dilip Biswal <dbis...@us.ibm.com>
Authored: Mon Oct 29 12:56:06 2018 -0500
Committer: Sean Owen <sean.o...@databricks.com>
Committed: Mon Oct 29 12:56:06 2018 -0500

----------------------------------------------------------------------
 .../expressions/aggregate/Average.scala         | 33 +++++++++-----------
 .../catalyst/expressions/aggregate/Count.scala  | 28 +++++++----------
 2 files changed, 25 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/5e5d886a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Average.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Average.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Average.scala
index 5ecb77b..8dd80dc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Average.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Average.scala
@@ -23,9 +23,21 @@ import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.util.TypeUtils
 import org.apache.spark.sql.types._
 
-abstract class AverageLike(child: Expression) extends DeclarativeAggregate {
+@ExpressionDescription(
+  usage = "_FUNC_(expr) - Returns the mean calculated from values of a group.")
+case class Average(child: Expression) extends DeclarativeAggregate with ImplicitCastInputTypes {
+
+  override def prettyName: String = "avg"
+
+  override def children: Seq[Expression] = child :: Nil
+
+  override def inputTypes: Seq[AbstractDataType] = Seq(NumericType)
+
+  override def checkInputDataTypes(): TypeCheckResult =
+    TypeUtils.checkForNumericExpr(child.dataType, "function average")
 
   override def nullable: Boolean = true
+
   // Return data type.
   override def dataType: DataType = resultType
 
@@ -63,28 +75,11 @@ abstract class AverageLike(child: Expression) extends DeclarativeAggregate {
       sum.cast(resultType) / count.cast(resultType)
   }
 
-  protected def updateExpressionsDef: Seq[Expression] = Seq(
+  override lazy val updateExpressions: Seq[Expression] = Seq(
     /* sum = */
     Add(
       sum,
       coalesce(child.cast(sumDataType), Literal(0).cast(sumDataType))),
     /* count = */ If(child.isNull, count, count + 1L)
   )
-
-  override lazy val updateExpressions = updateExpressionsDef
-}
-
-@ExpressionDescription(
-  usage = "_FUNC_(expr) - Returns the mean calculated from values of a group.")
-case class Average(child: Expression)
-  extends AverageLike(child) with ImplicitCastInputTypes {
-
-  override def prettyName: String = "avg"
-
-  override def children: Seq[Expression] = child :: Nil
-
-  override def inputTypes: Seq[AbstractDataType] = Seq(NumericType)
-
-  override def checkInputDataTypes(): TypeCheckResult =
-    TypeUtils.checkForNumericExpr(child.dataType, "function average")
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/5e5d886a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Count.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Count.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Count.scala
index 8cab8e4..d402f2d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Count.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Count.scala
@@ -21,10 +21,17 @@ import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.types._
 
-/**
- * Base class for all counting aggregators.
- */
-abstract class CountLike extends DeclarativeAggregate {
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = """
+    _FUNC_(*) - Returns the total number of retrieved rows, including rows containing null.
+
+    _FUNC_(expr[, expr...]) - Returns the number of rows for which the supplied expression(s) are all non-null.
+
+    _FUNC_(DISTINCT expr[, expr...]) - Returns the number of rows for which the supplied expression(s) are unique and non-null.
+  """)
+// scalastyle:on line.size.limit
+case class Count(children: Seq[Expression]) extends DeclarativeAggregate {
   override def nullable: Boolean = false
 
   // Return data type.
@@ -45,19 +52,6 @@ abstract class CountLike extends DeclarativeAggregate {
   override lazy val evaluateExpression = count
 
   override def defaultResult: Option[Literal] = Option(Literal(0L))
-}
-
-// scalastyle:off line.size.limit
-@ExpressionDescription(
-  usage = """
-    _FUNC_(*) - Returns the total number of retrieved rows, including rows containing null.
-
-    _FUNC_(expr[, expr...]) - Returns the number of rows for which the supplied expression(s) are all non-null.
-
-    _FUNC_(DISTINCT expr[, expr...]) - Returns the number of rows for which the supplied expression(s) are unique and non-null.
-  """)
-// scalastyle:on line.size.limit
-case class Count(children: Seq[Expression]) extends CountLike {
 
   override lazy val updateExpressions = {
     val nullableChildren = children.filter(_.nullable)


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to