Github user ueshin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21050#discussion_r196654969
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala ---
    @@ -2355,3 +2356,281 @@ case class ArrayRemove(left: Expression, right: Expression)
     
       override def prettyName: String = "array_remove"
     }
    +
    +/**
    + * Removes duplicate values from the array.
    + */
    +@ExpressionDescription(
    +  usage = "_FUNC_(array) - Removes duplicate values from the array.",
    +  examples = """
    +    Examples:
    +      > SELECT _FUNC_(array(1, 2, 3, null, 3));
    +       [1,2,3,null]
    +  """, since = "2.4.0")
    +case class ArrayDistinct(child: Expression)
    +  extends UnaryExpression with ExpectsInputTypes {
    +
    +  override def inputTypes: Seq[AbstractDataType] = Seq(ArrayType)
    +
    +  override def dataType: DataType = child.dataType
    +
    +  @transient lazy val elementType: DataType = dataType.asInstanceOf[ArrayType].elementType
    +
    +  @transient private lazy val ordering: Ordering[Any] =
    +    TypeUtils.getInterpretedOrdering(elementType)
    +
    +  override def checkInputDataTypes(): TypeCheckResult = {
    +    super.checkInputDataTypes() match {
    +      case f: TypeCheckResult.TypeCheckFailure => f
    +      case TypeCheckResult.TypeCheckSuccess =>
    +        TypeUtils.checkForOrderingExpr(elementType, s"function $prettyName")
    +    }
    +  }
    +
    +  @transient private lazy val elementTypeSupportEquals = elementType match {
    +    case BinaryType => false
    +    case _: AtomicType => true
    +    case _ => false
    +  }
    +
    +  override def nullSafeEval(array: Any): Any = {
    +    val data = array.asInstanceOf[ArrayData].toArray[AnyRef](elementType)
    +    if (elementTypeSupportEquals) {
    +      new GenericArrayData(data.distinct.asInstanceOf[Array[Any]])
    +    } else {
    +      var foundNullElement = false
    +      var pos = 0
    +      for(i <- 0 until data.length) {
    +        if (data(i) == null) {
    +          if (!foundNullElement) {
    +            foundNullElement = true
    +            pos = pos + 1
    +          }
    +        } else {
    +          var j = 0
    +          var done = false
    +          while (j <= i && !done) {
    +            if (data(j) != null && ordering.equiv(data(j), data(i))) {
    +              done = true
    +            }
    +            j = j + 1
    +          }
    +          if (i == j-1) {
    +            pos = pos + 1
    +          }
    +        }
    +      }
    +      new GenericArrayData(data.slice(0, pos))
    +    }
    +  }
    +
    +  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    +    nullSafeCodeGen(ctx, ev, (array) => {
    +      val i = ctx.freshName("i")
    +      val j = ctx.freshName("j")
    +      val sizeOfDistinctArray = ctx.freshName("sizeOfDistinctArray")
    +      val getValue1 = CodeGenerator.getValue(array, elementType, i)
    +      val getValue2 = CodeGenerator.getValue(array, elementType, j)
    +      val foundNullElement = ctx.freshName("foundNullElement")
    +      val openHashSet = classOf[OpenHashSet[_]].getName
    +      val hs = ctx.freshName("hs")
    +      val classTag = s"scala.reflect.ClassTag$$.MODULE$$.Object()"
    +      if(elementTypeSupportEquals) {
    --- End diff --
    
    nit: `if (`?
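    
    For reference, a minimal sketch of the suggested spacing (a space between `if` and the opening parenthesis); the branch comments below are only placeholders inferred from the surrounding diff, not the actual generated code:
    
        if (elementTypeSupportEquals) {
          // fast path: presumably deduplicates via the OpenHashSet `hs` declared above
        } else {
          // fallback path for element types without reliable equals (e.g. BinaryType)
        }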

