Github user mgaido91 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21011#discussion_r181376485
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala ---
    @@ -287,3 +288,173 @@ case class ArrayContains(left: Expression, right: Expression)
     
       override def prettyName: String = "array_contains"
     }
    +
    +/**
    + * Creates a String containing all the elements of the input array separated by the delimiter.
    + */
    +@ExpressionDescription(
    +  usage = """
    +    _FUNC_(array, delimiter[, nullReplacement]) - Concatenates the elements of the given array
    +      using the delimiter and an optional string to replace nulls. If no value is set for
    +      nullReplacement, any null value is filtered.""",
    +  examples = """
    +    Examples:
    +      > SELECT _FUNC_(array('hello', 'world'), ' ');
    +       hello world
    +      > SELECT _FUNC_(array('hello', null, 'world'), ' ');
    +       hello world
    +      > SELECT _FUNC_(array('hello', null, 'world'), ' ', ',');
    +       hello , world
    +  """, since = "2.4.0")
    +case class ArrayJoin(
    +    array: Expression,
    +    delimiter: Expression,
    +    nullReplacement: Option[Expression]) extends Expression with ExpectsInputTypes {
    +
    +  def this(array: Expression, delimiter: Expression) = this(array, delimiter, None)
    +
    +  def this(array: Expression, delimiter: Expression, nullReplacement: Expression) =
    +    this(array, delimiter, Some(nullReplacement))
    +
    +  override def inputTypes: Seq[AbstractDataType] = if (nullReplacement.isDefined) {
    +      Seq(ArrayType(StringType), StringType, StringType)
    +    } else {
    +      Seq(ArrayType(StringType), StringType)
    +    }
    --- End diff --
    
    I don't think the indentation is wrong here: it applies to the if...else expression that forms the method body, not to the method itself.
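    
    To illustrate the point with a minimal, hypothetical Scala sketch (not code from this PR): when a method body is a single if...else expression, its branch blocks are indented one level under that expression rather than under the def itself.
    
        // Hypothetical example for illustration only.
        // The method body is a single if...else expression, so its branch
        // blocks get an extra indentation level relative to the def.
        def requiredTypes(hasReplacement: Boolean): Seq[String] = if (hasReplacement) {
            Seq("array", "string", "string")
          } else {
            Seq("array", "string")
          }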


---
