Github user DylanGuedes commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21045#discussion_r187775631
  
    --- Diff: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
 ---
    @@ -90,6 +90,112 @@ case class MapKeys(child: Expression)
       override def prettyName: String = "map_keys"
     }
     
    +@ExpressionDescription(
    +  usage = """_FUNC_(a1, a2) - Returns a merged array of structs in which 
the N-th struct
    +  pairs the N-th element of the first array with the N-th element of the 
second.""",
    +  examples = """
    +    Examples:
    +      > SELECT _FUNC_(array(1, 2, 3), array(2, 3, 4));
    +        [[1, 2], [2, 3], [3, 4]]
    +  """,
    +  since = "2.4.0")
    +case class ZipLists(left: Expression, right: Expression)
    +  extends BinaryExpression with ExpectsInputTypes {
    +
    +  override def inputTypes: Seq[AbstractDataType] = Seq(ArrayType, 
ArrayType)
    +
    +  override def dataType: DataType = ArrayType(StructType(
    +    StructField("_1", left.dataType.asInstanceOf[ArrayType].elementType, 
true) ::
    +    StructField("_2", right.dataType.asInstanceOf[ArrayType].elementType, 
true) ::
    +  Nil))
    +
    +  override def prettyName: String = "zip_lists"
    +
    +  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    +    nullSafeCodeGen(ctx, ev, (arr1, arr2) => {
    +      val genericArrayData = classOf[GenericArrayData].getName
    +      val genericInternalRow = classOf[GenericInternalRow].getName
    +
    +      val i = ctx.freshName("i")
    +      val values = ctx.freshName("values")
    +      val len1 = ctx.freshName("len1")
    +      val len2 = ctx.freshName("len2")
    +      val pair = ctx.freshName("pair")
    +      val getValue1 = CodeGenerator.getValue(
    +        arr1, left.dataType.asInstanceOf[ArrayType].elementType, i)
    +      val getValue2 = CodeGenerator.getValue(
    +        arr2, right.dataType.asInstanceOf[ArrayType].elementType, i)
    +
    +      s"""
    --- End diff --
    
    Done! I am currently using sbt and an editor, actually.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to