yaooqinn commented on a change in pull request #26371: [SPARK-27976][SQL] Add 
built-in Array Functions: array_append
URL: https://github.com/apache/spark/pull/26371#discussion_r341893678
 
 

 ##########
 File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
 ##########
 @@ -3928,3 +3928,128 @@ case class ArrayExcept(left: Expression, right: 
Expression) extends ArrayBinaryL
 
   override def prettyName: String = "array_except"
 }
+
+@ExpressionDescription(
+  usage = """
+  _FUNC_(array, element) - Returns the array resulting from appending the element 
to the end of the array
+  """,
+  examples = """
+    Examples:
+      > SELECT _FUNC_(array(1, 2, 3), 3);
+       [1,2,3,3]
+      > SELECT _FUNC_(array(1, 2, 3), null);
+       [1,2,3,null]
+      > SELECT _FUNC_(a, e) FROM VALUES (array(1,2), 3), (array(3, 4), null), 
(null, 3) tbl(a, e);
+       NULL
+       [1,2,3]
+       [3,4,null]
+  """,
+  since = "3.0.0")
+case class ArrayAppend(left: Expression, right: Expression) extends 
BinaryExpression {
 
 Review comment:
   > > ```sql
   > > postgres=# select array_cat(array[1,2], array[2]);
   > >  array_cat
   > > -----------
   > >  {1,2,2}
   > > (1 row)
   > > 
   > > postgres=# select concat(array[1,2], array[2]);
   > >   concat
   > > ----------
   > >  {1,2}{2}
   > > (1 row)
   > > ```
   > > 
   > > 
   > > ```sql
   > > select concat(array(1,2), array(2));
   > > [1,2,2]
   > > ```
   > 
   > This looks like Spark's concat for array types behaves like Presto's array_cat. 
That may not mean it is wrong.
   
   Concat used to be a string function, as in Hive, before Spark 2.4, and I do not know 
which standard or practice it follows now

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to