Repository: spark
Updated Branches:
  refs/heads/master 0bc8847aa -> 3fbf0a5f9


[MINOR][DOCS] Match several documentation changes in Scala to R/Python

## What changes were proposed in this pull request?

This PR proposes to match minor documentation changes in 
https://github.com/apache/spark/pull/17399 and 
https://github.com/apache/spark/pull/17380 to R/Python.

## How was this patch tested?

Manual tests in Python, Python tests via `./python/run-tests.py 
--module=pyspark-sql`, and lint-checks for Python/R.

Author: hyukjinkwon <gurwls...@gmail.com>

Closes #17429 from HyukjinKwon/minor-match-doc.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3fbf0a5f
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3fbf0a5f
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3fbf0a5f

Branch: refs/heads/master
Commit: 3fbf0a5f9297f438bc92db11f106d4a0ae568613
Parents: 0bc8847
Author: hyukjinkwon <gurwls...@gmail.com>
Authored: Sun Mar 26 18:40:00 2017 -0700
Committer: Felix Cheung <felixche...@apache.org>
Committed: Sun Mar 26 18:40:00 2017 -0700

----------------------------------------------------------------------
 R/pkg/R/functions.R             | 6 +++---
 python/pyspark/sql/functions.py | 8 ++++----
 python/pyspark/sql/tests.py     | 8 ++++++++
 3 files changed, 15 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/3fbf0a5f/R/pkg/R/functions.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/functions.R b/R/pkg/R/functions.R
index 2cff3ac..449476d 100644
--- a/R/pkg/R/functions.R
+++ b/R/pkg/R/functions.R
@@ -2632,8 +2632,8 @@ setMethod("date_sub", signature(y = "Column", x = 
"numeric"),
 
 #' format_number
 #'
-#' Formats numeric column y to a format like '#,###,###.##', rounded to x 
decimal places,
-#' and returns the result as a string column.
+#' Formats numeric column y to a format like '#,###,###.##', rounded to x 
decimal places
+#' with HALF_EVEN round mode, and returns the result as a string column.
 #'
 #' If x is 0, the result has no decimal point or fractional part.
 #' If x < 0, the result will be null.
@@ -3548,7 +3548,7 @@ setMethod("row_number",
 
 #' array_contains
 #'
-#' Returns true if the array contain the value.
+#' Returns null if the array is null, true if the array contains the value, 
and false otherwise.
 #'
 #' @param x A Column
 #' @param value A value to be checked if contained in the column

http://git-wip-us.apache.org/repos/asf/spark/blob/3fbf0a5f/python/pyspark/sql/functions.py
----------------------------------------------------------------------
diff --git a/python/pyspark/sql/functions.py b/python/pyspark/sql/functions.py
index f9121e6..843ae38 100644
--- a/python/pyspark/sql/functions.py
+++ b/python/pyspark/sql/functions.py
@@ -1327,8 +1327,8 @@ def encode(col, charset):
 @since(1.5)
 def format_number(col, d):
     """
-    Formats the number X to a format like '#,--#,--#.--', rounded to d decimal 
places,
-    and returns the result as a string.
+    Formats the number X to a format like '#,--#,--#.--', rounded to d decimal 
places
+    with HALF_EVEN round mode, and returns the result as a string.
 
     :param col: the column name of the numeric value to be formatted
     :param d: the N decimal places
@@ -1675,8 +1675,8 @@ def array(*cols):
 @since(1.5)
 def array_contains(col, value):
     """
-    Collection function: returns True if the array contains the given value. 
The collection
-    elements and value must be of the same type.
+    Collection function: returns null if the array is null, true if the array 
contains the
+    given value, and false otherwise.
 
     :param col: name of column containing array
     :param value: value to check for in array

http://git-wip-us.apache.org/repos/asf/spark/blob/3fbf0a5f/python/pyspark/sql/tests.py
----------------------------------------------------------------------
diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py
index b93b7ed..db41b4e 100644
--- a/python/pyspark/sql/tests.py
+++ b/python/pyspark/sql/tests.py
@@ -1129,6 +1129,14 @@ class SQLTests(ReusedPySparkTestCase):
         rndn2 = df.select('key', functions.randn(0)).collect()
         self.assertEqual(sorted(rndn1), sorted(rndn2))
 
+    def test_array_contains_function(self):
+        from pyspark.sql.functions import array_contains
+
+        df = self.spark.createDataFrame([(["1", "2", "3"],), ([],)], ['data'])
+        actual = df.select(array_contains(df.data, 1).alias('b')).collect()
+        # The value argument can be implicitly castable to the element's type 
of the array.
+        self.assertEqual([Row(b=True), Row(b=False)], actual)
+
     def test_between_function(self):
         df = self.sc.parallelize([
             Row(a=1, b=2, c=3),


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to