spark git commit: [SPARK-16086] [SQL] fix Python UDF without arguments (for 1.6)

2016-06-20 Thread davies
Repository: spark
Updated Branches:
  refs/heads/master e2b7eba87 -> a46553cba


[SPARK-16086] [SQL] fix Python UDF without arguments (for 1.6)

Fix the bug for Python UDF that does not have any arguments.

Added regression tests.

Author: Davies Liu 

Closes #13793 from davies/fix_no_arguments.

(cherry picked from commit abe36c53d126bb580e408a45245fd8e81806869c)
Signed-off-by: Davies Liu 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/a46553cb
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/a46553cb
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/a46553cb

Branch: refs/heads/master
Commit: a46553cbacf0e4012df89fe55385dec5beaa680a
Parents: e2b7eba
Author: Davies Liu 
Authored: Mon Jun 20 20:50:30 2016 -0700
Committer: Davies Liu 
Committed: Mon Jun 20 20:53:45 2016 -0700

--
 python/pyspark/sql/tests.py | 5 +
 python/pyspark/sql/types.py | 9 +++--
 2 files changed, 8 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/a46553cb/python/pyspark/sql/tests.py
--
diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py
index c631ad8..ecd1a05 100644
--- a/python/pyspark/sql/tests.py
+++ b/python/pyspark/sql/tests.py
@@ -318,6 +318,11 @@ class SQLTests(ReusedPySparkTestCase):
 [row] = self.spark.sql("SELECT double(add(1, 2)), add(double(2), 1)").collect()
 self.assertEqual(tuple(row), (6, 5))
 
+def test_udf_without_arguments(self):
+self.sqlCtx.registerFunction("foo", lambda: "bar")
+[row] = self.sqlCtx.sql("SELECT foo()").collect()
+self.assertEqual(row[0], "bar")
+
 def test_udf_with_array_type(self):
 d = [Row(l=list(range(3)), d={"key": list(range(5))})]
 rdd = self.sc.parallelize(d)

http://git-wip-us.apache.org/repos/asf/spark/blob/a46553cb/python/pyspark/sql/types.py
--
diff --git a/python/pyspark/sql/types.py b/python/pyspark/sql/types.py
index bb2b954..f0b56be 100644
--- a/python/pyspark/sql/types.py
+++ b/python/pyspark/sql/types.py
@@ -1401,11 +1401,7 @@ class Row(tuple):
 if args and kwargs:
 raise ValueError("Can not use both args "
  "and kwargs to create Row")
-if args:
-# create row class or objects
-return tuple.__new__(self, args)
-
-elif kwargs:
+if kwargs:
 # create row objects
 names = sorted(kwargs.keys())
 row = tuple.__new__(self, [kwargs[n] for n in names])
@@ -1413,7 +1409,8 @@ class Row(tuple):
 return row
 
 else:
-raise ValueError("No args or kwargs")
+# create row class or objects
+return tuple.__new__(self, args)
 
 def asDict(self, recursive=False):
 """


-
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org



spark git commit: [SPARK-16086] [SQL] fix Python UDF without arguments (for 1.6)

2016-06-20 Thread davies
Repository: spark
Updated Branches:
  refs/heads/branch-2.0 f57317690 -> 087bd2799


[SPARK-16086] [SQL] fix Python UDF without arguments (for 1.6)

Fix the bug for Python UDF that does not have any arguments.

Added regression tests.

Author: Davies Liu 

Closes #13793 from davies/fix_no_arguments.

(cherry picked from commit abe36c53d126bb580e408a45245fd8e81806869c)
Signed-off-by: Davies Liu 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/087bd279
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/087bd279
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/087bd279

Branch: refs/heads/branch-2.0
Commit: 087bd2799366f4914d248e9b1f0fb921adbbdb43
Parents: f573176
Author: Davies Liu 
Authored: Mon Jun 20 20:50:30 2016 -0700
Committer: Davies Liu 
Committed: Mon Jun 20 20:52:55 2016 -0700

--
 python/pyspark/sql/tests.py | 5 +
 python/pyspark/sql/types.py | 9 +++--
 2 files changed, 8 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/087bd279/python/pyspark/sql/tests.py
--
diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py
index c631ad8..ecd1a05 100644
--- a/python/pyspark/sql/tests.py
+++ b/python/pyspark/sql/tests.py
@@ -318,6 +318,11 @@ class SQLTests(ReusedPySparkTestCase):
 [row] = self.spark.sql("SELECT double(add(1, 2)), add(double(2), 1)").collect()
 self.assertEqual(tuple(row), (6, 5))
 
+def test_udf_without_arguments(self):
+self.sqlCtx.registerFunction("foo", lambda: "bar")
+[row] = self.sqlCtx.sql("SELECT foo()").collect()
+self.assertEqual(row[0], "bar")
+
 def test_udf_with_array_type(self):
 d = [Row(l=list(range(3)), d={"key": list(range(5))})]
 rdd = self.sc.parallelize(d)

http://git-wip-us.apache.org/repos/asf/spark/blob/087bd279/python/pyspark/sql/types.py
--
diff --git a/python/pyspark/sql/types.py b/python/pyspark/sql/types.py
index bb2b954..f0b56be 100644
--- a/python/pyspark/sql/types.py
+++ b/python/pyspark/sql/types.py
@@ -1401,11 +1401,7 @@ class Row(tuple):
 if args and kwargs:
 raise ValueError("Can not use both args "
  "and kwargs to create Row")
-if args:
-# create row class or objects
-return tuple.__new__(self, args)
-
-elif kwargs:
+if kwargs:
 # create row objects
 names = sorted(kwargs.keys())
 row = tuple.__new__(self, [kwargs[n] for n in names])
@@ -1413,7 +1409,8 @@ class Row(tuple):
 return row
 
 else:
-raise ValueError("No args or kwargs")
+# create row class or objects
+return tuple.__new__(self, args)
 
 def asDict(self, recursive=False):
 """


-
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org



spark git commit: [SPARK-16086] [SQL] fix Python UDF without arguments (for 1.6)

2016-06-20 Thread davies
Repository: spark
Updated Branches:
  refs/heads/branch-1.5 1891e04a6 -> 6001138fd


[SPARK-16086] [SQL] fix Python UDF without arguments (for 1.6)

## What changes were proposed in this pull request?

Fix the bug for Python UDF that does not have any arguments.

## How was this patch tested?

Added regression tests.

Author: Davies Liu 

Closes #13793 from davies/fix_no_arguments.

(cherry picked from commit abe36c53d126bb580e408a45245fd8e81806869c)
Signed-off-by: Davies Liu 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/6001138f
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/6001138f
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/6001138f

Branch: refs/heads/branch-1.5
Commit: 6001138fd68f2318028519d09563f12874b54e7d
Parents: 1891e04
Author: Davies Liu 
Authored: Mon Jun 20 20:50:30 2016 -0700
Committer: Davies Liu 
Committed: Mon Jun 20 20:50:57 2016 -0700

--
 python/pyspark/sql/tests.py | 5 +
 python/pyspark/sql/types.py | 9 +++--
 2 files changed, 8 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/6001138f/python/pyspark/sql/tests.py
--
diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py
index 27c9d45..86e2dfb 100644
--- a/python/pyspark/sql/tests.py
+++ b/python/pyspark/sql/tests.py
@@ -286,6 +286,11 @@ class SQLTests(ReusedPySparkTestCase):
 [res] = self.sqlCtx.sql("SELECT strlen(a) FROM test WHERE strlen(a) > 1").collect()
 self.assertEqual(4, res[0])
 
+def test_udf_without_arguments(self):
+self.sqlCtx.registerFunction("foo", lambda: "bar")
+[row] = self.sqlCtx.sql("SELECT foo()").collect()
+self.assertEqual(row[0], "bar")
+
 def test_udf_with_array_type(self):
 d = [Row(l=list(range(3)), d={"key": list(range(5))})]
 rdd = self.sc.parallelize(d)

http://git-wip-us.apache.org/repos/asf/spark/blob/6001138f/python/pyspark/sql/types.py
--
diff --git a/python/pyspark/sql/types.py b/python/pyspark/sql/types.py
index b0ac207..db4cc42 100644
--- a/python/pyspark/sql/types.py
+++ b/python/pyspark/sql/types.py
@@ -1193,11 +1193,7 @@ class Row(tuple):
 if args and kwargs:
 raise ValueError("Can not use both args "
  "and kwargs to create Row")
-if args:
-# create row class or objects
-return tuple.__new__(self, args)
-
-elif kwargs:
+if kwargs:
 # create row objects
 names = sorted(kwargs.keys())
 row = tuple.__new__(self, [kwargs[n] for n in names])
@@ -1205,7 +1201,8 @@ class Row(tuple):
 return row
 
 else:
-raise ValueError("No args or kwargs")
+# create row class or objects
+return tuple.__new__(self, args)
 
 def asDict(self, recursive=False):
 """


-
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org



spark git commit: [SPARK-16086] [SQL] fix Python UDF without arguments (for 1.6)

2016-06-20 Thread davies
Repository: spark
Updated Branches:
  refs/heads/branch-1.6 db86e7fd2 -> abe36c53d


[SPARK-16086] [SQL] fix Python UDF without arguments (for 1.6)

## What changes were proposed in this pull request?

Fix the bug for Python UDF that does not have any arguments.

## How was this patch tested?

Added regression tests.

Author: Davies Liu 

Closes #13793 from davies/fix_no_arguments.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/abe36c53
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/abe36c53
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/abe36c53

Branch: refs/heads/branch-1.6
Commit: abe36c53d126bb580e408a45245fd8e81806869c
Parents: db86e7f
Author: Davies Liu 
Authored: Mon Jun 20 20:50:30 2016 -0700
Committer: Davies Liu 
Committed: Mon Jun 20 20:50:30 2016 -0700

--
 python/pyspark/sql/tests.py | 5 +
 python/pyspark/sql/types.py | 9 +++--
 2 files changed, 8 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/abe36c53/python/pyspark/sql/tests.py
--
diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py
index 0dc4274..43eb6ec 100644
--- a/python/pyspark/sql/tests.py
+++ b/python/pyspark/sql/tests.py
@@ -305,6 +305,11 @@ class SQLTests(ReusedPySparkTestCase):
 [res] = self.sqlCtx.sql("SELECT strlen(a) FROM test WHERE strlen(a) > 1").collect()
 self.assertEqual(4, res[0])
 
+def test_udf_without_arguments(self):
+self.sqlCtx.registerFunction("foo", lambda: "bar")
+[row] = self.sqlCtx.sql("SELECT foo()").collect()
+self.assertEqual(row[0], "bar")
+
 def test_udf_with_array_type(self):
 d = [Row(l=list(range(3)), d={"key": list(range(5))})]
 rdd = self.sc.parallelize(d)

http://git-wip-us.apache.org/repos/asf/spark/blob/abe36c53/python/pyspark/sql/types.py
--
diff --git a/python/pyspark/sql/types.py b/python/pyspark/sql/types.py
index 5bc0773..211b01f 100644
--- a/python/pyspark/sql/types.py
+++ b/python/pyspark/sql/types.py
@@ -1195,11 +1195,7 @@ class Row(tuple):
 if args and kwargs:
 raise ValueError("Can not use both args "
  "and kwargs to create Row")
-if args:
-# create row class or objects
-return tuple.__new__(self, args)
-
-elif kwargs:
+if kwargs:
 # create row objects
 names = sorted(kwargs.keys())
 row = tuple.__new__(self, [kwargs[n] for n in names])
@@ -1207,7 +1203,8 @@ class Row(tuple):
 return row
 
 else:
-raise ValueError("No args or kwargs")
+# create row class or objects
+return tuple.__new__(self, args)
 
 def asDict(self, recursive=False):
 """


-
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org