EnricoMi commented on code in PR #38223:
URL: https://github.com/apache/spark/pull/38223#discussion_r1031346132
##
python/pyspark/worker.py:
##
@@ -146,7 +146,52 @@ def verify_result_type(result):
)
-def wrap_cogrouped_map_pandas_udf(f, return_type, argspec):
+def verif
EnricoMi commented on code in PR #38223:
URL: https://github.com/apache/spark/pull/38223#discussion_r1031345438
##
python/pyspark/worker.py:
##
@@ -146,7 +146,52 @@ def verify_result_type(result):
)
-def wrap_cogrouped_map_pandas_udf(f, return_type, argspec):
+def verif
EnricoMi commented on code in PR #38223:
URL: https://github.com/apache/spark/pull/38223#discussion_r1031150953
##
python/pyspark/sql/tests/pandas/test_pandas_cogrouped_map.py:
##
@@ -165,100 +148,191 @@ def merge_pandas(lft, _):
)
def test_apply_in_panda
EnricoMi commented on code in PR #38223:
URL: https://github.com/apache/spark/pull/38223#discussion_r1031150574
##
python/pyspark/worker.py:
##
@@ -146,7 +146,74 @@ def verify_result_type(result):
)
-def wrap_cogrouped_map_pandas_udf(f, return_type, argspec):
+def verif
EnricoMi commented on code in PR #38223:
URL: https://github.com/apache/spark/pull/38223#discussion_r1015075283
##
python/pyspark/worker.py:
##
@@ -188,22 +241,7 @@ def wrapped(key_series, value_series):
elif len(argspec.args) == 2:
key = tuple(s[0] for s i
EnricoMi commented on code in PR #38223:
URL: https://github.com/apache/spark/pull/38223#discussion_r1015075103
##
python/pyspark/sql/tests/pandas/test_pandas_cogrouped_map.py:
##
@@ -165,100 +148,191 @@ def merge_pandas(lft, _):
)
def test_apply_in_panda
EnricoMi commented on code in PR #38223:
URL: https://github.com/apache/spark/pull/38223#discussion_r1014612015
##
python/pyspark/worker.py:
##
@@ -159,27 +226,13 @@ def wrapped(left_key_series, left_value_series,
right_key_series, right_value_se
key_series = left_
EnricoMi commented on code in PR #38223:
URL: https://github.com/apache/spark/pull/38223#discussion_r1013672643
##
python/pyspark/worker.py:
##
@@ -159,27 +226,13 @@ def wrapped(left_key_series, left_value_series,
right_key_series, right_value_se
key_series = left_
EnricoMi commented on code in PR #38223:
URL: https://github.com/apache/spark/pull/38223#discussion_r1013668836
##
python/pyspark/worker.py:
##
@@ -146,7 +146,74 @@ def verify_result_type(result):
)
-def wrap_cogrouped_map_pandas_udf(f, return_type, argspec):
+def verif
EnricoMi commented on code in PR #38223:
URL: https://github.com/apache/spark/pull/38223#discussion_r1011250306
##
python/pyspark/worker.py:
##
@@ -146,7 +146,74 @@ def verify_result_type(result):
)
-def wrap_cogrouped_map_pandas_udf(f, return_type, argspec):
+def verif
EnricoMi commented on code in PR #38223:
URL: https://github.com/apache/spark/pull/38223#discussion_r1001606273
##
python/pyspark/sql/pandas/serializers.py:
##
@@ -216,7 +216,7 @@ def _create_batch(self, series):
series = [series]
series = ((s, None) if not
EnricoMi commented on code in PR #38223:
URL: https://github.com/apache/spark/pull/38223#discussion_r1001564996
##
python/pyspark/sql/pandas/serializers.py:
##
@@ -216,7 +216,7 @@ def _create_batch(self, series):
series = [series]
series = ((s, None) if not
EnricoMi commented on code in PR #38223:
URL: https://github.com/apache/spark/pull/38223#discussion_r1001556844
##
python/pyspark/sql/pandas/serializers.py:
##
@@ -216,7 +216,7 @@ def _create_batch(self, series):
series = [series]
series = ((s, None) if not
13 matches
Mailing list logo