Github user HyukjinKwon commented on a diff in the pull request: https://github.com/apache/spark/pull/19325#discussion_r140623560 --- Diff: python/pyspark/sql/functions.py --- @@ -2183,14 +2183,29 @@ def pandas_udf(f=None, returnType=StringType()): :param f: python function if used as a standalone function :param returnType: a :class:`pyspark.sql.types.DataType` object - # TODO: doctest + >>> from pyspark.sql.types import IntegerType, StringType + >>> slen = pandas_udf(lambda s: s.str.len(), IntegerType()) + >>> @pandas_udf(returnType=StringType()) + ... def to_upper(s): + ... return s.str.upper() + ... + >>> @pandas_udf(returnType="integer") + ... def add_one(x): + ... return x + 1 + ... + >>> df = spark.createDataFrame([(1, "John Doe", 21)], ("id", "name", "age")) + >>> df.select(slen("name").alias("slen(name)"), to_upper("name"), add_one("age")).show() + +----------+--------------+------------+ + |slen(name)|to_upper(name)|add_one(age)| + +----------+--------------+------------+ + | 8| JOHN DOE| 22| + +----------+--------------+------------+ """ + wrapped_udf = _create_udf(f, returnType=returnType, vectorized=True) import inspect - # If function "f" does not define the optional kwargs, then wrap with a kwargs placeholder - if inspect.getargspec(f).keywords is None: - return _create_udf(lambda *a, **kwargs: f(*a), returnType=returnType, vectorized=True) - else: - return _create_udf(f, returnType=returnType, vectorized=True) + if not inspect.getargspec(wrapped_udf.func).args: --- End diff -- This is totally a personal preference based on my limited experience. I usually avoid using the `if not something` expression because it obscures the expected type — for example, the value could be `None`, `0`, or a 0-length list or tuple, since the expression coerces it to a bool. I usually write `is not None` or `len(..) > 0` instead. I am fine with it as is too (because I think it's a personal preference), but I just wanted to leave a side note (and please change it if this persuades you as well).
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org