itholic commented on a change in pull request #34931:
URL: https://github.com/apache/spark/pull/34931#discussion_r772726514
##########
File path: python/pyspark/pandas/frame.py
##########
@@ -8828,22 +8843,135 @@ def describe(self, percentiles: Optional[List[float]] = None) -> "DataFrame":
         else:
             percentiles = [0.25, 0.5, 0.75]

-        formatted_perc = ["{:.0%}".format(p) for p in sorted(percentiles)]
-        stats = ["count", "mean", "stddev", "min", *formatted_perc, "max"]
+        if len(exprs_numeric) == 0:
+            if len(exprs_non_numeric) == 0:
+                raise ValueError("Cannot describe a DataFrame without columns")

-        sdf = self._internal.spark_frame.select(*exprs).summary(*stats)
-        sdf = sdf.replace("stddev", "std", subset=["summary"])
+            # Handling non-numeric type columns
+            # We will retrieve the `count`, `unique`, `top` and `freq`.
+            sdf = self._internal.spark_frame.select(*exprs_non_numeric)

-        internal = InternalFrame(
-            spark_frame=sdf,
-            index_spark_columns=[scol_for(sdf, "summary")],
-            column_labels=column_labels,
-            data_spark_columns=[
-                scol_for(sdf, self._internal.spark_column_name_for(label))
-                for label in column_labels
-            ],
-        )
-        return DataFrame(internal).astype("float64")
+            # Get `count` & `unique` for each column
+            counts, uniques = map(lambda x: x[1:], sdf.summary("count", "count_distinct").take(2))
+
+            # Get `top` & `freq` for each column
+            tops = []
+            freqs = []
+            for column in exprs_non_numeric:
+                top, freq = sdf.groupby(column).count().sort("count", ascending=False).first()

Review comment:
   Makes sense. Let me leave the TODO comment for now and revisit after writing a Scala util. I'll create a related JIRA soon and add a link here.
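(Editor's context on the performance concern behind the TODO: the loop above launches one Spark job per non-numeric column. Below is a minimal standalone sketch, my illustration rather than the PR's code or the planned Scala util, of a single-pass alternative: stack every column into (column, value) pairs via create_map/explode, then rank value counts with a window function so `top` and `freq` for all columns come out of a single job. The sample DataFrame and all names are hypothetical.)

from pyspark.sql import SparkSession, functions as F
from pyspark.sql.window import Window

spark = SparkSession.builder.getOrCreate()

# Hypothetical stand-in for the non-numeric projection `sdf` in the diff.
sdf = spark.createDataFrame(
    [("a", "x"), ("a", "y"), ("b", "y"), ("a", "y")], ["c1", "c2"]
)

# Stack all columns into (col_name, value) rows; values are cast to string
# so they can share a single map value type.
pairs = sdf.select(
    F.explode(
        F.create_map(
            *[p for c in sdf.columns for p in (F.lit(c), F.col(c).cast("string"))]
        )
    ).alias("col_name", "value")
)

# Count occurrences per (column, value) and keep the most frequent value per
# column; ties are broken arbitrarily here, just like the per-column loop.
w = Window.partitionBy("col_name").orderBy(F.desc("freq"))
top_freq = (
    pairs.groupBy("col_name", "value")
    .agg(F.count("*").alias("freq"))
    .withColumn("rn", F.row_number().over(w))
    .where(F.col("rn") == 1)
    .drop("rn")
)
top_freq.show()
# Expected rows (order may vary): (c1, a, 3), (c2, y, 3)

The trade-off: the stacked variant replaces N per-column jobs with one shuffle over an N-times-taller frame, which is roughly the shape a dedicated Scala util could implement more efficiently.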