itholic commented on a change in pull request #34389: URL: https://github.com/apache/spark/pull/34389#discussion_r736185534
##########
File path: python/pyspark/pandas/namespace.py
##########
```diff
@@ -775,39 +795,46 @@ def read_parquet(
     index_names = None

-    if index_col is None and pandas_metadata:
-        # Try to read pandas metadata
-
-        @no_type_check
-        @pandas_udf("index_col array<string>, index_names array<string>")
-        def read_index_metadata(pser: pd.Series) -> pd.DataFrame:
-            binary = pser.iloc[0]
-            metadata = pq.ParquetFile(pa.BufferReader(binary)).metadata.metadata
-            if b"pandas" in metadata:
-                pandas_metadata = json.loads(metadata[b"pandas"].decode("utf8"))
-                if all(isinstance(col, str) for col in pandas_metadata["index_columns"]):
-                    index_col = []
-                    index_names = []
-                    for col in pandas_metadata["index_columns"]:
-                        index_col.append(col)
-                        for column in pandas_metadata["columns"]:
-                            if column["field_name"] == col:
-                                index_names.append(column["name"])
-                                break
-                        else:
-                            index_names.append(None)
-                    return pd.DataFrame({"index_col": [index_col], "index_names": [index_names]})
-            return pd.DataFrame({"index_col": [None], "index_names": [None]})
-
-        index_col, index_names = (
-            default_session()
-            .read.format("binaryFile")
-            .load(path)
-            .limit(1)
-            .select(read_index_metadata("content").alias("index_metadata"))
-            .select("index_metadata.*")
-            .head()
+    if index_col is None:
```

Review comment:
Ah, then do you think it should be something like
```python
if index_col is None and pandas_metadata:
    ...
elif index_col is None:
    raise_advice_warning(
        ...
```
??
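For reference, the `read_index_metadata` pandas UDF removed in the hunk above boils down to a lookup in the Parquet footer's `pandas` key-value metadata. Below is a minimal standalone sketch of that logic against a local file; the `read_index_metadata_local` name and the plain-path argument are illustrative assumptions, since `read_parquet` itself runs the equivalent through Spark's `binaryFile` source on the first file under `path`:

```python
import json

import pyarrow.parquet as pq


def read_index_metadata_local(parquet_path):
    """Recover (index_col, index_names) from a Parquet file's pandas metadata, if present."""
    # Key-value metadata stored in the file footer; keys and values are bytes.
    metadata = pq.ParquetFile(parquet_path).metadata.metadata
    if not metadata or b"pandas" not in metadata:
        return None, None
    pandas_metadata = json.loads(metadata[b"pandas"].decode("utf8"))
    if not all(isinstance(col, str) for col in pandas_metadata["index_columns"]):
        # A RangeIndex is stored as a dict descriptor rather than a column name,
        # so there is no materialized index column to restore.
        return None, None
    index_col, index_names = [], []
    for col in pandas_metadata["index_columns"]:
        index_col.append(col)
        # Map the stored field name back to the original (possibly None) index name.
        matches = [c["name"] for c in pandas_metadata["columns"] if c["field_name"] == col]
        index_names.append(matches[0] if matches else None)
    return index_col, index_names
```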