[ https://issues.apache.org/jira/browse/SPARK-41820?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Sandeep Singh updated SPARK-41820: ---------------------------------- Description: {code:java} File "/Users/s.singh/personal/spark-oss/python/pyspark/sql/connect/dataframe.py", line 1292, in pyspark.sql.connect.dataframe.DataFrame.createOrReplaceGlobalTempView Failed example: df2.createOrReplaceGlobalTempView("people") Exception raised: Traceback (most recent call last): File "/usr/local/Cellar/python@3.10/3.10.8/Frameworks/Python.framework/Versions/3.10/lib/python3.10/doctest.py", line 1350, in __run exec(compile(example.source, filename, "single", File "<doctest pyspark.sql.connect.dataframe.DataFrame.createOrReplaceGlobalTempView[3]>", line 1, in <module> df2.createOrReplaceGlobalTempView("people") File "/Users/s.singh/personal/spark-oss/python/pyspark/sql/connect/dataframe.py", line 1192, in createOrReplaceGlobalTempView self._session.client.execute_command(command) File "/Users/s.singh/personal/spark-oss/python/pyspark/sql/connect/client.py", line 459, in execute_command self._execute(req) File "/Users/s.singh/personal/spark-oss/python/pyspark/sql/connect/client.py", line 547, in _execute self._handle_error(rpc_error) File "/Users/s.singh/personal/spark-oss/python/pyspark/sql/connect/client.py", line 625, in _handle_error raise SparkConnectException(status.message) from None pyspark.sql.connect.client.SparkConnectException: requirement failed {code} was: {code} File "/.../spark/python/pyspark/sql/connect/column.py", line 106, in pyspark.sql.connect.column.Column.eqNullSafe Failed example: df1.join(df2, df1["value"] == df2["value"]).count() Exception raised: Traceback (most recent call last): File "/.../miniconda3/envs/python3.9/lib/python3.9/doctest.py", line 1336, in __run exec(compile(example.source, filename, "single", File "<doctest pyspark.sql.connect.column.Column.eqNullSafe[4]>", line 1, in <module> df1.join(df2, df1["value"] == df2["value"]).count() File "/.../spark/python/pyspark/sql/connect/dataframe.py", line 151, in count pdd = 
self.agg(_invoke_function("count", lit(1))).toPandas() File "/.../spark/python/pyspark/sql/connect/dataframe.py", line 1031, in toPandas return self._session.client.to_pandas(query) File "/.../spark/python/pyspark/sql/connect/client.py", line 413, in to_pandas return self._execute_and_fetch(req) File "/.../spark/python/pyspark/sql/connect/client.py", line 573, in _execute_and_fetch self._handle_error(rpc_error) File "/.../spark/python/pyspark/sql/connect/client.py", line 619, in _handle_error raise SparkConnectAnalysisException( pyspark.sql.connect.client.SparkConnectAnalysisException: [AMBIGUOUS_REFERENCE] Reference `value` is ambiguous, could be: [`value`, `value`]. {code} > DataFrame.createOrReplaceGlobalTempView - SparkConnectException: requirement > failed > ----------------------------------------------------------------------------------- > > Key: SPARK-41820 > URL: https://issues.apache.org/jira/browse/SPARK-41820 > Project: Spark > Issue Type: Sub-task > Components: Connect > Affects Versions: 3.4.0 > Reporter: Sandeep Singh > Priority: Major > > {code:java} > File > "/Users/s.singh/personal/spark-oss/python/pyspark/sql/connect/dataframe.py", > line 1292, in > pyspark.sql.connect.dataframe.DataFrame.createOrReplaceGlobalTempView > Failed example: > df2.createOrReplaceGlobalTempView("people") > Exception raised: > Traceback (most recent call last): > File > "/usr/local/Cellar/python@3.10/3.10.8/Frameworks/Python.framework/Versions/3.10/lib/python3.10/doctest.py", > line 1350, in __run > exec(compile(example.source, filename, "single", > File "<doctest > pyspark.sql.connect.dataframe.DataFrame.createOrReplaceGlobalTempView[3]>", > line 1, in <module> > df2.createOrReplaceGlobalTempView("people") > File > "/Users/s.singh/personal/spark-oss/python/pyspark/sql/connect/dataframe.py", > line 1192, in createOrReplaceGlobalTempView > self._session.client.execute_command(command) > File > "/Users/s.singh/personal/spark-oss/python/pyspark/sql/connect/client.py", > 
line 459, in execute_command > self._execute(req) > File > "/Users/s.singh/personal/spark-oss/python/pyspark/sql/connect/client.py", > line 547, in _execute > self._handle_error(rpc_error) > File > "/Users/s.singh/personal/spark-oss/python/pyspark/sql/connect/client.py", > line 625, in _handle_error > raise SparkConnectException(status.message) from None > pyspark.sql.connect.client.SparkConnectException: requirement failed > {code} -- This message was sent by Atlassian Jira (v8.20.10#820010) --------------------------------------------------------------------- To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org For additional commands, e-mail: issues-help@spark.apache.org