This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 8e20f8e3b440 [SPARK-47544][PYTHON] SparkSession builder method is incompatible with visual studio code intellisense
8e20f8e3b440 is described below

commit 8e20f8e3b4404b6d72ec47c546c94a040467c774
Author: Niranjan Jayakar <n...@databricks.com>
AuthorDate: Tue Mar 26 07:43:10 2024 -0700

    [SPARK-47544][PYTHON] SparkSession builder method is incompatible with visual studio code intellisense
    
    ### What changes were proposed in this pull request?
    
    VS Code's intellisense is unable to detect the methods and properties of
    `SparkSession.builder`. A video is worth a thousand words:
    [video](https://github.com/apache/spark/assets/16217941/e611e7e7-8760-4d9f-aa6c-9d4bd519d516).
    
    Adjust the implementation for better compatibility with the IDE.
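
    As a rough sketch of the pattern involved (not the actual PySpark sources; the
    `classproperty` descriptor below is a simplified stand-in), declaring `builder`
    as an annotated class attribute gives static analyzers a concrete `Builder` type
    to complete against, while the descriptor still constructs a fresh `Builder` on
    each access at runtime:

    ```python
    class classproperty:
        """Simplified read-only class-level property descriptor."""

        def __init__(self, fget):
            self.fget = fget

        def __get__(self, instance, owner):
            # Attribute access on the class (or an instance) calls fget with
            # the owning class, similar to combining classmethod and property.
            return self.fget(owner)


    class Builder:
        def remote(self, url: str) -> "Builder":
            return self


    class Session:
        Builder = Builder

        # Before: a decorated method. Some analyzers (e.g. VS Code's IntelliSense)
        # resolve `Session.builder` to the decorated function, so completion of
        # Builder methods breaks.
        #
        # @classproperty
        # def builder(cls) -> Builder:
        #     return cls.Builder()

        # After: an annotated class attribute. The explicit annotation tells the
        # analyzer that `Session.builder` is a Builder, and the descriptor still
        # runs at attribute-access time.
        builder: Builder = classproperty(lambda cls: cls.Builder())  # type: ignore


    print(type(Session.builder).__name__)  # prints: Builder
    ```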
    
    ### Why are the changes needed?
    
    Compatibility with IDE tooling.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Built the wheel file locally and tested on local IDE.
    See [video](https://github.com/apache/spark/assets/16217941/429b06dd-44a7-4d13-a551-c2b72c326c1e).
    
    Confirmed the same works for PyCharm.
    
    Further confirmed that the Pydocs for these methods are unaffected.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #45700 from nija-at/vscode-intellisense.
    
    Authored-by: Niranjan Jayakar <n...@databricks.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 python/pyspark/sql/connect/session.py |  7 +++----
 python/pyspark/sql/session.py         | 29 ++++++++++++++++++-----------
 2 files changed, 21 insertions(+), 15 deletions(-)

diff --git a/python/pyspark/sql/connect/session.py b/python/pyspark/sql/connect/session.py
index f339fada0d11..2c08349a3300 100644
--- a/python/pyspark/sql/connect/session.py
+++ b/python/pyspark/sql/connect/session.py
@@ -236,10 +236,9 @@ class SparkSession:
 
     _client: SparkConnectClient
 
-    @classproperty
-    def builder(cls) -> Builder:
-        return cls.Builder()
-
+    # SPARK-47544: Explicitly declaring this as an identifier instead of a method.
+    # If changing, make sure this bug is not reintroduced.
+    builder: Builder = classproperty(lambda cls: cls.Builder())  # type: ignore
     builder.__doc__ = PySparkSession.builder.__doc__
 
     def __init__(self, connection: Union[str, DefaultChannelBuilder], userId: Optional[str] = None):
diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py
index 6c80b7f42da4..4a8a653fd466 100644
--- a/python/pyspark/sql/session.py
+++ b/python/pyspark/sql/session.py
@@ -499,12 +499,18 @@ class SparkSession(SparkConversionMixin):
 
                             os.environ["SPARK_CONNECT_MODE_ENABLED"] = "1"
                             opts["spark.remote"] = url
-                            return RemoteSparkSession.builder.config(map=opts).getOrCreate()
+                            return cast(
+                                SparkSession,
+                                RemoteSparkSession.builder.config(map=opts).getOrCreate(),
+                            )
                         elif "SPARK_LOCAL_REMOTE" in os.environ:
                             url = "sc://localhost"
                             os.environ["SPARK_CONNECT_MODE_ENABLED"] = "1"
                             opts["spark.remote"] = url
-                            return RemoteSparkSession.builder.config(map=opts).getOrCreate()
+                            return cast(
+                                SparkSession,
+                                RemoteSparkSession.builder.config(map=opts).getOrCreate(),
+                            )
                         else:
                             raise PySparkRuntimeError(
                                 error_class="SESSION_ALREADY_EXIST",
@@ -560,14 +566,14 @@ class SparkSession(SparkConversionMixin):
                 # used in conjunction with Spark Connect mode.
                 os.environ["SPARK_CONNECT_MODE_ENABLED"] = "1"
                 opts["spark.remote"] = url
-                return RemoteSparkSession.builder.config(map=opts).create()
+                return cast(SparkSession, RemoteSparkSession.builder.config(map=opts).create())
             else:
                 raise PySparkRuntimeError(
                     error_class="ONLY_SUPPORTED_WITH_SPARK_CONNECT",
                     message_parameters={"feature": 
"SparkSession.builder.create"},
                 )
 
-    # TODO(SPARK-38912): Replace @classproperty with @classmethod + @property once support for
+    # TODO(SPARK-38912): Replace classproperty with @classmethod + @property once support for
     # Python 3.8 is dropped.
     #
     # In Python 3.9, the @property decorator has been made compatible with the
@@ -575,14 +581,15 @@ class SparkSession(SparkConversionMixin):
     #
     # @classmethod + @property is also affected by a bug in Python's docstring which was backported
     # to Python 3.9.6 (https://github.com/python/cpython/pull/28838)
-    @classproperty
-    def builder(cls) -> Builder:
-        """Creates a :class:`Builder` for constructing a :class:`SparkSession`.
+    #
+    # SPARK-47544: Explicitly declaring this as an identifier instead of a method.
+    # If changing, make sure this bug is not reintroduced.
+    builder: Builder = classproperty(lambda cls: cls.Builder())  # type: ignore
+    """Creates a :class:`Builder` for constructing a :class:`SparkSession`.
 
-        .. versionchanged:: 3.4.0
-            Supports Spark Connect.
-        """
-        return cls.Builder()
+    .. versionchanged:: 3.4.0
+        Supports Spark Connect.
+    """
 
     _instantiatedSession: ClassVar[Optional["SparkSession"]] = None
     _activeSession: ClassVar[Optional["SparkSession"]] = None


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
