This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 31a48381f51 [SPARK-46230][PYTHON] Migrate `RetriesExceeded` into PySpark error
31a48381f51 is described below

commit 31a48381f5139a51045a10df344df3ce7ad1adb7
Author: Haejoon Lee <haejoon....@databricks.com>
AuthorDate: Wed Dec 6 11:00:43 2023 -0800

    [SPARK-46230][PYTHON] Migrate `RetriesExceeded` into PySpark error
    
    ### What changes were proposed in this pull request?
    
    This PR proposes to migrate `RetriesExceeded` into a PySpark error.
    
    ### Why are the changes needed?
    
    All errors defined in PySpark should inherit from `PySparkException` to keep the error messages generated from PySpark consistent.
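    
    As a rough illustration of the intended effect (a minimal sketch, not code from this patch; it assumes a Spark Connect session `spark` whose retries end up exhausted, and the triggering action is only a placeholder):
    
    ```python
    from pyspark.errors import PySparkException, RetriesExceeded
    
    try:
        spark.range(1).collect()  # any action that exhausts the retry budget
    except PySparkException as e:
        # After this change, RetriesExceeded is a PySparkException and carries
        # an error class instead of being a bare Exception subclass.
        assert isinstance(e, RetriesExceeded)
        print(e.getErrorClass())  # RETRIES_EXCEEDED
    ```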
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, it's internal refactoring for better error handling.
    
    ### How was this patch tested?
    
    The existing CI should pass.
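    
    As an additional, hypothetical sanity check (not part of the existing test suite), the relocated error class can be exercised directly:
    
    ```python
    from pyspark.errors import PySparkException, RetriesExceeded
    
    # Construct the error the same way the retry loop now does.
    exc = RetriesExceeded(error_class="RETRIES_EXCEEDED", message_parameters={})
    assert isinstance(exc, PySparkException)
    assert exc.getErrorClass() == "RETRIES_EXCEEDED"
    ```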
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #44147 from itholic/retires_exception.
    
    Authored-by: Haejoon Lee <haejoon....@databricks.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 python/docs/source/reference/pyspark.errors.rst        |  1 +
 python/pyspark/errors/__init__.py                      |  2 ++
 python/pyspark/errors/error_classes.py                 |  5 +++++
 python/pyspark/errors/exceptions/base.py               |  7 +++++++
 python/pyspark/sql/connect/client/retries.py           | 11 ++---------
 python/pyspark/sql/tests/connect/client/test_client.py |  2 +-
 python/pyspark/sql/tests/connect/test_connect_basic.py |  3 ++-
 7 files changed, 20 insertions(+), 11 deletions(-)

diff --git a/python/docs/source/reference/pyspark.errors.rst b/python/docs/source/reference/pyspark.errors.rst
index a4997506b41..270a8a8c716 100644
--- a/python/docs/source/reference/pyspark.errors.rst
+++ b/python/docs/source/reference/pyspark.errors.rst
@@ -48,6 +48,7 @@ Classes
     PySparkIndexError
     PythonException
     QueryExecutionException
+    RetriesExceeded
     SessionNotSameException
     SparkRuntimeException
     SparkUpgradeException
diff --git a/python/pyspark/errors/__init__.py b/python/pyspark/errors/__init__.py
index 07033d21643..a4f64e85f87 100644
--- a/python/pyspark/errors/__init__.py
+++ b/python/pyspark/errors/__init__.py
@@ -46,6 +46,7 @@ from pyspark.errors.exceptions.base import (  # noqa: F401
     PySparkAssertionError,
     PySparkNotImplementedError,
     PySparkPicklingError,
+    RetriesExceeded,
     PySparkKeyError,
 )
 
@@ -78,5 +79,6 @@ __all__ = [
     "PySparkAssertionError",
     "PySparkNotImplementedError",
     "PySparkPicklingError",
+    "RetriesExceeded",
     "PySparkKeyError",
 ]
diff --git a/python/pyspark/errors/error_classes.py b/python/pyspark/errors/error_classes.py
index c93ffa94149..965fd04a913 100644
--- a/python/pyspark/errors/error_classes.py
+++ b/python/pyspark/errors/error_classes.py
@@ -813,6 +813,11 @@ ERROR_CLASSES_JSON = """
       "Columns do not match in their data type: <mismatch>."
     ]
   },
+  "RETRIES_EXCEEDED" : {
+    "message" : [
+      "The maximum number of retries has been exceeded."
+    ]
+  },
   "SCHEMA_MISMATCH_FOR_PANDAS_UDF" : {
     "message" : [
       "Result vector from pandas_udf was not the required length: expected 
<expected>, got <actual>."
diff --git a/python/pyspark/errors/exceptions/base.py b/python/pyspark/errors/exceptions/base.py
index b7d8ed88ec0..b60800da3ff 100644
--- a/python/pyspark/errors/exceptions/base.py
+++ b/python/pyspark/errors/exceptions/base.py
@@ -260,6 +260,13 @@ class PySparkPicklingError(PySparkException, PicklingError):
     """
 
 
+class RetriesExceeded(PySparkException):
+    """
+    Represents an exception which is considered retriable, but retry limits
+    were exceeded
+    """
+
+
 class PySparkKeyError(PySparkException, KeyError):
     """
     Wrapper class for KeyError to support error classes.
diff --git a/python/pyspark/sql/connect/client/retries.py b/python/pyspark/sql/connect/client/retries.py
index 88fc3fe1ffd..44e5e1834a2 100644
--- a/python/pyspark/sql/connect/client/retries.py
+++ b/python/pyspark/sql/connect/client/retries.py
@@ -22,7 +22,7 @@ import typing
 from typing import Optional, Callable, Generator, List, Type
 from types import TracebackType
 from pyspark.sql.connect.client.logging import logger
-from pyspark.errors import PySparkRuntimeError
+from pyspark.errors import PySparkRuntimeError, RetriesExceeded
 
 """
 This module contains retry system. The system is designed to be
@@ -233,7 +233,7 @@ class Retrying:
 
         # Exceeded retries
         logger.debug(f"Given up on retrying. error: {repr(exception)}")
-        raise RetriesExceeded from exception
+        raise RetriesExceeded(error_class="RETRIES_EXCEEDED", message_parameters={}) from exception
 
     def __iter__(self) -> Generator[AttemptManager, None, None]:
         """
@@ -315,10 +315,3 @@ class DefaultPolicy(RetryPolicy):
             return True
 
         return False
-
-
-class RetriesExceeded(Exception):
-    """
-    Represents an exception which is considered retriable, but retry limits
-    were exceeded
-    """
diff --git a/python/pyspark/sql/tests/connect/client/test_client.py b/python/pyspark/sql/tests/connect/client/test_client.py
index 12e690c3a30..1ff3a8ea3e8 100644
--- a/python/pyspark/sql/tests/connect/client/test_client.py
+++ b/python/pyspark/sql/tests/connect/client/test_client.py
@@ -31,9 +31,9 @@ if should_test_connect:
     from pyspark.sql.connect.client.retries import (
         Retrying,
         DefaultPolicy,
-        RetriesExceeded,
     )
    from pyspark.sql.connect.client.reattach import ExecutePlanResponseReattachableIterator
+    from pyspark.errors import RetriesExceeded
     import pyspark.sql.connect.proto as proto
 
 
diff --git a/python/pyspark/sql/tests/connect/test_connect_basic.py b/python/pyspark/sql/tests/connect/test_connect_basic.py
index 5e0cf535391..2431b948f9d 100755
--- a/python/pyspark/sql/tests/connect/test_connect_basic.py
+++ b/python/pyspark/sql/tests/connect/test_connect_basic.py
@@ -31,10 +31,11 @@ from pyspark.errors import (
     PySparkTypeError,
     PySparkException,
     PySparkValueError,
+    RetriesExceeded,
 )
 from pyspark.errors.exceptions.base import SessionNotSameException
 from pyspark.sql import SparkSession as PySparkSession, Row
-from pyspark.sql.connect.client.retries import RetryPolicy, RetriesExceeded
+from pyspark.sql.connect.client.retries import RetryPolicy
 from pyspark.sql.types import (
     StructType,
     StructField,

