This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 1a197bdb61b [SPARK-41939][CONNECT][PYTHON] Add the unsupported list for `catalog` functions
1a197bdb61b is described below

commit 1a197bdb61baa951c938f96cd7805c9dc59e87d3
Author: Ruifeng Zheng <ruife...@apache.org>
AuthorDate: Sun Jan 8 16:51:30 2023 +0900

    [SPARK-41939][CONNECT][PYTHON] Add the unsupported list for `catalog` functions
    
    ### What changes were proposed in this pull request?
    Add the unsupported list for `catalog` functions in the Spark Connect Python client.
    
    ### Why are the changes needed?
    To explicitly tell users that these functions are not implemented.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes. Calling these `catalog` functions now raises `NotImplementedError`.
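    
    For illustration, a minimal sketch of the resulting behavior, assuming `spark` is a Spark Connect `SparkSession` (the session setup and the table name are placeholders, not part of this change):
    
    ```python
    # Hypothetical usage sketch: these Catalog methods are now explicit stubs
    # that raise NotImplementedError instead of being silently absent.
    try:
        spark.catalog.cacheTable("some_table")  # "some_table" is a placeholder
    except NotImplementedError as e:
        print(e)  # cacheTable() is not implemented.
    ```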
    
    ### How was this patch tested?
    Added a unit test.
    
    Closes #39455 from zhengruifeng/connect_catalog_unsupported_list.
    
    Authored-by: Ruifeng Zheng <ruife...@apache.org>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 python/pyspark/sql/connect/catalog.py                  | 14 +++++++++++++-
 python/pyspark/sql/tests/connect/test_connect_basic.py | 12 ++++++++++++
 2 files changed, 25 insertions(+), 1 deletion(-)

diff --git a/python/pyspark/sql/connect/catalog.py b/python/pyspark/sql/connect/catalog.py
index 790a1b8c000..ec22f9cf30d 100644
--- a/python/pyspark/sql/connect/catalog.py
+++ b/python/pyspark/sql/connect/catalog.py
@@ -14,7 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-from typing import List, Optional, TYPE_CHECKING
+from typing import Any, List, Optional, TYPE_CHECKING
 
 import pandas as pd
 
@@ -306,6 +306,18 @@ class Catalog:
 
     refreshByPath.__doc__ = PySparkCatalog.refreshByPath.__doc__
 
+    def isCached(self, *args: Any, **kwargs: Any) -> None:
+        raise NotImplementedError("isCached() is not implemented.")
+
+    def cacheTable(self, *args: Any, **kwargs: Any) -> None:
+        raise NotImplementedError("cacheTable() is not implemented.")
+
+    def uncacheTable(self, *args: Any, **kwargs: Any) -> None:
+        raise NotImplementedError("uncacheTable() is not implemented.")
+
+    def registerFunction(self, *args: Any, **kwargs: Any) -> None:
+        raise NotImplementedError("registerFunction() is not implemented.")
+
 
 Catalog.__doc__ = PySparkCatalog.__doc__
 
diff --git a/python/pyspark/sql/tests/connect/test_connect_basic.py b/python/pyspark/sql/tests/connect/test_connect_basic.py
index 3f82fdb4f4d..cf9472ad4c6 100644
--- a/python/pyspark/sql/tests/connect/test_connect_basic.py
+++ b/python/pyspark/sql/tests/connect/test_connect_basic.py
@@ -2066,6 +2066,18 @@ class SparkConnectBasicTests(SparkConnectSQLTestCase):
             with self.assertRaises(NotImplementedError):
                 getattr(self.connect, f)()
 
+    def test_unsupported_catalog_functions(self):
+        # SPARK-41939: Disable unsupported functions.
+
+        for f in (
+            "isCached",
+            "cacheTable",
+            "uncacheTable",
+            "registerFunction",
+        ):
+            with self.assertRaises(NotImplementedError):
+                getattr(self.connect.catalog, f)()
+
 
 @unittest.skipIf(not should_test_connect, connect_requirement_message)
 class ChannelBuilderTests(ReusedPySparkTestCase):

