This is an automated email from the ASF dual-hosted git repository.

jrmccluskey pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/master by this push:
     new a15de20849a Remove standalone uses of the apitools HttpError class (#37296)
a15de20849a is described below

commit a15de20849a6c8b429e13e2bce1be10afb5218c5
Author: Jack McCluskey <[email protected]>
AuthorDate: Thu Jan 15 11:58:42 2026 -0500

    Remove standalone uses of the apitools HttpError class (#37296)
    
    * Remove standalone uses of the apitools HttpError class
    
    * Update sdks/python/apache_beam/testing/pipeline_verifiers_test.py
    
    Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
    
    * linting and formatting
    
    * more linting and formatting
    
    * even more linting
    
    ---------
    
    Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
---
 sdks/python/apache_beam/dataframe/io_it_test.py         |  7 ++++---
 sdks/python/apache_beam/dataframe/io_test.py            |  7 ++++---
 .../apache_beam/io/gcp/datastore/v1new/datastoreio.py   |  8 --------
 .../python/apache_beam/io/gcp/experimental/spannerio.py | 11 -----------
 sdks/python/apache_beam/testing/pipeline_verifiers.py   |  6 +++---
 .../apache_beam/testing/pipeline_verifiers_test.py      | 17 ++++++++---------
 6 files changed, 19 insertions(+), 37 deletions(-)
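
The change follows one pattern across all of these files: the optional apitools import guard is swapped for the equivalent google-api-core guard, and call sites that previously caught HttpError now rely on GoogleAPICallError (or a concrete subclass such as NotFound in the tests). A minimal before/after sketch of that pattern, independent of any particular Beam module:

  # Before (apitools):
  try:
    from apitools.base.py.exceptions import HttpError
  except ImportError:
    HttpError = None

  # After (google-api-core):
  try:
    from google.api_core.exceptions import GoogleAPICallError
  except ImportError:
    GoogleAPICallError = None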

diff --git a/sdks/python/apache_beam/dataframe/io_it_test.py b/sdks/python/apache_beam/dataframe/io_it_test.py
index 9f750e2ff58..da88ffb5476 100644
--- a/sdks/python/apache_beam/dataframe/io_it_test.py
+++ b/sdks/python/apache_beam/dataframe/io_it_test.py
@@ -33,12 +33,13 @@ from apache_beam.testing.util import equal_to
 _LOGGER = logging.getLogger(__name__)
 
 try:
-  from apitools.base.py.exceptions import HttpError
+  from google.api_core.exceptions import GoogleAPICallError
 except ImportError:
-  HttpError = None
+  GoogleAPICallError = None
 
 
[email protected](HttpError is None, 'GCP dependencies are not installed')
[email protected](
+    GoogleAPICallError is None, 'GCP dependencies are not installed')
 class ReadUsingReadGbqTests(unittest.TestCase):
   @pytest.mark.it_postcommit
   def test_ReadGbq(self):
diff --git a/sdks/python/apache_beam/dataframe/io_test.py b/sdks/python/apache_beam/dataframe/io_test.py
index 92bb10225c7..313d955b455 100644
--- a/sdks/python/apache_beam/dataframe/io_test.py
+++ b/sdks/python/apache_beam/dataframe/io_test.py
@@ -47,9 +47,9 @@ from apache_beam.testing.util import assert_that
 from apache_beam.testing.util import equal_to
 
 try:
-  from apitools.base.py.exceptions import HttpError
+  from google.api_core.exceptions import GoogleAPICallError
 except ImportError:
-  HttpError = None
+  GoogleAPICallError = None
 
 # Get major, minor version
 PD_VERSION = tuple(map(int, pd.__version__.split('.')[0:2]))
@@ -440,7 +440,8 @@ X     , c1, c2
                           set(self.read_all_lines(output + 'out2.csv*')))
 
 
[email protected](HttpError is None, 'GCP dependencies are not installed')
[email protected](
+    GoogleAPICallError is None, 'GCP dependencies are not installed')
 class ReadGbqTransformTests(unittest.TestCase):
   @mock.patch.object(BigQueryWrapper, 'get_table')
   def test_bad_schema_public_api_direct_read(self, get_table):
diff --git a/sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio.py b/sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio.py
index 6f870b7cfeb..32b79c8f10f 100644
--- a/sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio.py
+++ b/sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio.py
@@ -53,7 +53,6 @@ from apache_beam.utils import retry
 # Protect against environments where datastore library is not available.
 # pylint: disable=wrong-import-order, wrong-import-position
 try:
-  from apitools.base.py.exceptions import HttpError
   from google.api_core.exceptions import ClientError
   from google.api_core.exceptions import GoogleAPICallError
 except ImportError:
@@ -309,9 +308,6 @@ class ReadFromDatastore(PTransform):
         # e.code.value contains the numeric http status code.
         service_call_metric.call(e.code.value)
         raise
-      except HttpError as e:
-        service_call_metric.call(e)
-        raise
 
 
 class _Mutate(PTransform):
@@ -469,10 +465,6 @@ class _Mutate(PTransform):
         service_call_metric.call(e.code.value)
         rpc_stats_callback(errors=1)
         raise
-      except HttpError as e:
-        service_call_metric.call(e)
-        rpc_stats_callback(errors=1)
-        raise
 
     def process(self, element):
       client_element = self.element_to_client_batch_item(element)
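
The surviving ClientError/GoogleAPICallError handlers report the numeric HTTP status via e.code.value, which is what the removed HttpError branches duplicated. A standalone illustration of that value (not part of the patch; NotFound is used here only as an example GoogleAPICallError subclass):

  from google.api_core.exceptions import GoogleAPICallError, NotFound

  try:
    raise NotFound('entity missing')  # google-cloud clients raise api_core exceptions
  except GoogleAPICallError as e:
    print(e.code.value)  # 404 -- the numeric status passed to service_call_metric.call()
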
diff --git a/sdks/python/apache_beam/io/gcp/experimental/spannerio.py b/sdks/python/apache_beam/io/gcp/experimental/spannerio.py
index c94c43a637d..04800ff015c 100644
--- a/sdks/python/apache_beam/io/gcp/experimental/spannerio.py
+++ b/sdks/python/apache_beam/io/gcp/experimental/spannerio.py
@@ -196,7 +196,6 @@ from apache_beam.utils.annotations import deprecated
 # pylint: disable=wrong-import-order, wrong-import-position, ungrouped-imports
 # pylint: disable=unused-import
 try:
-  from apitools.base.py.exceptions import HttpError
   from google.api_core.exceptions import ClientError
   from google.api_core.exceptions import GoogleAPICallError
   from google.cloud.spanner import Client
@@ -437,9 +436,6 @@ class _NaiveSpannerReadDoFn(DoFn):
       except (ClientError, GoogleAPICallError) as e:
         metric_action(metric_id, e.code.value)
         raise
-      except HttpError as e:
-        metric_action(metric_id, e)
-        raise
 
 
 @with_input_types(ReadOperation)
@@ -668,9 +664,6 @@ class _ReadFromPartitionFn(DoFn):
     except (ClientError, GoogleAPICallError) as e:
       self.service_metric(str(e.code.value))
       raise
-    except HttpError as e:
-      self.service_metric(str(e))
-      raise
 
   def teardown(self):
     if self._snapshot:
@@ -1271,10 +1264,6 @@ class _WriteToSpannerDoFn(DoFn):
       for service_metric in self.service_metrics.values():
         service_metric.call(str(e.code.value))
       raise
-    except HttpError as e:
-      for service_metric in self.service_metrics.values():
-        service_metric.call(str(e))
-      raise
     else:
       for service_metric in self.service_metrics.values():
         service_metric.call('ok')
diff --git a/sdks/python/apache_beam/testing/pipeline_verifiers.py b/sdks/python/apache_beam/testing/pipeline_verifiers.py
index 225e6d0dbae..01929420a23 100644
--- a/sdks/python/apache_beam/testing/pipeline_verifiers.py
+++ b/sdks/python/apache_beam/testing/pipeline_verifiers.py
@@ -41,9 +41,9 @@ __all__ = [
 ]
 
 try:
-  from apitools.base.py.exceptions import HttpError
+  from google.api_core.exceptions import GoogleAPICallError
 except ImportError:
-  HttpError = None
+  GoogleAPICallError = None  # type: ignore
 
 MAX_RETRIES = 4
 
@@ -76,7 +76,7 @@ class PipelineStateMatcher(BaseMatcher):
 def retry_on_io_error_and_server_error(exception):
   """Filter allowing retries on file I/O errors and service error."""
   return isinstance(exception, IOError) or \
-          (HttpError is not None and isinstance(exception, HttpError))
+          (GoogleAPICallError is not None and isinstance(exception, GoogleAPICallError)) # pylint: disable=line-too-long
 
 
 class FileChecksumMatcher(BaseMatcher):
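
With this filter in place, any GoogleAPICallError subclass still triggers a retry, just as IOError always has. A quick sketch of the expected behaviour, assuming the helper is imported directly from the module shown above:

  from google.api_core.exceptions import NotFound
  from apache_beam.testing.pipeline_verifiers import retry_on_io_error_and_server_error

  # NotFound subclasses GoogleAPICallError, so it is retried as HttpError was before.
  assert retry_on_io_error_and_server_error(NotFound('missing object'))
  assert retry_on_io_error_and_server_error(IOError('transient read failure'))
  assert not retry_on_io_error_and_server_error(ValueError('not retried'))
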
diff --git a/sdks/python/apache_beam/testing/pipeline_verifiers_test.py b/sdks/python/apache_beam/testing/pipeline_verifiers_test.py
index cc286c33aaa..35ca27d452e 100644
--- a/sdks/python/apache_beam/testing/pipeline_verifiers_test.py
+++ b/sdks/python/apache_beam/testing/pipeline_verifiers_test.py
@@ -37,11 +37,13 @@ from apache_beam.testing.test_utils import patch_retry
 try:
   # pylint: disable=wrong-import-order, wrong-import-position
   # pylint: disable=ungrouped-imports
-  from apitools.base.py.exceptions import HttpError
+  from google.api_core.exceptions import GoogleAPICallError
+  from google.api_core.exceptions import NotFound
 
   from apache_beam.io.gcp.gcsfilesystem import GCSFileSystem
 except ImportError:
-  HttpError = None
+  GoogleAPICallError = None  # type: ignore
+  NotFound = None  # type: ignore
   GCSFileSystem = None  # type: ignore
 
 
@@ -122,15 +124,12 @@ class PipelineVerifiersTest(unittest.TestCase):
     self.assertEqual(verifiers.MAX_RETRIES + 1, mock_match.call_count)
 
   @patch.object(GCSFileSystem, 'match')
-  @unittest.skipIf(HttpError is None, 'google-apitools is not installed')
+  @unittest.skipIf(
+      GoogleAPICallError is None, 'GCP dependencies are not installed')
   def test_file_checksum_matcher_service_error(self, mock_match):
-    mock_match.side_effect = HttpError(
-        response={'status': '404'},
-        url='',
-        content='Not Found',
-    )
+    mock_match.side_effect = NotFound('Not Found')
     matcher = verifiers.FileChecksumMatcher('gs://dummy/path', Mock())
-    with self.assertRaises(HttpError):
+    with self.assertRaises(NotFound):
       hc_assert_that(self._mock_result, matcher)
     self.assertTrue(mock_match.called)
     self.assertEqual(verifiers.MAX_RETRIES + 1, mock_match.call_count)
