GaetanLepage commented on issue #33854:
URL: https://github.com/apache/beam/issues/33854#issuecomment-3321807582
On apache-beam 2.68.0, we have one occurrence left:
```
______________ PipelineTest.test_runner_overrides_default_pickler ______________
[gw40] linux -- Python 3.13.7 /nix/store/829wb290i87wngxlh404klwxql5v18p4-python3-3.13.7/bin/python3.13

self = <apache_beam.pipeline_test.PipelineTest testMethod=test_runner_overrides_default_pickler>
mock_info = <MagicMock name='info' id='140719519900944'>

    @mock.patch('logging.info')
    def test_runner_overrides_default_pickler(self, mock_info):
      with mock.patch.object(PipelineRunner,
                             'default_pickle_library_override') as mock_fn:
        mock_fn.return_value = 'dill'
>       with TestPipeline() as pipeline:
             ^^^^^^^^^^^^^^

apache_beam/pipeline_test.py:184:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
apache_beam/pipeline.py:666: in __exit__
    self.result = self.run()
                  ^^^^^^^^^^
apache_beam/testing/test_pipeline.py:118: in run
    result = super().run(
apache_beam/pipeline.py:613: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:1103: in from_runner_api
    p.transforms_stack = [context.transforms.get_by_id(root_transform_id)]
                          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
apache_beam/runners/pipeline_context.py:106: in get_by_id
    self._id_to_obj[id] = self._obj_type.from_runner_api(
apache_beam/pipeline.py:1530: in from_runner_api
    part = context.transforms.get_by_id(transform_id)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
apache_beam/runners/pipeline_context.py:106: in get_by_id
    self._id_to_obj[id] = self._obj_type.from_runner_api(
apache_beam/pipeline.py:1530: in from_runner_api
    part = context.transforms.get_by_id(transform_id)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
apache_beam/runners/pipeline_context.py:106: in get_by_id
    self._id_to_obj[id] = self._obj_type.from_runner_api(
apache_beam/pipeline.py:1530: in from_runner_api
    part = context.transforms.get_by_id(transform_id)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
apache_beam/runners/pipeline_context.py:106: in get_by_id
    self._id_to_obj[id] = self._obj_type.from_runner_api(
apache_beam/pipeline.py:1516: in from_runner_api
    result = AppliedPTransform(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = AppliedPTransform(assert_that/Create/MaybeReshuffle, MaybeReshuffle)
parent = None
transform = <apache_beam.transforms.core.Create.expand.<locals>.MaybeReshuffle object at 0x7ffbd03a6900>
full_label = 'assert_that/Create/MaybeReshuffle'
main_inputs = {'None': <PCollection[assert_that/Create/FlatMap(<lambda at core.py:4094>).None] at 0x7ffbd041bdd0>}
environment_id = None
annotations = {'python_type': b'apache_beam.transforms.core.Create.expand.<locals>.MaybeReshuffle'}

    def __init__(
        self,
        parent,  # type: Optional[AppliedPTransform]
        transform,  # type: Optional[ptransform.PTransform]
        full_label,  # type: str
        main_inputs,  # type: Optional[Mapping[str, Union[pvalue.PBegin, pvalue.PCollection]]]
        environment_id,  # type: Optional[str]
        annotations,  # type: Optional[Dict[str, bytes]]
    ):
      # type: (...) -> None
      self.parent = parent
      self.transform = transform
      # Note that we want the PipelineVisitor classes to use the full_label,
      # inputs, side_inputs, and outputs fields from this instance instead of the
      # ones of the PTransform instance associated with it. Doing this permits
      # reusing PTransform instances in different contexts (apply() calls) without
      # any interference. This is particularly useful for composite transforms.
      self.full_label = full_label
      self.main_inputs = dict(main_inputs or {})
>     self.side_inputs = tuple() if transform is None else transform.side_inputs
                                                           ^^^^^^^^^^^^^^^^^^^^^
E     AttributeError: 'MaybeReshuffle' object has no attribute 'side_inputs'

apache_beam/pipeline.py:1229: AttributeError
```
https://github.com/NixOS/nixpkgs/pull/445349
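
For anyone who wants to try this outside the nixpkgs build, here is a rough standalone sketch adapted from the failing test (`apache_beam/pipeline_test.py:184`). It is unverified; the import paths for `PipelineRunner`, `TestPipeline`, and `assert_that`/`equal_to` are the usual ones from a stock apache-beam install, and the patched attribute name is taken straight from the traceback above:

```python
# Rough repro sketch (unverified), adapted from
# PipelineTest.test_runner_overrides_default_pickler in the traceback above.
# Assumes apache-beam 2.68.0 installed; run as a plain Python script.
from unittest import mock

import apache_beam as beam
from apache_beam.runners.runner import PipelineRunner
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that, equal_to

# Make the runner report 'dill' as its preferred pickle library, as the test does.
with mock.patch.object(PipelineRunner,
                       'default_pickle_library_override') as mock_fn:
  mock_fn.return_value = 'dill'
  # Running any pipeline that uses assert_that should then hit the
  # AttributeError on MaybeReshuffle.side_inputs during from_runner_api().
  with TestPipeline() as pipeline:
    pcoll = pipeline | beam.Create([1, 2, 3])
    assert_that(pcoll, equal_to([1, 2, 3]))
```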