pierrejeambrun commented on a change in pull request #20386:
URL: https://github.com/apache/airflow/pull/20386#discussion_r777540009



##########
File path: airflow/providers/apache/beam/operators/beam.py
##########
@@ -96,7 +100,107 @@ def set_current_dataflow_job_id(job_id):
         )
 
 
-class BeamRunPythonPipelineOperator(BaseOperator, BeamDataflowMixin):
+class BeamBasePipelineOperator(BaseOperator, BeamDataflowMixin, ABC):
+    """
+    Abstract base class for Beam Pipeline Operators.
+
+    :param runner: Runner on which the pipeline will be run. Defaults to "DirectRunner".
+        Other possible options: DataflowRunner, SparkRunner, FlinkRunner.
+        See: :class:`~providers.apache.beam.hooks.beam.BeamRunnerType`
+        See: https://beam.apache.org/documentation/runners/capability-matrix/
+
+    :type runner: str
+    :param default_pipeline_options: Map of default pipeline options.
+    :type default_pipeline_options: dict
+    :param pipeline_options: Map of pipeline options, passed as a dictionary.
+        The values may be of different types:
+
+        * If the value is None, the single option ``--key`` (without a value) will be added.
+        * If the value is False, this option will be skipped.
+        * If the value is True, the single option ``--key`` (without a value) will be added.
+        * If the value is a list, the option will be repeated for each element.
+          For the value ``['A', 'B']`` and the key ``key``, the options
+          ``--key=A --key=B`` will be added.
+        * Other value types will be replaced with their Python textual representation.
+
+        When defining labels (``labels`` option), you can also provide a dictionary.
+    :type pipeline_options: dict
+    :param gcp_conn_id: Optional.
+        The connection ID to use when connecting to Google Cloud Storage
+        if the Python file is on GCS.
+    :type gcp_conn_id: str
+    :param delegate_to: Optional.
+        The account to impersonate using domain-wide delegation of authority,
+        if any. For this to work, the service account making the request must have
+        domain-wide delegation enabled.
+    :type delegate_to: str
+    :param dataflow_config: Optional. Dataflow configuration, used when the
+        runner is set to DataflowRunner. Defaults to None.
+    :type dataflow_config: DataflowConfiguration
+    """
+
+    def __init__(
+        self,
+        *,
+        runner: str = "DirectRunner",
+        default_pipeline_options: Optional[dict] = None,
+        pipeline_options: Optional[dict] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        delegate_to: Optional[str] = None,
+        dataflow_config: Optional[Union[DataflowConfiguration, dict]] = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.runner = runner
+        self.default_pipeline_options = default_pipeline_options or {}
+        self.pipeline_options = pipeline_options or {}
+        self.pipeline_options.setdefault("labels", {}).update(
+            {"airflow-version": "v" + version.replace(".", "-").replace("+", 
"-")}
+        )
+        self.gcp_conn_id = gcp_conn_id
+        self.delegate_to = delegate_to
+        if isinstance(dataflow_config, dict):
+            self.dataflow_config = DataflowConfiguration(**dataflow_config)
+        else:
+            self.dataflow_config = dataflow_config or DataflowConfiguration()
+        self.beam_hook: Optional[BeamHook] = None
+        self.dataflow_hook: Optional[DataflowHook] = None
+        self.dataflow_job_id: Optional[str] = None
+
+        if self.dataflow_config and self.runner.lower() != BeamRunnerType.DataflowRunner.lower():
+            self.log.warning(
+                "dataflow_config is defined but runner is different than 
DataflowRunner (%s)", self.runner
+            )
+
+    def _init_pipeline_options(
+        self,
+        format_pipeline_options: bool = False,
+        job_name_variable_key: Optional[str] = None,
+    ) -> Tuple[bool, Optional[str], dict, Optional[Callable[[str], None]]]:
+        self.beam_hook = BeamHook(runner=self.runner)
+        pipeline_options = self.default_pipeline_options.copy()
+        process_line_callback: Optional[Callable[[str], None]] = None
+        is_dataflow = self.runner.lower() == BeamRunnerType.DataflowRunner.lower()
+        dataflow_job_name: Optional[str] = None
+
+        if is_dataflow:
+            dataflow_job_name, pipeline_options, process_line_callback = self._set_dataflow(
+                pipeline_options=pipeline_options,
+                job_name_variable_key=job_name_variable_key,
+            )
+
+        pipeline_options.update(self.pipeline_options)
+
+        formatted_pipeline_options = pipeline_options
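
For reference, the option-to-flag rules described in the docstring above could be
sketched roughly like this (illustration only, not the provider's actual converter):

    from typing import Any, Dict, List

    def options_to_args(options: Dict[str, Any]) -> List[str]:
        """Render a pipeline_options-style dict as CLI arguments (sketch)."""
        args: List[str] = []
        for key, value in options.items():
            if value is None or value is True:
                args.append(f"--{key}")  # bare flag, no value
            elif value is False:
                continue  # option skipped entirely
            elif isinstance(value, list):
                # repeated option, one flag per element
                args.extend(f"--{key}={item}" for item in value)
            else:
                args.append(f"--{key}={value}")  # textual representation
        return args

    print(options_to_args({"streaming": True, "labels": ["a", "b"], "workers": 2}))
    # ['--streaming', '--labels=a', '--labels=b', '--workers=2']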
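And a hypothetical task definition exercising the new dict form of dataflow_config
(task id, bucket, job name, and option values are made up for illustration):

    from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator

    run_beam = BeamRunPythonPipelineOperator(
        task_id="run_beam_pipeline",
        py_file="gs://my-bucket/pipelines/wordcount.py",
        runner="DataflowRunner",
        pipeline_options={"temp_location": "gs://my-bucket/tmp"},
        # a plain dict is coerced to DataflowConfiguration in __init__ above
        dataflow_config={"job_name": "wordcount", "location": "us-central1"},
    )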

Review comment:
       done




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@airflow.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

