This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 918552acad prevent templated field logic checks in operators __init__ (#36489)
918552acad is described below

commit 918552acad136128ea603d765d8be23d3f9bfcbd
Author: rom sharon <33751805+romsharo...@users.noreply.github.com>
AuthorDate: Fri Dec 29 22:22:12 2023 +0200

    prevent templated field logic checks in operators __init__ (#36489)
---
 .../providers/google/cloud/operators/dataproc.py   | 96 ++++++++++++++++++++--
 1 file changed, 90 insertions(+), 6 deletions(-)

diff --git a/airflow/providers/google/cloud/operators/dataproc.py b/airflow/providers/google/cloud/operators/dataproc.py
index ad47a9e151..6faea746e6 100644
--- a/airflow/providers/google/cloud/operators/dataproc.py
+++ b/airflow/providers/google/cloud/operators/dataproc.py
@@ -1297,6 +1297,12 @@ class DataprocSubmitPigJobOperator(DataprocJobBaseOperator):
         query: str | None = None,
         query_uri: str | None = None,
         variables: dict | None = None,
+        impersonation_chain: str | Sequence[str] | None = None,
+        region: str,
+        job_name: str = "{{task.task_id}}_{{ds_nodash}}",
+        cluster_name: str = "cluster-1",
+        dataproc_properties: dict | None = None,
+        dataproc_jars: list[str] | None = None,
         **kwargs,
     ) -> None:
         # TODO: Remove one day
@@ -1308,7 +1314,15 @@ class DataprocSubmitPigJobOperator(DataprocJobBaseOperator):
             stacklevel=1,
         )
 
-        super().__init__(**kwargs)
+        super().__init__(
+            impersonation_chain=impersonation_chain,
+            region=region,
+            job_name=job_name,
+            cluster_name=cluster_name,
+            dataproc_properties=dataproc_properties,
+            dataproc_jars=dataproc_jars,
+            **kwargs,
+        )
         self.query = query
         self.query_uri = query_uri
         self.variables = variables
@@ -1375,6 +1389,12 @@ class DataprocSubmitHiveJobOperator(DataprocJobBaseOperator):
         query: str | None = None,
         query_uri: str | None = None,
         variables: dict | None = None,
+        impersonation_chain: str | Sequence[str] | None = None,
+        region: str,
+        job_name: str = "{{task.task_id}}_{{ds_nodash}}",
+        cluster_name: str = "cluster-1",
+        dataproc_properties: dict | None = None,
+        dataproc_jars: list[str] | None = None,
         **kwargs,
     ) -> None:
         # TODO: Remove one day
@@ -1386,7 +1406,15 @@ class DataprocSubmitHiveJobOperator(DataprocJobBaseOperator):
             stacklevel=1,
         )
 
-        super().__init__(**kwargs)
+        super().__init__(
+            impersonation_chain=impersonation_chain,
+            region=region,
+            job_name=job_name,
+            cluster_name=cluster_name,
+            dataproc_properties=dataproc_properties,
+            dataproc_jars=dataproc_jars,
+            **kwargs,
+        )
         self.query = query
         self.query_uri = query_uri
         self.variables = variables
@@ -1454,6 +1482,12 @@ class DataprocSubmitSparkSqlJobOperator(DataprocJobBaseOperator):
         query: str | None = None,
         query_uri: str | None = None,
         variables: dict | None = None,
+        impersonation_chain: str | Sequence[str] | None = None,
+        region: str,
+        job_name: str = "{{task.task_id}}_{{ds_nodash}}",
+        cluster_name: str = "cluster-1",
+        dataproc_properties: dict | None = None,
+        dataproc_jars: list[str] | None = None,
         **kwargs,
     ) -> None:
         # TODO: Remove one day
@@ -1465,7 +1499,15 @@ class DataprocSubmitSparkSqlJobOperator(DataprocJobBaseOperator):
             stacklevel=1,
         )
 
-        super().__init__(**kwargs)
+        super().__init__(
+            impersonation_chain=impersonation_chain,
+            region=region,
+            job_name=job_name,
+            cluster_name=cluster_name,
+            dataproc_properties=dataproc_properties,
+            dataproc_jars=dataproc_jars,
+            **kwargs,
+        )
         self.query = query
         self.query_uri = query_uri
         self.variables = variables
@@ -1535,6 +1577,12 @@ class DataprocSubmitSparkJobOperator(DataprocJobBaseOperator):
         arguments: list | None = None,
         archives: list | None = None,
         files: list | None = None,
+        impersonation_chain: str | Sequence[str] | None = None,
+        region: str,
+        job_name: str = "{{task.task_id}}_{{ds_nodash}}",
+        cluster_name: str = "cluster-1",
+        dataproc_properties: dict | None = None,
+        dataproc_jars: list[str] | None = None,
         **kwargs,
     ) -> None:
         # TODO: Remove one day
@@ -1546,7 +1594,15 @@ class DataprocSubmitSparkJobOperator(DataprocJobBaseOperator):
             stacklevel=1,
         )
 
-        super().__init__(**kwargs)
+        super().__init__(
+            impersonation_chain=impersonation_chain,
+            region=region,
+            job_name=job_name,
+            cluster_name=cluster_name,
+            dataproc_properties=dataproc_properties,
+            dataproc_jars=dataproc_jars,
+            **kwargs,
+        )
         self.main_jar = main_jar
         self.main_class = main_class
         self.arguments = arguments
@@ -1612,6 +1668,12 @@ class DataprocSubmitHadoopJobOperator(DataprocJobBaseOperator):
         arguments: list | None = None,
         archives: list | None = None,
         files: list | None = None,
+        impersonation_chain: str | Sequence[str] | None = None,
+        region: str,
+        job_name: str = "{{task.task_id}}_{{ds_nodash}}",
+        cluster_name: str = "cluster-1",
+        dataproc_properties: dict | None = None,
+        dataproc_jars: list[str] | None = None,
         **kwargs,
     ) -> None:
         # TODO: Remove one day
@@ -1623,7 +1685,15 @@ class DataprocSubmitHadoopJobOperator(DataprocJobBaseOperator):
             stacklevel=1,
         )
 
-        super().__init__(**kwargs)
+        super().__init__(
+            impersonation_chain=impersonation_chain,
+            region=region,
+            job_name=job_name,
+            cluster_name=cluster_name,
+            dataproc_properties=dataproc_properties,
+            dataproc_jars=dataproc_jars,
+            **kwargs,
+        )
         self.main_jar = main_jar
         self.main_class = main_class
         self.arguments = arguments
@@ -1712,6 +1782,12 @@ class DataprocSubmitPySparkJobOperator(DataprocJobBaseOperator):
         archives: list | None = None,
         pyfiles: list | None = None,
         files: list | None = None,
+        impersonation_chain: str | Sequence[str] | None = None,
+        region: str,
+        job_name: str = "{{task.task_id}}_{{ds_nodash}}",
+        cluster_name: str = "cluster-1",
+        dataproc_properties: dict | None = None,
+        dataproc_jars: list[str] | None = None,
         **kwargs,
     ) -> None:
         # TODO: Remove one day
@@ -1723,7 +1799,15 @@ class DataprocSubmitPySparkJobOperator(DataprocJobBaseOperator):
             stacklevel=1,
         )
 
-        super().__init__(**kwargs)
+        super().__init__(
+            impersonation_chain=impersonation_chain,
+            region=region,
+            job_name=job_name,
+            cluster_name=cluster_name,
+            dataproc_properties=dataproc_properties,
+            dataproc_jars=dataproc_jars,
+            **kwargs,
+        )
         self.main = main
         self.arguments = arguments
         self.archives = archives

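For context, a minimal sketch of the pattern this commit applies to each of the six operators above (toy class names, not part of the commit): Airflow renders each name listed in an operator's template_fields from the same-named instance attribute, so the templated parameters are declared as explicit keyword arguments and forwarded to the base class rather than being left inside **kwargs.

    from __future__ import annotations

    from typing import Sequence


    class JobBase:
        """Stand-in for DataprocJobBaseOperator: assigns the templated values."""

        def __init__(self, *, region: str, job_name: str, **kwargs) -> None:
            self.region = region
            self.job_name = job_name


    class SubmitQueryJob(JobBase):
        """Stand-in for the DataprocSubmit*JobOperator subclasses above."""

        # Each name here is rendered from the same-named instance attribute.
        template_fields: Sequence[str] = ("query", "region", "job_name")

        def __init__(
            self,
            *,
            query: str | None = None,
            region: str,
            job_name: str = "{{task.task_id}}_{{ds_nodash}}",
            **kwargs,
        ) -> None:
            # Forward the templated parameters explicitly rather than hiding
            # them in **kwargs, mirroring the super().__init__ calls above.
            super().__init__(region=region, job_name=job_name, **kwargs)
            self.query = query


    # After __init__, every templated field exists as a real attribute:
    job = SubmitQueryJob(query="SELECT 1", region="us-central1")
    assert all(hasattr(job, f) for f in SubmitQueryJob.template_fields)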