e-galan commented on code in PR #37693:
URL: https://github.com/apache/airflow/pull/37693#discussion_r1555475918


##########
airflow/providers/google/cloud/triggers/dataflow.py:
##########
@@ -142,3 +152,496 @@ def _get_async_hook(self) -> AsyncDataflowHook:
             impersonation_chain=self.impersonation_chain,
             cancel_timeout=self.cancel_timeout,
         )
+
+
+class DataflowJobStatusTrigger(BaseTrigger):
+    """
+    Trigger that checks whether a Dataflow job has reached any of the expected statuses.
+
+    :param job_id: Required. ID of the job.
+    :param expected_statuses: The expected state(s) of the operation.
+        See: https://cloud.google.com/dataflow/docs/reference/rest/v1b3/projects.jobs#Job.JobState
+    :param project_id: Required. The Google Cloud project ID in which the job was started.
+    :param location: Optional. The location where the job is executed. If set to None then
+        the value of DEFAULT_DATAFLOW_LOCATION will be used.
+    :param gcp_conn_id: The connection ID to use for connecting to Google Cloud.
+    :param poll_sleep: Time (seconds) to wait between two consecutive calls to check the job.
+    :param impersonation_chain: Optional. Service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    def __init__(
+        self,
+        job_id: str,
+        expected_statuses: set[str],
+        project_id: str | None,
+        location: str = DEFAULT_DATAFLOW_LOCATION,
+        gcp_conn_id: str = "google_cloud_default",
+        poll_sleep: int = 10,
+        impersonation_chain: str | Sequence[str] | None = None,
+    ):
+        super().__init__()
+        self.job_id = job_id
+        self.expected_statuses = expected_statuses
+        self.project_id = project_id
+        self.location = location
+        self.gcp_conn_id = gcp_conn_id
+        self.poll_sleep = poll_sleep
+        self.impersonation_chain = impersonation_chain
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        """Serialize class arguments and classpath."""
+        return (
+            "airflow.providers.google.cloud.triggers.dataflow.DataflowJobStatusTrigger",
+            {
+                "job_id": self.job_id,
+                "expected_statuses": self.expected_statuses,
+                "project_id": self.project_id,
+                "location": self.location,
+                "gcp_conn_id": self.gcp_conn_id,
+                "poll_sleep": self.poll_sleep,
+                "impersonation_chain": self.impersonation_chain,
+            },
+        )
+
+    async def run(self):
+        """
+        Loop until the job reaches an expected or terminal state.
+
+        Yields a TriggerEvent with success status, if the client returns an expected job status.
+
+        Yields a TriggerEvent with error status, if the client returns an unexpected terminal
+        job status or any exception is raised while looping.
+
+        In any other case the Trigger will wait for the amount of time
+        stored in the self.poll_sleep variable.
+        """
+        try:
+            while True:
+                job_status = await self.async_hook.get_job_status(
+                    job_id=self.job_id,
+                    project_id=self.project_id,
+                    location=self.location,
+                )
+                if job_status.name in self.expected_statuses:
+                    yield TriggerEvent(
+                        {
+                            "status": "success",
+                            "message": f"Job with id '{self.job_id}' has 
reached an expected state: {job_status.name}",
+                        }
+                    )
+                    return
+                elif job_status.name in DataflowJobStatus.TERMINAL_STATES:
+                    yield TriggerEvent(
+                        {
+                            "status": "error",
+                            "message": f"Job with id '{self.job_id}' is 
already in terminal state: {job_status.name}",
+                        }
+                    )
+                    return
+                self.log.info("Sleeping for %s seconds.", self.poll_sleep)
+                await asyncio.sleep(self.poll_sleep)
+        except Exception as e:
+            self.log.error("Exception occurred while checking for job status!")
+            yield TriggerEvent(
+                {
+                    "status": "error",
+                    "message": str(e),
+                }
+            )
+
+    @cached_property
+    def async_hook(self) -> AsyncDataflowHook:
+        return AsyncDataflowHook(
+            gcp_conn_id=self.gcp_conn_id,
+            poll_sleep=self.poll_sleep,
+            impersonation_chain=self.impersonation_chain,
+        )
+
+
+class DataflowJobMetricsTrigger(BaseTrigger):
+    """
+    Trigger that checks for metrics associated with a Dataflow job.
+
+    :param job_id: Required. ID of the job.
+    :param project_id: Required. The Google Cloud project ID in which the job was started.
+    :param location: Optional. The location where the job is executed. If set to None then
+        the value of DEFAULT_DATAFLOW_LOCATION will be used.
+    :param gcp_conn_id: The connection ID to use for connecting to Google Cloud.
+    :param poll_sleep: Time (seconds) to wait between two consecutive calls to check the job.
+    :param impersonation_chain: Optional. Service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    :param fail_on_terminal_state: If set to True the trigger will yield a TriggerEvent with
+        error status if the job reaches a terminal state.
+    """
+
+    def __init__(
+        self,
+        job_id: str,
+        project_id: str | None,
+        location: str = DEFAULT_DATAFLOW_LOCATION,
+        gcp_conn_id: str = "google_cloud_default",
+        poll_sleep: int = 10,
+        impersonation_chain: str | Sequence[str] | None = None,
+        fail_on_terminal_state: bool = True,
+    ):
+        super().__init__()
+        self.project_id = project_id
+        self.job_id = job_id
+        self.location = location
+        self.gcp_conn_id = gcp_conn_id
+        self.poll_sleep = poll_sleep
+        self.impersonation_chain = impersonation_chain
+        self.fail_on_terminal_state = fail_on_terminal_state
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        """Serialize class arguments and classpath."""
+        return (
+            "airflow.providers.google.cloud.triggers.dataflow.DataflowJobMetricsTrigger",
+            {
+                "project_id": self.project_id,
+                "job_id": self.job_id,
+                "location": self.location,
+                "gcp_conn_id": self.gcp_conn_id,
+                "poll_sleep": self.poll_sleep,
+                "impersonation_chain": self.impersonation_chain,
+                "fail_on_terminal_state": self.fail_on_terminal_state,
+            },
+        )
+
+    async def run(self):
+        """
+        Loop until a terminal job status or any job metrics are returned.
+
+        Yields a TriggerEvent with success status, if the client returns any job metrics
+        and the fail_on_terminal_state attribute is False.
+
+        Yields a TriggerEvent with error status, if the client returns a job status with
+        a terminal state value and the fail_on_terminal_state attribute is True.
+
+        Yields a TriggerEvent with error status, if any exception is raised while looping.
+
+        In any other case the Trigger will wait for the amount of time
+        stored in the self.poll_sleep variable.
+        """
+        try:
+            while True:
+                job_status = await self.async_hook.get_job_status(
+                    job_id=self.job_id,
+                    project_id=self.project_id,
+                    location=self.location,
+                )
+                job_metrics = await self.get_job_metrics()
+                if self.fail_on_terminal_state:
+                    if job_status.name in DataflowJobStatus.TERMINAL_STATES:

Review Comment:
   Makes sense, moved it into a method.
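
   Since the diff above is truncated just before the extracted helper, here is a minimal sketch of what a `get_job_metrics` method on the trigger could look like. It assumes `AsyncDataflowHook` exposes a `get_job_metrics` call mirroring `get_job_status` and returning a Dataflow `JobMetrics` proto; neither assumption is confirmed by the visible diff.

   ```python
   from typing import Any

   from airflow.triggers.base import BaseTrigger
   from google.protobuf.json_format import MessageToDict


   class DataflowJobMetricsTrigger(BaseTrigger):
       # ... __init__ / serialize / run as in the diff above ...

       async def get_job_metrics(self) -> list[dict[str, Any]]:
           """Fetch the job metrics and flatten each MetricUpdate proto into a dict."""
           # Assumed hook method: mirrors get_job_status's signature (hypothetical).
           job_metrics = await self.async_hook.get_job_metrics(
               job_id=self.job_id,
               project_id=self.project_id,
               location=self.location,
           )
           # MetricUpdate is a proto-plus wrapper, so MessageToDict needs the raw _pb.
           return [MessageToDict(metric._pb) for metric in job_metrics.metrics]
   ```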


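   A side note on the `serialize` contract shown in both classes: the triggerer re-creates the trigger from the returned classpath and kwargs. Airflow's actual rehydration lives in its triggerer machinery; this simplified illustration just shows the mechanism.

   ```python
   import importlib

   # trigger is any BaseTrigger instance, e.g. a DataflowJobStatusTrigger (assumed here).
   classpath, kwargs = trigger.serialize()

   # Import the module, look up the class, and re-instantiate with the kwargs.
   module_name, class_name = classpath.rsplit(".", 1)
   trigger_cls = getattr(importlib.import_module(module_name), class_name)
   rehydrated = trigger_cls(**kwargs)
   ```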

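   And for anyone following along on how these triggers get consumed: a hypothetical deferrable operator (the class and its attributes are illustrative, not part of this PR) would hand off to `DataflowJobStatusTrigger` along these lines, using the standard `defer`/`execute_complete` pattern.

   ```python
   from airflow.exceptions import AirflowException
   from airflow.models import BaseOperator
   from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus
   from airflow.providers.google.cloud.triggers.dataflow import DataflowJobStatusTrigger


   class WaitForDataflowJobOperator(BaseOperator):
       """Hypothetical operator that defers until a Dataflow job is done."""

       def __init__(self, *, job_id, project_id, location, gcp_conn_id="google_cloud_default", **kwargs):
           super().__init__(**kwargs)
           self.job_id = job_id
           self.project_id = project_id
           self.location = location
           self.gcp_conn_id = gcp_conn_id

       def execute(self, context):
           # Hand control to the triggerer; resume in execute_complete once the
           # job reaches JOB_STATE_DONE (or the trigger reports an error).
           self.defer(
               trigger=DataflowJobStatusTrigger(
                   job_id=self.job_id,
                   expected_statuses={DataflowJobStatus.JOB_STATE_DONE},
                   project_id=self.project_id,
                   location=self.location,
                   gcp_conn_id=self.gcp_conn_id,
               ),
               method_name="execute_complete",
           )

       def execute_complete(self, context, event):
           # The trigger yields {"status": "success" | "error", "message": ...}.
           if event["status"] == "error":
               raise AirflowException(event["message"])
           self.log.info(event["message"])
           return self.job_id
   ```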