Lee-W commented on code in PR #37693:
URL: https://github.com/apache/airflow/pull/37693#discussion_r1559298831


##########
airflow/providers/google/cloud/triggers/dataflow.py:
##########
@@ -142,3 +152,508 @@ def _get_async_hook(self) -> AsyncDataflowHook:
             impersonation_chain=self.impersonation_chain,
             cancel_timeout=self.cancel_timeout,
         )
+
+
+class DataflowJobStatusTrigger(BaseTrigger):
+    """
+    Trigger that checks the status of a Dataflow job.
+
+    :param job_id: Required. ID of the job.
+    :param expected_statuses: The expected state(s) of the operation.
+        See: https://cloud.google.com/dataflow/docs/reference/rest/v1b3/projects.jobs#Job.JobState
+    :param project_id: Required. The Google Cloud project ID in which the job was started.
+    :param location: Optional. The location where the job is executed. If set to None then
+        the value of DEFAULT_DATAFLOW_LOCATION will be used.
+    :param gcp_conn_id: The connection ID to use for connecting to Google Cloud.
+    :param poll_sleep: Time (seconds) to wait between two consecutive calls to check the job.
+    :param impersonation_chain: Optional. Service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    def __init__(
+        self,
+        job_id: str,
+        expected_statuses: set[str],
+        project_id: str | None,
+        location: str = DEFAULT_DATAFLOW_LOCATION,
+        gcp_conn_id: str = "google_cloud_default",
+        poll_sleep: int = 10,
+        impersonation_chain: str | Sequence[str] | None = None,
+    ):
+        super().__init__()
+        self.job_id = job_id
+        self.expected_statuses = expected_statuses
+        self.project_id = project_id
+        self.location = location
+        self.gcp_conn_id = gcp_conn_id
+        self.poll_sleep = poll_sleep
+        self.impersonation_chain = impersonation_chain
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        """Serialize class arguments and classpath."""
+        return (
+            "airflow.providers.google.cloud.triggers.dataflow.DataflowJobStatusTrigger",
+            {
+                "job_id": self.job_id,
+                "expected_statuses": self.expected_statuses,
+                "project_id": self.project_id,
+                "location": self.location,
+                "gcp_conn_id": self.gcp_conn_id,
+                "poll_sleep": self.poll_sleep,
+                "impersonation_chain": self.impersonation_chain,
+            },
+        )
+
+    async def run(self):
+        """
+        Loop until the job reaches an expected or terminal state.
+
+        Yields a TriggerEvent with success status if the client returns an expected job status.
+
+        Yields a TriggerEvent with error status if the client returns an unexpected terminal
+        job status or any exception is raised while looping.
+
+        In any other case the trigger waits for the number of seconds
+        stored in the self.poll_sleep attribute.
+        """
+        try:
+            while True:
+                job_status = await self.async_hook.get_job_status(
+                    job_id=self.job_id,
+                    project_id=self.project_id,
+                    location=self.location,
+                )
+                if job_status.name in self.expected_statuses:
+                    yield TriggerEvent(
+                        {
+                            "status": "success",
+                            "message": f"Job with id '{self.job_id}' has reached an expected state: {job_status.name}",
+                        }
+                    )
+                    return
+                elif job_status.name in DataflowJobStatus.TERMINAL_STATES:
+                    yield TriggerEvent(
+                        {
+                            "status": "error",
+                            "message": f"Job with id '{self.job_id}' is already in terminal state: {job_status.name}",
+                        }
+                    )
+                    return
+                self.log.info("Sleeping for %s seconds.", self.poll_sleep)
+                await asyncio.sleep(self.poll_sleep)
+        except Exception as e:
+            self.log.error("Exception occurred while checking for job status!")
+            yield TriggerEvent(
+                {
+                    "status": "error",
+                    "message": str(e),
+                }
+            )
+
+    @cached_property
+    def async_hook(self) -> AsyncDataflowHook:
+        return AsyncDataflowHook(
+            gcp_conn_id=self.gcp_conn_id,
+            poll_sleep=self.poll_sleep,
+            impersonation_chain=self.impersonation_chain,
+        )
+
+
+class DataflowJobMetricsTrigger(BaseTrigger):
+    """
+    Trigger that checks for metrics associated with a Dataflow job.
+
+    :param job_id: Required. ID of the job.
+    :param project_id: Required. The Google Cloud project ID in which the job was started.
+    :param location: Optional. The location where the job is executed. If set to None then
+        the value of DEFAULT_DATAFLOW_LOCATION will be used.
+    :param gcp_conn_id: The connection ID to use for connecting to Google Cloud.
+    :param poll_sleep: Time (seconds) to wait between two consecutive calls to check the job.
+    :param impersonation_chain: Optional. Service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    :param fail_on_terminal_state: If set to True the trigger will yield a TriggerEvent with
+        error status if the job reaches a terminal state.
+    """
+
+    def __init__(
+        self,
+        job_id: str,
+        project_id: str | None,
+        location: str = DEFAULT_DATAFLOW_LOCATION,
+        gcp_conn_id: str = "google_cloud_default",
+        poll_sleep: int = 10,
+        impersonation_chain: str | Sequence[str] | None = None,
+        fail_on_terminal_state: bool = True,
+    ):
+        super().__init__()
+        self.project_id = project_id
+        self.job_id = job_id
+        self.location = location
+        self.gcp_conn_id = gcp_conn_id
+        self.poll_sleep = poll_sleep
+        self.impersonation_chain = impersonation_chain
+        self.fail_on_terminal_state = fail_on_terminal_state
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        """Serialize class arguments and classpath."""
+        return (
+            "airflow.providers.google.cloud.triggers.dataflow.DataflowJobMetricsTrigger",
+            {
+                "project_id": self.project_id,
+                "job_id": self.job_id,
+                "location": self.location,
+                "gcp_conn_id": self.gcp_conn_id,
+                "poll_sleep": self.poll_sleep,
+                "impersonation_chain": self.impersonation_chain,
+                "fail_on_terminal_state": self.fail_on_terminal_state,
+            },
+        )
+
+    async def run(self):
+        """
+        Loop until a terminal job status or any job metrics are returned.
+
+        Yields a TriggerEvent with success status if the client returns any job metrics
+        and the fail_on_terminal_state attribute is False.
+
+        Yields a TriggerEvent with error status if the client returns a job status with
+        a terminal state value and the fail_on_terminal_state attribute is True.
+
+        Yields a TriggerEvent with error status if any exception is raised while looping.
+
+        In any other case the trigger waits for the number of seconds
+        stored in the self.poll_sleep attribute.
+        """
+        try:
+            while True:
+                job_status = await self.async_hook.get_job_status(
+                    job_id=self.job_id,
+                    project_id=self.project_id,
+                    location=self.location,
+                )
+                job_metrics = await self.get_job_metrics()
+                if self.loop_must_fail_when_job_is_in_terminal_state(job_status):
+                    yield TriggerEvent(
+                        {
+                            "status": "error",
+                            "message": f"Job with id '{self.job_id}' is already in terminal state: {job_status.name}",
+                            "result": None,
+                        }
+                    )
+                    return
+                if job_metrics:
+                    yield TriggerEvent(
+                        {
+                            "status": "success",
+                            "message": f"Detected {len(job_metrics)} metrics for job '{self.job_id}'",
+                            "result": job_metrics,
+                        }
+                    )
+                    return
+                self.log.info("Sleeping for %s seconds.", self.poll_sleep)
+                await asyncio.sleep(self.poll_sleep)
+        except Exception as e:
+            self.log.error("Exception occurred while checking for job's metrics!")
+            yield TriggerEvent({"status": "error", "message": str(e), "result": None})
+
+    async def get_job_metrics(self) -> list[dict[str, Any]]:
+        """Wait for the Dataflow client response and then return it in a serialized list."""
+        job_response: JobMetrics = await self.async_hook.get_job_metrics(
+            job_id=self.job_id,
+            project_id=self.project_id,
+            location=self.location,
+        )
+        return self._get_metrics_from_job_response(job_response)
+
+    def loop_must_fail_when_job_is_in_terminal_state(self, job_status: JobState) -> bool:
+        if self.fail_on_terminal_state and job_status.name in DataflowJobStatus.TERMINAL_STATES:
+            return True
+        return False
+
+    def _get_metrics_from_job_response(self, job_response: JobMetrics) -> list[dict[str, Any]]:
+        """Return a list of serialized MetricUpdate objects."""
+        return [MetricUpdate.to_dict(metric) for metric in job_response.metrics]
+
+    @cached_property
+    def async_hook(self) -> AsyncDataflowHook:
+        return AsyncDataflowHook(
+            gcp_conn_id=self.gcp_conn_id,
+            poll_sleep=self.poll_sleep,
+            impersonation_chain=self.impersonation_chain,
+        )
+
+
+class DataflowJobAutoScalingEventTrigger(BaseTrigger):
+    """
+    Trigger that checks for autoscaling events associated with a Dataflow job.
+
+    :param job_id: Required. ID of the job.
+    :param project_id: Required. The Google Cloud project ID in which the job was started.
+    :param location: Optional. The location where the job is executed. If set to None then
+        the value of DEFAULT_DATAFLOW_LOCATION will be used.
+    :param gcp_conn_id: The connection ID to use for connecting to Google Cloud.
+    :param poll_sleep: Time (seconds) to wait between two consecutive calls to check the job.
+    :param impersonation_chain: Optional. Service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    :param fail_on_terminal_state: If set to True the trigger will yield a TriggerEvent with
+        error status if the job reaches a terminal state.
+    """
+
+    def __init__(
+        self,
+        job_id: str,
+        project_id: str | None,
+        location: str = DEFAULT_DATAFLOW_LOCATION,
+        gcp_conn_id: str = "google_cloud_default",
+        poll_sleep: int = 10,
+        impersonation_chain: str | Sequence[str] | None = None,
+        fail_on_terminal_state: bool = True,
+    ):
+        super().__init__()
+        self.project_id = project_id
+        self.job_id = job_id
+        self.location = location
+        self.gcp_conn_id = gcp_conn_id
+        self.poll_sleep = poll_sleep
+        self.impersonation_chain = impersonation_chain
+        self.fail_on_terminal_state = fail_on_terminal_state
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        """Serialize class arguments and classpath."""
+        return (
+            "airflow.providers.google.cloud.triggers.dataflow.DataflowJobAutoScalingEventTrigger",
+            {
+                "project_id": self.project_id,
+                "job_id": self.job_id,
+                "location": self.location,
+                "gcp_conn_id": self.gcp_conn_id,
+                "poll_sleep": self.poll_sleep,
+                "impersonation_chain": self.impersonation_chain,
+                "fail_on_terminal_state": self.fail_on_terminal_state,
+            },
+        )
+
+    async def run(self):
+        """
+        Loop until a terminal job status or any autoscaling events are returned.
+
+        Yields a TriggerEvent with success status if the client returns any autoscaling events
+        and the fail_on_terminal_state attribute is False.
+
+        Yields a TriggerEvent with error status if the client returns a job status with
+        a terminal state value and the fail_on_terminal_state attribute is True.
+
+        Yields a TriggerEvent with error status if any exception is raised while looping.
+
+        In any other case the trigger waits for the number of seconds
+        stored in the self.poll_sleep attribute.
+        """
+        try:
+            while True:
+                job_status = await self.async_hook.get_job_status(
+                    job_id=self.job_id,
+                    project_id=self.project_id,
+                    location=self.location,
+                )
+                autoscaling_events = await self.list_job_autoscaling_events()
+                if self.loop_must_fail_when_job_is_in_terminal_state(job_status):
+                    yield TriggerEvent(
+                        {
+                            "status": "error",
+                            "message": f"Job with id '{self.job_id}' is already in terminal state: {job_status.name}",
+                            "result": None,
+                        }
+                    )
+                    return
+                if autoscaling_events:
+                    yield TriggerEvent(
+                        {
+                            "status": "success",
+                            "message": f"Detected {len(autoscaling_events)} autoscaling events for job '{self.job_id}'",
+                            "result": autoscaling_events,
+                        }
+                    )
+                    return
+                self.log.info("Sleeping for %s seconds.", self.poll_sleep)
+                await asyncio.sleep(self.poll_sleep)
+        except Exception as e:
+            self.log.error("Exception occurred while checking for job's autoscaling events!")
+            yield TriggerEvent({"status": "error", "message": str(e), "result": None})
+
+    async def list_job_autoscaling_events(self) -> list[dict[str, str | dict]]:
+        """Wait for the Dataflow client response and then return it in a serialized list."""
+        job_response: ListJobMessagesAsyncPager = await self.async_hook.list_job_messages(
+            job_id=self.job_id,
+            project_id=self.project_id,
+            location=self.location,
+        )
+        return self._get_autoscaling_events_from_job_response(job_response)
+
+    def loop_must_fail_when_job_is_in_terminal_state(self, job_status: JobState) -> bool:
+        if self.fail_on_terminal_state and job_status.name in DataflowJobStatus.TERMINAL_STATES:
+            return True
+        return False
+
+    def _get_autoscaling_events_from_job_response(
+        self, job_response: ListJobMessagesAsyncPager
+    ) -> list[dict[str, str | dict]]:
+        """Return a list of serialized AutoscalingEvent objects."""
+        return [AutoscalingEvent.to_dict(event) for event in job_response.autoscaling_events]
+
+    @cached_property
+    def async_hook(self) -> AsyncDataflowHook:
+        return AsyncDataflowHook(
+            gcp_conn_id=self.gcp_conn_id,
+            poll_sleep=self.poll_sleep,
+            impersonation_chain=self.impersonation_chain,
+        )
+
+
+class DataflowJobMessagesTrigger(BaseTrigger):
+    """
+    Trigger that checks for job messages associated with a Dataflow job.
+
+    :param job_id: Required. ID of the job.
+    :param project_id: Required. The Google Cloud project ID in which the job was started.
+    :param location: Optional. The location where the job is executed. If set to None then
+        the value of DEFAULT_DATAFLOW_LOCATION will be used.
+    :param gcp_conn_id: The connection ID to use for connecting to Google Cloud.
+    :param poll_sleep: Time (seconds) to wait between two consecutive calls to check the job.
+    :param impersonation_chain: Optional. Service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    :param fail_on_terminal_state: If set to True the trigger will yield a TriggerEvent with
+        error status if the job reaches a terminal state.
+    """
+
+    def __init__(
+        self,
+        job_id: str,
+        project_id: str | None,
+        location: str = DEFAULT_DATAFLOW_LOCATION,
+        gcp_conn_id: str = "google_cloud_default",
+        poll_sleep: int = 10,
+        impersonation_chain: str | Sequence[str] | None = None,
+        fail_on_terminal_state: bool = True,
+    ):
+        super().__init__()
+        self.project_id = project_id
+        self.job_id = job_id
+        self.location = location
+        self.gcp_conn_id = gcp_conn_id
+        self.poll_sleep = poll_sleep
+        self.impersonation_chain = impersonation_chain
+        self.fail_on_terminal_state = fail_on_terminal_state
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        """Serialize class arguments and classpath."""
+        return (
+            "airflow.providers.google.cloud.triggers.dataflow.DataflowJobMessagesTrigger",
+            {
+                "project_id": self.project_id,
+                "job_id": self.job_id,
+                "location": self.location,
+                "gcp_conn_id": self.gcp_conn_id,
+                "poll_sleep": self.poll_sleep,
+                "impersonation_chain": self.impersonation_chain,
+                "fail_on_terminal_state": self.fail_on_terminal_state,
+            },
+        )
+
+    async def run(self):
+        """
+        Loop until a terminal job status or any job messages are returned.
+
+        Yields a TriggerEvent with success status if the client returns any job messages
+        and the fail_on_terminal_state attribute is False.
+
+        Yields a TriggerEvent with error status if the client returns a job status with
+        a terminal state value and the fail_on_terminal_state attribute is True.
+
+        Yields a TriggerEvent with error status if any exception is raised while looping.
+
+        In any other case the trigger waits for the number of seconds
+        stored in the self.poll_sleep attribute.
+        """
+        try:
+            while True:
+                job_status = await self.async_hook.get_job_status(
+                    job_id=self.job_id,
+                    project_id=self.project_id,
+                    location=self.location,
+                )
+                job_messages = await self.list_job_messages()
+                if self.loop_must_fail_when_job_is_in_terminal_state(job_status):

Review Comment:
   > Do you think it would be better to move this code outside of the class?
   
   If it's the same logic across the different classes, I think we could move it outside of the class or make another base class with this method.
   
   > As for the name, I chose a longer one because I had a hard time expressing what this method really does.
   
   Hmmm... if that's the case, I think we should probably stick with the original if statement. I feel like the function name does not enhance the readability 🤔
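   
   For illustration only, a minimal sketch of the base-class idea (the `BaseDataflowJobTrigger` name and the `_is_terminal_failure` helper are hypothetical, not part of this PR):
   
   ```python
   from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus
   from airflow.triggers.base import BaseTrigger
   
   
   class BaseDataflowJobTrigger(BaseTrigger):
       """Hypothetical shared base for the Dataflow job triggers in this module."""
   
       fail_on_terminal_state: bool  # assumed to be set in each subclass's __init__
   
       def _is_terminal_failure(self, job_status) -> bool:
           # Same check as the original if statement, collapsed into one expression.
           return self.fail_on_terminal_state and job_status.name in DataflowJobStatus.TERMINAL_STATES
   ```
   
   And if we drop the helper entirely, the inline form would just be:
   
   ```python
   if self.fail_on_terminal_state and job_status.name in DataflowJobStatus.TERMINAL_STATES:
       ...
   ```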


