hussein-awala commented on code in PR #32498:
URL: https://github.com/apache/airflow/pull/32498#discussion_r1258922424


##########
airflow/providers/amazon/aws/operators/eventbridge.py:
##########
@@ -0,0 +1,87 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import Sequence

Review Comment:
   ```suggestion
   from typing import TYPE_CHECKING, Sequence
   ```



##########
airflow/providers/amazon/aws/operators/eventbridge.py:
##########
@@ -0,0 +1,87 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import Sequence
+
+from airflow import AirflowException
+from airflow.decorators.base import cached_property

Review Comment:
   ```suggestion
   from functools import cached_property
   ```



##########
airflow/providers/amazon/aws/operators/eventbridge.py:
##########
@@ -0,0 +1,87 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import Sequence
+
+from airflow import AirflowException
+from airflow.decorators.base import cached_property
+from airflow.models import BaseOperator
+from airflow.providers.amazon.aws.hooks.eventbridge import EventBridgeHook
+from airflow.providers.amazon.aws.utils import trim_none_values
+from airflow.utils.context import Context

Review Comment:
   ```suggestion
   
   if TYPE_CHECKING:
       from airflow.utils.context import Context
   ```
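   For reference, a rough sketch of how the import block might look once all three import-related suggestions are applied (just the combined shape, not the exact final diff):
   ```python
   from __future__ import annotations

   from functools import cached_property
   from typing import TYPE_CHECKING, Sequence

   from airflow import AirflowException
   from airflow.models import BaseOperator
   from airflow.providers.amazon.aws.hooks.eventbridge import EventBridgeHook
   from airflow.providers.amazon.aws.utils import trim_none_values

   if TYPE_CHECKING:
       from airflow.utils.context import Context
   ```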



##########
airflow/providers/amazon/aws/operators/eventbridge.py:
##########
@@ -0,0 +1,87 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import Sequence
+
+from airflow import AirflowException
+from airflow.decorators.base import cached_property
+from airflow.models import BaseOperator
+from airflow.providers.amazon.aws.hooks.eventbridge import EventBridgeHook
+from airflow.providers.amazon.aws.utils import trim_none_values
+from airflow.utils.context import Context
+
+
+class EventBridgePutEventsOperator(BaseOperator):
+    """
+    Put Events onto Amazon EventBridge.
+
+    :param entries: the list of events to be put onto EventBridge, each event is a dict (required)
+    :param endpoint_id: the URL subdomain of the endpoint
+    :param aws_conn_id: the AWS connection to use
+    :param region_name: the region where events are to be sent
+
+    """
+
+    template_fields: Sequence[str] = ("entries", "endpoint_id", "aws_conn_id", "region_name")
+
+    def __init__(
+        self,
+        *,
+        entries: list[dict],
+        endpoint_id: str | None = None,
+        aws_conn_id: str = "aws_default",
+        region_name: str | None = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.entries = entries
+        self.endpoint_id = endpoint_id
+        self.aws_conn_id = aws_conn_id
+        self.region_name = region_name
+
+    @cached_property
+    def hook(self) -> EventBridgeHook:
+        """Create and return an EventBridgeHook."""
+        return EventBridgeHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
+
+    def execute(self, context: Context):
+
+        response = self.hook.conn.put_events(
+            **trim_none_values(
+                {
+                    "Entries": self.entries,
+                    "EndpointId": self.endpoint_id,
+                }
+            )
+        )
+
+        self.log.info("Sent %d events to EventBridge.", len(self.entries))
+
+        # If events have failed, log those error codes and messages to console, and raise an exception.
+
+        if "FailedEntryCount" in response:
+            self.log.error("Some events have failed to send.")
+            for event in response["Entries"]:
+                if "ErrorCode" in event:
+                    self.log.error(event)

Review Comment:
   I agree, we should only log events with `ErrorCode`
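   For illustration, a minimal sketch of that filtering (assuming the response shape shown in the hunk above):
   ```python
   # Log only the entries that actually failed, i.e. those carrying an ErrorCode.
   failed_entries = [e for e in response.get("Entries", []) if "ErrorCode" in e]
   for entry in failed_entries:
       self.log.error("Failed to send event: %s", entry)
   ```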



##########
airflow/providers/amazon/aws/operators/eventbridge.py:
##########
@@ -0,0 +1,87 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import Sequence
+
+from airflow import AirflowException
+from airflow.decorators.base import cached_property
+from airflow.models import BaseOperator
+from airflow.providers.amazon.aws.hooks.eventbridge import EventBridgeHook
+from airflow.providers.amazon.aws.utils import trim_none_values
+from airflow.utils.context import Context
+
+
+class EventBridgePutEventsOperator(BaseOperator):
+    """
+    Put Events onto Amazon EventBridge.
+
+    :param entries: the list of events to be put onto EventBridge, each event is a dict (required)
+    :param endpoint_id: the URL subdomain of the endpoint
+    :param aws_conn_id: the AWS connection to use
+    :param region_name: the region where events are to be sent
+
+    """
+
+    template_fields: Sequence[str] = ("entries", "endpoint_id", "aws_conn_id", "region_name")
+
+    def __init__(
+        self,
+        *,
+        entries: list[dict],
+        endpoint_id: str | None = None,
+        aws_conn_id: str = "aws_default",
+        region_name: str | None = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.entries = entries
+        self.endpoint_id = endpoint_id
+        self.aws_conn_id = aws_conn_id
+        self.region_name = region_name
+
+    @cached_property
+    def hook(self) -> EventBridgeHook:
+        """Create and return an EventBridgeHook."""
+        return EventBridgeHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
+
+    def execute(self, context: Context):
+
+        response = self.hook.conn.put_events(
+            **trim_none_values(
+                {
+                    "Entries": self.entries,
+                    "EndpointId": self.endpoint_id,
+                }
+            )
+        )
+
+        self.log.info("Sent %d events to EventBridge.", len(self.entries))
+
+        # If events have failed, log those error codes and messages to console, and raise an exception.
+
+        if "FailedEntryCount" in response:

Review Comment:
   From the AWS API docs, we could get this response:
   ```json
   {
       "FailedEntryCount": 0, 
       "Entries": [
           {
               "EventId": "11710aed-b79e-4468-a20b-bb3c0c3b4860"
           }, 
           {
               "EventId": "d804d26a-88db-4b66-9eaf-9a11c708ae82"
           }
       ]
   }
   ```
   so your check is not valid; instead you can use:
   ```suggestion
           if response.get("FailedEntryCount"):
   ```
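   For example, with the sample response above the difference is easy to see (a quick illustration, not code from the PR):
   ```python
   response = {
       "FailedEntryCount": 0,
       "Entries": [
           {"EventId": "11710aed-b79e-4468-a20b-bb3c0c3b4860"},
           {"EventId": "d804d26a-88db-4b66-9eaf-9a11c708ae82"},
       ],
   }

   # The key is present even when nothing failed, so the membership test misfires:
   assert "FailedEntryCount" in response        # True despite zero failures
   # Truthiness covers both the zero case and a missing key:
   assert not response.get("FailedEntryCount")  # 0 (or absent) means "no failures"
   ```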



##########
tests/providers/amazon/aws/operators/test_eventbridge.py:
##########
@@ -0,0 +1,73 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from unittest import mock
+from unittest.mock import MagicMock
+
+import pytest
+
+from airflow import AirflowException
+from airflow.providers.amazon.aws.hooks.eventbridge import EventBridgeHook
+from airflow.providers.amazon.aws.operators.eventbridge import EventBridgePutEventsOperator
+
+TASK_ID = "events_putevents_job"
+ENTRIES = [{"Detail": "test-detail", "Source": "test-source", "DetailType": 
"test-detail-type"}]
+FAILED_ENTRIES_RESPONSE = [{"ErrorCode": "test_code"}, {"ErrorCode": "test_code"}]
+
+
+class TestEventBridgePutEventsOperator:
+    def test_init(self):
+        operator = EventBridgePutEventsOperator(
+            task_id=TASK_ID,
+            entries=ENTRIES,
+        )
+
+        assert operator.entries == ENTRIES
+
+    @mock.patch.object(EventBridgeHook, "conn")
+    def test_execute(self, mock_conn: MagicMock):
+        hook_response = {"Entries": [{"EventId": "foobar"}]}

Review Comment:
   This is not a valid response (the real API always includes `FailedEntryCount`):
   ```suggestion
           hook_response = {"FailedEntryCount": 0, "Entries": [{"EventId": 
"foobar"}]}
   ```
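   As a rough sketch of how the corrected response could be wired up in the test (the exact call shape here is an assumption, not the PR's code):
   ```python
   hook_response = {"FailedEntryCount": 0, "Entries": [{"EventId": "foobar"}]}
   mock_conn.put_events.return_value = hook_response

   operator = EventBridgePutEventsOperator(task_id=TASK_ID, entries=ENTRIES)
   result = operator.execute(None)

   # execute() returns the EventId of every entry that was sent successfully.
   assert result == ["foobar"]
   ```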



##########
airflow/providers/amazon/aws/operators/eventbridge.py:
##########
@@ -0,0 +1,87 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import Sequence
+
+from airflow import AirflowException
+from airflow.decorators.base import cached_property
+from airflow.models import BaseOperator
+from airflow.providers.amazon.aws.hooks.eventbridge import EventBridgeHook
+from airflow.providers.amazon.aws.utils import trim_none_values
+from airflow.utils.context import Context
+
+
+class EventBridgePutEventsOperator(BaseOperator):
+    """
+    Put Events onto Amazon EventBridge.
+
+    :param entries: the list of events to be put onto EventBridge, each event is a dict (required)
+    :param endpoint_id: the URL subdomain of the endpoint
+    :param aws_conn_id: the AWS connection to use
+    :param region_name: the region where events are to be sent
+
+    """
+
+    template_fields: Sequence[str] = ("entries", "endpoint_id", "aws_conn_id", "region_name")
+
+    def __init__(
+        self,
+        *,
+        entries: list[dict],
+        endpoint_id: str | None = None,
+        aws_conn_id: str = "aws_default",
+        region_name: str | None = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.entries = entries
+        self.endpoint_id = endpoint_id
+        self.aws_conn_id = aws_conn_id
+        self.region_name = region_name
+
+    @cached_property
+    def hook(self) -> EventBridgeHook:
+        """Create and return an EventBridgeHook."""
+        return EventBridgeHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
+
+    def execute(self, context: Context):
+
+        response = self.hook.conn.put_events(
+            **trim_none_values(
+                {
+                    "Entries": self.entries,
+                    "EndpointId": self.endpoint_id,
+                }
+            )
+        )
+
+        self.log.info("Sent %d events to EventBridge.", len(self.entries))
+
+        # If events have failed, log those error codes and messages to console, and raise an exception.
+
+        if "FailedEntryCount" in response:
+            self.log.error("Some events have failed to send.")
+            for event in response["Entries"]:
+                if "ErrorCode" in event:
+                    self.log.error(event)
+
+            raise AirflowException(
+                f"{response['FailedEntryCount']} entries in this request have 
failed to send."
+            )
+
+        return [e["EventId"] for e in response["Entries"]]

Review Comment:
   The task will fail when the number of entries exceeds `AIRFLOW__CORE__MAX_MAP_LENGTH` ([doc](https://airflow.apache.org/docs/apache-airflow/stable/configurations-ref.html#max-map-length)).
   
   I suggest returning them when `do_xcom_push` is True (it's True by default, but at least the user will have an option to stop pushing XCom):
   ```suggestion
           if self.do_xcom_push:
               return [e["EventId"] for e in response["Entries"]]
   ```
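   On the DAG author's side that would also give an opt-out for very large batches, e.g. (hypothetical usage; `large_entry_list` is just a placeholder):
   ```python
   put_events = EventBridgePutEventsOperator(
       task_id="put_events",
       entries=large_entry_list,  # placeholder: a large list[dict] built elsewhere
       do_xcom_push=False,        # skip pushing the EventId list to XCom
   )
   ```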



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
