jroachgolf84 commented on code in PR #52001:
URL: https://github.com/apache/airflow/pull/52001#discussion_r2164929570
##########
providers/google/src/airflow/providers/google/cloud/operators/cloud_logging_sink.py:
##########
@@ -0,0 +1,490 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any
+
+import google.cloud.exceptions
+from google.api_core.exceptions import AlreadyExists
+from google.cloud import logging_v2
+
+from airflow.exceptions import AirflowException
+from airflow.providers.google.cloud.hooks.cloud_logging import CloudLoggingHook
+from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+def _handle_exclusion_filter(exclusion_filter):
+    exclusion_filter_config = []
+    if isinstance(exclusion_filter, dict):
+        exclusion_filter_config.append(logging_v2.types.LogExclusion(**exclusion_filter))
+    elif isinstance(exclusion_filter, list):
+        for f in exclusion_filter:
+            if isinstance(f, dict):
+                exclusion_filter_config.append(logging_v2.types.LogExclusion(**f))
+            else:
+                exclusion_filter_config.append(f)
+    return exclusion_filter_config
+
+
+class CloudLoggingCreateSinkOperator(GoogleCloudBaseOperator):
+    """
+    Creates a Cloud Logging export sink in a GCP project.
+
+    This operator creates a sink that exports log entries from Cloud Logging
+    to destinations like Cloud Storage, BigQuery, or Pub/Sub.
+
+    :param sink_name: Required. Name of the sink to create.
+    :param destination: Required. Destination URI. Examples:
+        - Cloud Storage: 'storage.googleapis.com/my-bucket'
+        - BigQuery: 'bigquery.googleapis.com/projects/my-project/datasets/my_dataset'
+        - Pub/Sub: 'pubsub.googleapis.com/projects/my-project/topics/my-topic'
+    :param project_id: Required. The ID of the Google Cloud project.
+    :param filter_: Optional filter expression for selecting log entries.
+        If None, all log entries are exported.
+    :param exclusion_filter: Optional filter expressions for excluding logs.
+    :param unique_writer_identity: If True, creates a unique service account for the sink.
+    :param description: Optional description for the sink.
+    :param disabled: If True, creates the sink in a disabled state.
+    :param bigquery_options: Optional. For BigQuery destinations, allows passing BigQuery-related configuration.
+    :param include_children: Whether to export logs from child resources.
+    :param gcp_conn_id: The connection ID used to connect to Google Cloud.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = (
+        "sink_name",
+        "destination",
+        "filter_",
+        "exclusion_filter",
+        "project_id",
+        "description",
+        "gcp_conn_id",
+        "impersonation_chain",
+    )
+
+    def __init__(
+        self,
+        sink_name: str,
+        destination: str,
+        project_id: str,
+        filter_: str | None = None,
+        exclusion_filter: Sequence[dict] | dict | None = None,
+        unique_writer_identity: bool = True,
+        description: str | None = None,
+        disabled: bool = False,
+        bigquery_options: dict | None = None,
+        include_children: bool = False,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.sink_name = sink_name
+        self.destination = destination
+        self.project_id = project_id
+        self.filter_ = filter_
+        self.exclusion_filter = exclusion_filter
+        self.unique_writer_identity = unique_writer_identity
+        self.description = description
+        self.disabled = disabled
+        self.bigquery_options = bigquery_options
+        self.include_children = include_children
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def _validate_inputs(self):
+        """Validate required inputs."""
+        missing_fields = []
+        for field_name in ["sink_name", "destination", "project_id"]:
+            if not getattr(self, field_name):
+                missing_fields.append(field_name)
+
+        if missing_fields:
+            raise AirflowException(
+                f"Required parameters are missing: {missing_fields}. These parameters must be passed as "
+                "keyword parameters or as extra fields in the Airflow connection definition."
+            )
+
+    def execute(self, context: Context) -> dict[str, Any]:
+        """Execute the operator."""
+        self._validate_inputs()
+        hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+
+        client = hook.get_conn()
+        parent = f"projects/{self.project_id}"
+
+        # Build the sink configuration
+        sink_config = {
+            "name": self.sink_name,
+            "destination": self.destination,
+            "disabled": self.disabled,
+            "include_children": self.include_children,
+        }
+
+        if self.filter_:
+            sink_config["filter"] = self.filter_
+        if self.description:
+            sink_config["description"] = self.description
+        if self.exclusion_filter:
+            sink_config["exclusions"] = _handle_exclusion_filter(self.exclusion_filter)
+        if self.bigquery_options:
+            if isinstance(self.bigquery_options, dict):
+                bigquery_options = logging_v2.types.BigQueryOptions(**self.bigquery_options)
+                sink_config["bigquery_options"] = bigquery_options
+
+        sink = logging_v2.types.LogSink(**sink_config)
+
+        try:
+            self.log.info("Creating log sink '%s' in project '%s'", self.sink_name, self.project_id)
+            self.log.info("Destination: %s", self.destination)
+            if self.filter_:
+                self.log.info("Filter: %s", self.filter_)
+
+            response = client.create_sink(
+                request={
+                    "parent": parent,
+                    "sink": sink,
+                    "unique_writer_identity": self.unique_writer_identity,
+                }
+            )
+
+            self.log.info("Log sink created successfully: %s", response.name)
+
+            if self.unique_writer_identity and hasattr(response, "writer_identity"):
+                self.log.info("Writer identity: %s", response.writer_identity)
+                self.log.info("Remember to grant appropriate permissions to the writer identity")
+
+            return logging_v2.types.LogSink.to_dict(response)
+
+        except AlreadyExists:
+            self.log.info(
+                "Log sink already exists, sink_name=%s, project_id=%s",
+                self.sink_name,
+                self.project_id,
+            )
+            sink_path = f"projects/{self.project_id}/sinks/{self.sink_name}"
+            existing_sink = client.get_sink(request={"sink_name": sink_path})
+            return logging_v2.types.LogSink.to_dict(existing_sink)
+
+        except google.cloud.exceptions.GoogleCloudError as e:
+            self.log.error("An error occurred. Exiting.")
+            raise e
+
+
+class CloudLoggingDeleteSinkOperator(GoogleCloudBaseOperator):
+    """
+    Deletes a Cloud Logging export sink from a GCP project.
+
+    :param sink_name: Required. Name of the sink to delete.
+    :param project_id: Required. The ID of the Google Cloud project.
+    :param gcp_conn_id: The connection ID used to connect to Google Cloud.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = ("sink_name", "project_id", "gcp_conn_id", "impersonation_chain")
+
+    def __init__(
+        self,
+        sink_name: str,
+        project_id: str,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.sink_name = sink_name
+        self.project_id = project_id
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def _validate_inputs(self):
+        """Validate required inputs."""
+        missing_fields = []
+        for field_name in ["sink_name", "project_id"]:
+            if not getattr(self, field_name):
+                missing_fields.append(field_name)
+
+        if missing_fields:
+            raise AirflowException(
+                f"Required parameters are missing: {missing_fields}. These parameters must be passed as "
+                "keyword parameters or as extra fields in the Airflow connection definition."
+            )
+
+    def execute(self, context: Context) -> dict[str, Any]:
+        """Execute the operator."""
+        self._validate_inputs()
+        hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+
+        client = hook.get_conn()
+        sink_path = f"projects/{self.project_id}/sinks/{self.sink_name}"

Review Comment:
   Just for my own knowledge; what are you looking to move to the hook?
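For context when reviewing: a minimal usage sketch of the two operators as defined in this diff. The DAG id, project, bucket, and sink names below are illustrative placeholders, not values from the PR.

from datetime import datetime

from airflow import DAG
from airflow.providers.google.cloud.operators.cloud_logging_sink import (
    CloudLoggingCreateSinkOperator,
    CloudLoggingDeleteSinkOperator,
)

with DAG(
    dag_id="example_cloud_logging_sink",
    schedule=None,
    start_date=datetime(2024, 1, 1),
) as dag:
    # Create a sink that exports ERROR-and-above log entries to a GCS
    # bucket, excluding DEBUG entries via an exclusion filter.
    create_sink = CloudLoggingCreateSinkOperator(
        task_id="create_sink",
        sink_name="my-export-sink",
        destination="storage.googleapis.com/my-log-bucket",
        project_id="my-project",
        filter_="severity>=ERROR",
        exclusion_filter=[{"name": "exclude-debug", "filter": "severity=DEBUG"}],
    )

    # Tear the sink down again, e.g. at the end of a test pipeline.
    delete_sink = CloudLoggingDeleteSinkOperator(
        task_id="delete_sink",
        sink_name="my-export-sink",
        project_id="my-project",
    )

    create_sink >> delete_sink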
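A note on the input shapes: as written, the _handle_exclusion_filter helper accepts a single dict, a list of dicts, or a list of pre-built LogExclusion objects. A small sketch of the three equivalent forms (field values are placeholders):

from google.cloud import logging_v2

# A single dict is wrapped into one LogExclusion ...
exclusions_a = {"name": "exclude-debug", "filter": "severity=DEBUG"}

# ... a list of dicts is converted element by element ...
exclusions_b = [
    {"name": "exclude-debug", "filter": "severity=DEBUG"},
    {"name": "exclude-info", "filter": "severity=INFO"},
]

# ... and already-constructed LogExclusion objects are passed through as-is.
exclusions_c = [
    logging_v2.types.LogExclusion(name="exclude-debug", filter="severity=DEBUG"),
]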