harishkrao commented on code in PR #28950:
URL: https://github.com/apache/airflow/pull/28950#discussion_r1104018107


##########
airflow/providers/databricks/sensors/databricks_table_changes.py:
##########
@@ -0,0 +1,170 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+"""This module contains Databricks sensors."""
+
+from __future__ import annotations
+
+from datetime import datetime, timedelta
+from typing import Sequence
+
+from airflow.exceptions import AirflowException
+from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
+from airflow.providers.databricks.sensors.databricks_sql import DatabricksSqlSensor
+from airflow.utils.context import Context
+
+
+class DatabricksTableChangesSensor(DatabricksSqlSensor):
+    """Sensor to detect changes in a Delta table.
+
+    :param databricks_conn_id: Reference to :ref:`Databricks
+        connection id<howto/connection:databricks>` (templated), defaults to
+        DatabricksSqlHook.default_conn_name
+    :param http_path: Optional string specifying HTTP path of Databricks SQL Endpoint or cluster.
+        If not specified, it should be either specified in the Databricks connection's
+        extra parameters, or ``sql_endpoint_name`` must be specified.
+    :param sql_endpoint_name: Optional name of Databricks SQL Endpoint. If not specified,
+        ``http_path`` must be provided as described above, defaults to None
+    :param session_configuration: An optional dictionary of Spark session parameters. If not
+        specified, it could be specified in the Databricks connection's extra parameters,
+        defaults to None
+    :param http_headers: An optional list of (k, v) pairs
+        that will be set as HTTP headers on every request. (templated)
+    :param _catalog: An optional initial catalog to use.
+        Requires DBR version 9.0+ (templated), defaults to ""
+    :param _schema: An optional initial schema to use.
+        Requires DBR version 9.0+ (templated), defaults to "default"
+    :param table_name: Name of the table used to generate the SQL query.
+    :param handler: Handler for DbApiHook.run() to return results, defaults to fetch_all_handler
+    :param client_parameters: Additional parameters internal to the Databricks SQL connector.
+    :param timestamp: Timestamp to check event history for a Delta table,
+        defaults to datetime.now() - timedelta(days=7)
+    :param change_filter_operator: Operator used in the filter condition when checking
+        for table changes, defaults to ">="
+    """
+
+    template_fields: Sequence[str] = ("databricks_conn_id", "_catalog", "_schema", "table_name", "timestamp")
+
+    def __init__(
+        self,
+        table_name: str,
+        timestamp: datetime = datetime.now() - timedelta(days=7),
+        change_filter_operator: str = ">=",
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.timestamp = timestamp
+        self.caller = "DatabricksTableChangesSensor"
+        self.change_filter_operator = change_filter_operator
+        self.table_name = table_name
+
+    def _get_hook(self) -> DatabricksSqlHook:
+        return DatabricksSqlHook(
+            self.databricks_conn_id,
+            self._http_path,
+            self._sql_endpoint_name,
+            self.session_config,
+            self.http_headers,
+            self._catalog,
+            self._schema,
+            self.caller,
+            **self.client_parameters,
+            **self.hook_params,
+        )
+
+    def _sql_sensor(self, sql):
+        hook = self._get_hook()
+        sql_result = hook.run(
+            sql,
+            handler=self.handler if self.do_xcom_push else None,
+        )
+        return sql_result
+
+    def _generate_query(
+        self,
+        prefix: str,
+        suffix: str,
+        joiner_val: str,
+        table_name: str,
+        time_range: str,
+        operation_filter: str,
+    ) -> str:
+        formatted_opts = ""
+        formatted_opts = f"{prefix} {table_name}{suffix} {joiner_val} 
'{time_range}'{operation_filter}"
+        self.log.debug("Formatted options: %s", formatted_opts)
+
+        return formatted_opts.strip()
+
+    @staticmethod
+    def get_previous_version(context: Context, lookup_key):
+        return context["ti"].xcom_pull(key=lookup_key, 
include_prior_dates=True)
+
+    @staticmethod
+    def set_version(context: Context, lookup_key, version):
+        context["ti"].xcom_push(key=lookup_key, value=version)
+
+    def get_current_table_version(self, table_name, time_range, operator):
+        _count_describe_literal = "SELECT MAX(version) AS versions FROM (DESCRIBE HISTORY"
+        _filter_predicate_literal = ") WHERE timestamp"
+        _operation_filter_literal = "AND operation NOT LIKE '%CONVERT%' AND operation NOT LIKE '%OPTIMIZE%' \

Review Comment:
   Yes, added `FSCK` and changed it to a `NOT IN` filter. Interestingly, after running a few such commands, I noticed that `FSCK` and `OPTIMIZE` are not recorded in the history of the Delta table.

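   And for anyone following along, a minimal usage sketch based on the docstring above. The connection id, endpoint name, and table here are placeholders, and the constructor surface may still change as this PR evolves.

```python
from datetime import datetime, timedelta

from airflow import DAG
from airflow.providers.databricks.sensors.databricks_table_changes import (
    DatabricksTableChangesSensor,
)

with DAG(
    dag_id="example_databricks_table_changes",  # placeholder DAG
    start_date=datetime(2023, 1, 1),
    schedule=None,
) as dag:
    wait_for_changes = DatabricksTableChangesSensor(
        task_id="wait_for_table_changes",
        databricks_conn_id="databricks_default",     # placeholder connection
        sql_endpoint_name="my_sql_endpoint",         # or pass http_path instead
        table_name="my_catalog.my_schema.my_table",  # placeholder table
        timestamp=datetime.now() - timedelta(days=1),
        change_filter_operator=">=",
    )
```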