ashb commented on code in PR #31398:
URL: https://github.com/apache/airflow/pull/31398#discussion_r1200652008
##########
airflow/providers/openlineage/utils/sqlparser.py:
##########
Review Comment:
Personal nit/bugbear of mine: I don't like `utils/` folders as they become a
dumping ground for all manner of unrelated files.
Could this perhaps be `airflow/providers/openlineage/sqlparser.py` instead?
##########
airflow/providers/common/sql/operators/sql.py:
##########
@@ -290,6 +290,32 @@ def prepare_template(self) -> None:
if isinstance(self.parameters, str):
self.parameters = ast.literal_eval(self.parameters)
+ def get_openlineage_facets_on_start(self):
+ try:
+ from airflow.providers.openlineage.extractors import
OperatorLineage
+ from airflow.providers.openlineage.utils.sqlparser import SQLParser
+ except ImportError:
+ return None
+
+ hook: DbApiHook = self.get_db_hook()
+
+ connection = hook.get_connection(getattr(hook, cast(str,
hook.conn_name_attr)))
+ try:
+ database_info = hook.get_database_info(connection)
+ except AttributeError:
+ self.log.debug("%s has no database info provided", hook)
+ return None
+
+ sql_parser = SQLParser(
+ dialect=hook.get_database_dialect(connection),
default_schema=hook.get_default_schema()
+ )
+
+ operator_lineage: OperatorLineage =
sql_parser.generate_openlineage_metadata_from_sql(
+ sql=self.sql, hook=hook, database_info=database_info,
database=self.database
Review Comment:
Ditto here -- not sure if this could throw?
##########
airflow/providers/openlineage/utils/sqlparser.py:
##########
@@ -0,0 +1,263 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable, Iterable
+
+from attr import define
Review Comment:
`attrs` is the recommended name to use now
```suggestion
from attrs import define
```
##########
airflow/providers/openlineage/utils/sqlparser.py:
##########
@@ -0,0 +1,263 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable, Iterable
+
+from attr import define
+
+from airflow.providers.openlineage.extractors.base import OperatorLineage
+from airflow.providers.openlineage.utils.sql import (
+ TablesHierarchy,
+ create_information_schema_query,
+ get_table_schemas,
+)
+from airflow.typing_compat import TypedDict
+from openlineage.client.facet import ExtractionError, ExtractionErrorRunFacet,
SqlJobFacet
+from openlineage.client.run import Dataset
+from openlineage.common.sql import DbTableMeta, SqlMeta, parse
+
+if TYPE_CHECKING:
+ from airflow.hooks.base import BaseHook
+
+DEFAULT_NAMESPACE = "default"
+DEFAULT_INFORMATION_SCHEMA_COLUMNS = [
+ "table_schema",
+ "table_name",
+ "column_name",
+ "ordinal_position",
+ "udt_name",
+]
+DEFAULT_INFORMATION_SCHEMA_TABLE_NAME = "information_schema.columns"
+
+
+def default_normalize_name_method(name: str) -> str:
+ return name.lower()
+
+
+class GetTableSchemasParams(TypedDict):
+ """get_table_schemas params"""
+
+ normalize_name: Callable[[str], str]
+ is_cross_db: bool
+ information_schema_columns: list[str]
+ information_schema_table: str
+ is_uppercase_names: bool
+ allow_trailing_semicolon: bool
+ database: str | None
+
+
+@define
+class DatabaseInfo:
+ """
+ Contains database specific information needed to process
+ SQL statement parse result.
+
+ :param scheme: Scheme part of URI in OpenLineage namespace.
+ :param authority: Authority part of URI in OpenLineage namespace.
+ :param database: Takes precedence over parsed database name.
+ :param information_schema_columns: List of columns names from information
schema table.
+ :param information_schema_table_name: Information schema table name.
+ :param is_information_schema_cross_db: Specifies if information schema
contains
+ cross-database data.
+ :param is_uppercase_names: Specifies if database accepts only uppercase
names (e.g. Snowflake).
+ :param allow_trailing_semicolon: For some databases such as Trino,
+ trailing semicolon can cause a syntax error. If True it adds semicolon
at the end of query.
+ :param normalize_name_method: Method to normalize database, schema and
table names.
+ Defaults to `name.lower()`.
+ """
+
+ scheme: str
+ authority: str | None = None
+ database: str | None = None
+ information_schema_columns: list[str] = DEFAULT_INFORMATION_SCHEMA_COLUMNS
+ information_schema_table_name: str = DEFAULT_INFORMATION_SCHEMA_TABLE_NAME
+ is_information_schema_cross_db: bool = False
+ is_uppercase_names: bool = False
+ allow_trailing_semicolon: bool = True
+ normalize_name_method: Callable[[str], str] = default_normalize_name_method
+
+
+class SQLParser:
+ """
+ An interface for `openlineage_sql` library.
+
+ :param dialect: dialect specific to the database
+ :param default_schema: schema applied to each table with no schema parsed
+ """
+
+ def __init__(self, dialect: str | None = None, default_schema: str | None
= None) -> None:
+ self.dialect = dialect
+ self.default_schema = default_schema
+
+ def parse(self, sql: list[str] | str) -> SqlMeta | None:
+ """Parse a single or a list of SQL statements"""
+ parse_result: SqlMeta | None = parse(
+ sql=sql, dialect=self.dialect, default_schema=self.default_schema
+ )
+ return parse_result
+
+ def parse_table_schemas(
+ self,
+ hook: BaseHook,
+ inputs: list[DbTableMeta],
+ outputs: list[DbTableMeta],
+ database_info: DatabaseInfo,
+ namespace: str = DEFAULT_NAMESPACE,
+ database: str | None = None,
+ ) -> tuple[list[Dataset], ...]:
+ """Parse schemas for input and output tables."""
+ database_kwargs: GetTableSchemasParams = dict(
+ normalize_name=database_info.normalize_name_method,
+ is_cross_db=database_info.is_information_schema_cross_db,
+
information_schema_columns=database_info.information_schema_columns,
+
information_schema_table=database_info.information_schema_table_name,
+ is_uppercase_names=database_info.is_uppercase_names,
+ allow_trailing_semicolon=database_info.allow_trailing_semicolon,
+ database=database or database_info.database,
+ )
+ return get_table_schemas(
+ hook,
+ namespace,
+ database or database_info.database,
+ SQLParser.create_information_schema_query(tables=inputs,
**database_kwargs) if inputs else None,
+ SQLParser.create_information_schema_query(tables=outputs,
**database_kwargs) if outputs else None,
+ )
+
+ def generate_openlineage_metadata_from_sql(
+ self,
+ sql: list[str] | str,
+ hook: BaseHook,
+ database_info: DatabaseInfo,
+ database: str | None = None,
+ ) -> OperatorLineage:
+ """
+ Parses SQL statement(s) and generate OpenLineage metadata:
+ * input tables with schemas parsed
+ * output tables with schemas parsed
+ * run facets
+ * job facets
+
+ :param sql: a SQL statement or list of SQL statement to be parsed
+ :param hook: Airflow Hook used to connect to the database
+ :param database_info: database specific information
+ :param database: when passed it takes precedence over parsed database
name
+ """
+ job_facets = {"sql": SqlJobFacet(query=SQLParser.normalize_sql(sql))}
+
+ parse_result: SqlMeta | None = self.parse(sql)
+ if not parse_result:
+ return OperatorLineage(job_facets=job_facets)
+
+ run_facets: dict = {}
+
+ if parse_result.errors:
+ run_facets["extractionError"] = ExtractionErrorRunFacet(
+ totalTasks=len(sql) if isinstance(sql, list) else 1,
+ failedTasks=len(parse_result.errors),
+ errors=[
+ ExtractionError(
+ errorMessage=error.message,
+ stackTrace=None,
+ task=error.origin_statement,
+ taskNumber=error.index,
+ )
+ for error in parse_result.errors
+ ],
+ )
+
+ namespace = (
+ f"{database_info.scheme}://{database_info.authority}"
+ if database_info.authority
+ else database_info.scheme
+ )
+ inputs, outputs = self.parse_table_schemas(
+ hook=hook,
+ inputs=parse_result.in_tables,
+ outputs=parse_result.out_tables,
+ namespace=namespace,
+ database=database,
+ database_info=database_info,
+ )
+
+ return OperatorLineage(
+ inputs=inputs,
+ outputs=outputs,
+ run_facets=run_facets,
+ job_facets=job_facets,
+ )
+
+ @staticmethod
+ def normalize_sql(sql: str | Iterable[str]) -> str:
+ """Makes sure to return a semicolon-separated SQL statements"""
+ if isinstance(sql, str):
+ sql = [stmt for stmt in sql.split(";") if stmt != ""]
+ sql = [obj for stmt in sql for obj in stmt.split(";") if obj != ""]
Review Comment:
This won't work for pg/PSQL's multiline blocks:
```sql
CREATE FUNCTION somefunc() RETURNS integer AS $$
BEGIN
...
END;
$$ LANGUAGE plpgsql```
(Can be fixed in another PR as I'm guessing this is porting from OL Airflow
plugin?)
##########
airflow/providers/common/sql/operators/sql.py:
##########
@@ -290,6 +290,32 @@ def prepare_template(self) -> None:
if isinstance(self.parameters, str):
self.parameters = ast.literal_eval(self.parameters)
+ def get_openlineage_facets_on_start(self):
+ try:
+ from airflow.providers.openlineage.extractors import
OperatorLineage
+ from airflow.providers.openlineage.utils.sqlparser import SQLParser
+ except ImportError:
+ return None
+
+ hook: DbApiHook = self.get_db_hook()
+
+ connection = hook.get_connection(getattr(hook, cast(str,
hook.conn_name_attr)))
+ try:
+ database_info = hook.get_database_info(connection)
+ except AttributeError:
+ self.log.debug("%s has no database info provided", hook)
+ return None
+
+ sql_parser = SQLParser(
+ dialect=hook.get_database_dialect(connection),
default_schema=hook.get_default_schema()
Review Comment:
Worth putting this in a try/except block so it doesn't fail if OL doesn't
know about the particular conn dialect?
##########
airflow/providers/openlineage/utils/sqlparser.py:
##########
@@ -0,0 +1,263 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable, Iterable
+
+from attr import define
+
+from airflow.providers.openlineage.extractors.base import OperatorLineage
+from airflow.providers.openlineage.utils.sql import (
+ TablesHierarchy,
+ create_information_schema_query,
+ get_table_schemas,
+)
+from airflow.typing_compat import TypedDict
+from openlineage.client.facet import ExtractionError, ExtractionErrorRunFacet,
SqlJobFacet
+from openlineage.client.run import Dataset
+from openlineage.common.sql import DbTableMeta, SqlMeta, parse
+
+if TYPE_CHECKING:
+ from airflow.hooks.base import BaseHook
+
+DEFAULT_NAMESPACE = "default"
+DEFAULT_INFORMATION_SCHEMA_COLUMNS = [
+ "table_schema",
+ "table_name",
+ "column_name",
+ "ordinal_position",
+ "udt_name",
+]
+DEFAULT_INFORMATION_SCHEMA_TABLE_NAME = "information_schema.columns"
+
+
+def default_normalize_name_method(name: str) -> str:
+ return name.lower()
+
+
+class GetTableSchemasParams(TypedDict):
+ """get_table_schemas params"""
+
+ normalize_name: Callable[[str], str]
+ is_cross_db: bool
+ information_schema_columns: list[str]
+ information_schema_table: str
+ is_uppercase_names: bool
+ allow_trailing_semicolon: bool
+ database: str | None
+
+
+@define
+class DatabaseInfo:
+ """
+ Contains database specific information needed to process
+ SQL statement parse result.
+
+ :param scheme: Scheme part of URI in OpenLineage namespace.
+ :param authority: Authority part of URI in OpenLineage namespace.
+ :param database: Takes precedence over parsed database name.
+ :param information_schema_columns: List of columns names from information
schema table.
+ :param information_schema_table_name: Information schema table name.
+ :param is_information_schema_cross_db: Specifies if information schema
contains
+ cross-database data.
+ :param is_uppercase_names: Specifies if database accepts only uppercase
names (e.g. Snowflake).
+ :param allow_trailing_semicolon: For some databases such as Trino,
+ trailing semicolon can cause a syntax error. If True it adds semicolon
at the end of query.
+ :param normalize_name_method: Method to normalize database, schema and
table names.
+ Defaults to `name.lower()`.
+ """
+
+ scheme: str
+ authority: str | None = None
+ database: str | None = None
+ information_schema_columns: list[str] = DEFAULT_INFORMATION_SCHEMA_COLUMNS
+ information_schema_table_name: str = DEFAULT_INFORMATION_SCHEMA_TABLE_NAME
Review Comment:
Is this generic enough to be a worthwhile default, or would making it a
required field make more sense?
##########
airflow/providers/common/sql/hooks/sql.py:
##########
@@ -517,3 +518,30 @@ def test_connection(self):
message = str(e)
return status, message
+
+ def get_database_info(self, connection):
+ from airflow.providers.openlineage.utils.sqlparser import DatabaseInfo
+
+ return DatabaseInfo(
+ scheme=self.get_database_dialect(connection),
authority=self.get_authority(connection)
+ )
+
+ def get_database_dialect(self, connection):
+ """Method used for SQL parsing. Naively tries to use Connection's
conn_type"""
+ return connection.conn_type
+
+ def get_authority(self, connection):
Review Comment:
Is this envisaged/meant to be public or not?
```suggestion
def _get_authority(self, connection):
```
##########
generated/provider_dependencies.json:
##########
@@ -231,7 +231,9 @@
"deps": [
"sqlparse>=0.4.2"
],
- "cross-providers-deps": []
+ "cross-providers-deps": [
+ "openlineage"
Review Comment:
Does this make it a required dep or optional?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]