Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

test(ingest/sql): refactor CLL generator + add tests #10580

Merged
merged 22 commits into from
May 24, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -262,7 +262,7 @@ def extract(self) -> Optional[TaskMetadata]:
)

operator: "BigQueryInsertJobOperator" = self.operator
sql = operator.configuration.get("query")
sql = operator.configuration.get("query", {}).get("query")
if not sql:
self.log.warning("No query found in BigQueryInsertJobOperator")
return None
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -247,10 +247,11 @@ def _extract_lineage(
SQL_PARSING_RESULT_KEY, None
)
if sql_parsing_result:
if sql_parsing_result.debug_info.error:
datajob.properties["datahub_sql_parser_error"] = str(
sql_parsing_result.debug_info.error
)
if error := sql_parsing_result.debug_info.error:
logger.info(f"SQL parsing error: {error}", exc_info=error)
datajob.properties[
"datahub_sql_parser_error"
] = f"{type(error).__name__}: {error}"
if not sql_parsing_result.debug_info.table_error:
input_urns.extend(sql_parsing_result.in_tables)
output_urns.extend(sql_parsing_result.out_tables)
Expand Down

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -373,6 +373,7 @@ def test_airflow_plugin(
golden_path=golden_path,
ignore_paths=[
# TODO: If we switched to Git urls, maybe we could get this to work consistently.
r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['datahub_sql_parser_error'\]",
r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['openlineage_.*'\]",
],
)
Expand Down
6 changes: 4 additions & 2 deletions metadata-ingestion-modules/airflow-plugin/tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
# and then run "tox" from this directory.

[tox]
envlist = py38-airflow21, py38-airflow22, py310-airflow24, py310-airflow26, py310-airflow27, py310-airflow28
envlist = py38-airflow21, py38-airflow22, py310-airflow24, py310-airflow26, py310-airflow27, py310-airflow28, py310-airflow29

[testenv]
use_develop = true
Expand All @@ -19,13 +19,15 @@ deps =
airflow26: apache-airflow~=2.6.0
airflow27: apache-airflow~=2.7.0
airflow28: apache-airflow~=2.8.0
airflow29: apache-airflow~=2.9.0

# Respect the Airflow constraints files.
# We can't make ourselves work with the constraints of Airflow < 2.3.
py310-airflow24: -c https://raw.githubusercontent.com/apache/airflow/constraints-2.4.3/constraints-3.10.txt
py310-airflow26: -c https://raw.githubusercontent.com/apache/airflow/constraints-2.6.3/constraints-3.10.txt
py310-airflow27: -c https://raw.githubusercontent.com/apache/airflow/constraints-2.7.3/constraints-3.10.txt
py310-airflow28: -c https://raw.githubusercontent.com/apache/airflow/constraints-2.8.1/constraints-3.10.txt
py310-airflow29: -c https://raw.githubusercontent.com/apache/airflow/constraints-2.9.1/constraints-3.10.txt

# Before pinning to the constraint files, we previously left the dependencies
# more open. There were a number of packages for which this caused issues.
Expand Down Expand Up @@ -53,6 +55,6 @@ commands =
[testenv:py310-airflow24]
extras = dev,integration-tests,plugin-v2,test-airflow24

[testenv:py310-airflow{26,27,28}]
[testenv:py310-airflow{26,27,28,29}]
extras = dev,integration-tests,plugin-v2

4 changes: 2 additions & 2 deletions metadata-ingestion/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,8 +98,8 @@

sqlglot_lib = {
# Using an Acryl fork of sqlglot.
# https://github.com/tobymao/sqlglot/compare/main...hsheth2:sqlglot:hsheth?expand=1
"acryl-sqlglot[rs]==23.17.1.dev10",
# https://github.com/tobymao/sqlglot/compare/main...hsheth2:sqlglot:main?expand=1
"acryl-sqlglot[rs]==24.0.1.dev7",
}

classification_lib = {
Expand Down
2 changes: 1 addition & 1 deletion metadata-ingestion/src/datahub/ingestion/api/report.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def to_pure_python_obj(some_val: Any) -> Any:
if isinstance(some_val, SupportsAsObj):
return some_val.as_obj()
elif isinstance(some_val, pydantic.BaseModel):
return some_val.dict()
return Report.to_pure_python_obj(some_val.dict())
elif dataclasses.is_dataclass(some_val):
return dataclasses.asdict(some_val)
elif isinstance(some_val, list):
Expand Down
8 changes: 7 additions & 1 deletion metadata-ingestion/src/datahub/sql_parsing/_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,13 @@ def __lt__(self, other: "_FrozenModel") -> bool:
for field in self.__fields__:
self_v = getattr(self, field)
other_v = getattr(other, field)
if self_v != other_v:

# Handle None values by pushing them to the end of the ordering.
if self_v is None and other_v is not None:
return False
elif self_v is not None and other_v is None:
return True
elif self_v != other_v:
return self_v < other_v

return False
Expand Down
75 changes: 75 additions & 0 deletions metadata-ingestion/src/datahub/sql_parsing/query_types.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
from typing import Optional, Tuple

import sqlglot

from datahub.sql_parsing.sql_parsing_common import QueryType, QueryTypeProps
from datahub.sql_parsing.sqlglot_utils import (
DialectOrStr,
get_dialect,
is_dialect_instance,
)


def _is_temp_table(table: sqlglot.exp.Table, dialect: sqlglot.Dialect) -> bool:
    """Return True if *table* looks like a temporary table.

    A table is treated as temporary when its identifier node carries a truthy
    ``temporary`` arg, or when the dialect is Redshift and the name uses the
    ``#`` temp-table prefix convention.

    Args:
        table: The parsed table expression to inspect.
        dialect: The sqlglot dialect the query was parsed with.
    """
    identifier: sqlglot.exp.Identifier = table.this

    # Coerce with bool(): args.get("temporary") may be None or an arbitrary
    # truthy node, and the signature promises a real bool to callers.
    return bool(identifier.args.get("temporary")) or (
        is_dialect_instance(dialect, "redshift") and identifier.name.startswith("#")
    )


def _get_create_type_from_kind(kind: Optional[str]) -> QueryType:
    """Map a CREATE statement's ``kind`` string onto the matching QueryType.

    Falls back to CREATE_OTHER when the kind is missing or unrecognized.
    """
    if not kind:
        return QueryType.CREATE_OTHER
    if "TABLE" in kind:
        return QueryType.CREATE_TABLE_AS_SELECT
    if "VIEW" in kind:
        return QueryType.CREATE_VIEW
    return QueryType.CREATE_OTHER


def get_query_type_of_sql(
    expression: sqlglot.exp.Expression, dialect: DialectOrStr
) -> Tuple[QueryType, QueryTypeProps]:
    """Classify a parsed sqlglot expression into a (QueryType, props) pair.

    CREATE statements get special treatment: plain column-list DDL is
    distinguished from CTAS / CREATE VIEW, and temp tables are flagged in the
    returned props. Everything else is classified by expression type, with
    UNKNOWN as the fallback.
    """
    dialect = get_dialect(dialect)
    props: QueryTypeProps = {}

    if isinstance(expression, sqlglot.exp.Create):
        if is_create_table_ddl(expression):
            return QueryType.CREATE_DDL, props

        kind = expression.args.get("kind")
        if kind:
            kind = kind.upper()
            props["kind"] = kind

        target = expression.this
        has_temporary_property = any(
            isinstance(prop, sqlglot.exp.TemporaryProperty)
            for prop in (expression.args.get("properties") or [])
        )
        if has_temporary_property or _is_temp_table(target, dialect=dialect):
            props["temporary"] = True

        return _get_create_type_from_kind(kind), props

    # UPGRADE: Once we use Python 3.10, replace this with a match expression.
    # NOTE: insertion order matters — subclasses (e.g. Select) must be tested
    # before their base class (Query).
    mapping = {
        sqlglot.exp.Select: QueryType.SELECT,
        sqlglot.exp.Insert: QueryType.INSERT,
        sqlglot.exp.Update: QueryType.UPDATE,
        sqlglot.exp.Delete: QueryType.DELETE,
        sqlglot.exp.Merge: QueryType.MERGE,
        sqlglot.exp.Query: QueryType.SELECT,  # unions, etc. are also selects
    }
    for expression_cls, query_type in mapping.items():
        if isinstance(expression, expression_cls):
            return query_type, props
    return QueryType.UNKNOWN, {}


def is_create_table_ddl(statement: sqlglot.exp.Expression) -> bool:
    """Return True for a plain CREATE whose target is a Schema node.

    A Schema target corresponds to an inline column-list definition, as
    opposed to a CTAS/CREATE VIEW whose target wraps a query.
    """
    if not isinstance(statement, sqlglot.exp.Create):
        return False
    return isinstance(statement.this, sqlglot.exp.Schema)
Loading
Loading