Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

MINOR - Skip delta tests for 3.11 #14398

Merged
merged 17 commits into from
Dec 18, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,7 @@ def get_tables_name_and_type(self) -> Optional[Iterable[Tuple[str, str]]]:
:return: tables or views, depending on config
"""
schema_name = self.context.database_schema
for table in self.spark.catalog.listTables(schema_name):
for table in self.spark.catalog.listTables(dbName=schema_name):
try:
table_name = table.name
table_fqn = fqn.build(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@
"""
import json
import logging
import textwrap
import traceback
from collections import defaultdict
from copy import deepcopy
Expand Down Expand Up @@ -200,7 +201,12 @@ def _parse_xlets(xlet: Any) -> None:

@_parse_xlets.register
@deprecated(
message="Please update your inlets/outlets to follow <TODO DOCS>",
message=textwrap.dedent(
"""
Please update your inlets/outlets to follow
https://docs.open-metadata.org/connectors/pipeline/airflow/configuring-lineage
"""
),
release="1.4.0",
)
def dictionary_lineage_annotation(xlet: dict) -> Dict[str, List[OMEntity]]:
Expand Down
12 changes: 6 additions & 6 deletions ingestion/tests/integration/ometa/test_ometa_domains_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,13 +136,13 @@ def test_create(self):
"""

res: Domain = self.metadata.create_or_update(data=self.create_domain)
self.assertEquals(res.name, self.create_domain.name)
self.assertEquals(res.description, self.create_domain.description)
self.assertEqual(res.name, self.create_domain.name)
self.assertEqual(res.description, self.create_domain.description)

res: DataProduct = self.metadata.create_or_update(data=self.create_data_product)
self.assertEquals(res.name, self.create_data_product.name)
self.assertEquals(res.description, self.create_data_product.description)
self.assertEquals(res.domain.name, self.create_data_product.domain.__root__)
self.assertEqual(res.name, self.create_data_product.name)
self.assertEqual(res.description, self.create_data_product.description)
self.assertEqual(res.domain.name, self.create_data_product.domain.__root__)

def test_get_name(self):
"""We can fetch Domains & Data Products by name"""
Expand Down Expand Up @@ -189,4 +189,4 @@ def test_patch_domain(self):
entity=Dashboard, fqn=self.dashboard.fullyQualifiedName, fields=["domain"]
)

self.assertEquals(updated_dashboard.domain.name, domain.name.__root__)
self.assertEqual(updated_dashboard.domain.name, domain.name.__root__)
2 changes: 1 addition & 1 deletion ingestion/tests/integration/ometa/test_ometa_es_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -285,7 +285,7 @@ def test_get_query_with_lineage_filter(self):
'{"query": {"bool": {"must": [{"term": {"processedLineage": true}},'
' {"term": {"service.name.keyword": "my_service"}}]}}}'
)
self.assertEquals(res, quote(expected))
self.assertEqual(res, quote(expected))

def test_get_queries_with_lineage(self):
"""Check the payload from ES"""
Expand Down
2 changes: 1 addition & 1 deletion ingestion/tests/integration/test_suite/test_workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -225,7 +225,7 @@ def test_create_workflow_config_with_table_without_suite(self):
)[0]

self.assertIsNone(table.testSuite)
self.assertEquals(
self.assertEqual(
table_and_tests.right.executable_test_suite.name.__root__,
self.table.fullyQualifiedName.__root__ + ".testSuite",
)
Expand Down
14 changes: 7 additions & 7 deletions ingestion/tests/unit/airflow/test_lineage_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ def assertXLetsEquals(self, first: List[XLets], second: List[XLets]):
For this test, we will assume that by having the same FQN, the
entity type will also be the same.
"""
self.assertEquals(len(first), len(second))
self.assertEqual(len(first), len(second))

for xlet1 in first:
match = False
Expand Down Expand Up @@ -305,7 +305,7 @@ def test_om_entity_serializer(self):
fqn="FQN",
key="test",
)
self.assertEquals(
self.assertEqual(
str(om_entity),
'{"entity": "metadata.generated.schema.entity.data.table.Table", "fqn": "FQN", "key": "test"}',
)
Expand All @@ -315,7 +315,7 @@ def test_om_entity_serializer(self):
fqn="FQN",
key="test",
)
self.assertEquals(
self.assertEqual(
str(om_entity),
'{"entity": "metadata.generated.schema.entity.data.container.Container", "fqn": "FQN", "key": "test"}',
)
Expand All @@ -332,7 +332,7 @@ def test_str_deserializer(self):
"""
self.assertIsNone(_parse_xlets("random"))

self.assertEquals(
self.assertEqual(
_parse_xlets(
'{"entity": "metadata.generated.schema.entity.data.table.Table", "fqn": "FQN", "key": "test"}'
),
Expand All @@ -347,7 +347,7 @@ def test_str_deserializer(self):
},
)

self.assertEquals(
self.assertEqual(
_parse_xlets(
'{"entity": "metadata.generated.schema.entity.data.container.Container", "fqn": "FQN", "key": "test"}'
),
Expand All @@ -362,7 +362,7 @@ def test_str_deserializer(self):
},
)

self.assertEquals(
self.assertEqual(
_parse_xlets(
'{"entity": "metadata.generated.schema.entity.data.dashboard.Dashboard", "fqn": "FQN", "key": "test"}'
),
Expand All @@ -385,7 +385,7 @@ def test_airflow_serializer(self):
key="test",
)

self.assertEquals(
self.assertEqual(
serialize(om_entity).get("__data__"),
'{"entity": "metadata.generated.schema.entity.data.table.Table", "fqn": "FQN", "key": "test"}',
)
20 changes: 10 additions & 10 deletions ingestion/tests/unit/readers/test_df_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,8 @@ def test_dsv_no_extension_reader(self):
self.assertIsNotNone(df_list)
self.assertTrue(len(df_list))

self.assertEquals(df_list[0].shape, (5, 2))
self.assertEquals(
self.assertEqual(df_list[0].shape, (5, 2))
self.assertEqual(
list(df_list[0].columns), ["transaction_id", "transaction_value"]
)

Expand All @@ -62,8 +62,8 @@ def test_dsv_reader(self):
self.assertIsNotNone(df_list)
self.assertTrue(len(df_list))

self.assertEquals(df_list[0].shape, (5, 2))
self.assertEquals(
self.assertEqual(df_list[0].shape, (5, 2))
self.assertEqual(
list(df_list[0].columns), ["transaction_id", "transaction_value"]
)

Expand All @@ -81,8 +81,8 @@ def test_dsv_reader_with_separator(self):
self.assertIsNotNone(df_list)
self.assertTrue(len(df_list))

self.assertEquals(df_list[0].shape, (5, 2))
self.assertEquals(
self.assertEqual(df_list[0].shape, (5, 2))
self.assertEqual(
list(df_list[0].columns), ["transaction_id", "transaction_value"]
)

Expand All @@ -98,8 +98,8 @@ def test_json_reader(self):
self.assertIsNotNone(df_list)
self.assertTrue(len(df_list))

self.assertEquals(df_list[0].shape, (4, 4))
self.assertEquals(
self.assertEqual(df_list[0].shape, (4, 4))
self.assertEqual(
list(df_list[0].columns),
["name", "id", "version", "Company"],
)
Expand All @@ -116,8 +116,8 @@ def test_avro_reader(self):
self.assertIsNotNone(df_list)
self.assertTrue(len(df_list))

self.assertEquals(df_list[0].shape, (4, 8))
self.assertEquals(
self.assertEqual(df_list[0].shape, (4, 8))
self.assertEqual(
list(df_list[0].columns),
[
"Boolean",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -250,7 +250,7 @@ def test_explore_col_parser(self):
),
]

self.assertEquals(cols, expected_cols)
self.assertEqual(cols, expected_cols)

def test_view_col_parser(self):
"""
Expand Down Expand Up @@ -278,4 +278,4 @@ def test_view_col_parser(self):
),
]

self.assertEquals(cols, expected_cols)
self.assertEqual(cols, expected_cols)
6 changes: 6 additions & 0 deletions ingestion/tests/unit/topology/database/test_deltalake.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@
Here we don't need to patch, as we can just create our own metastore
"""
import shutil
import sys
import unittest
from datetime import date, datetime
from unittest import TestCase

Expand Down Expand Up @@ -100,6 +102,10 @@
)


@unittest.skipUnless(
sys.version_info < (3, 11),
reason="https://github.com/open-metadata/OpenMetadata/issues/14408",
)
class DeltaLakeUnitTest(TestCase):
"""
Add method validations from Deltalake ingestion
Expand Down
12 changes: 6 additions & 6 deletions ingestion/tests/unit/topology/pipeline/test_airflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,7 @@ def test_parsing(self):
owners=None,
)

self.assertEquals(
self.assertEqual(
dag.tasks[0].inlets,
[
{
Expand All @@ -162,7 +162,7 @@ def test_parsing(self):
}
],
)
self.assertEquals(
self.assertEqual(
dag.tasks[1].outlets,
[
{
Expand All @@ -178,23 +178,23 @@ def test_get_schedule_interval(self):
"""

pipeline_data = {"schedule_interval": None}
self.assertEquals(get_schedule_interval(pipeline_data), None)
self.assertIsNone(get_schedule_interval(pipeline_data))

pipeline_data = {"schedule_interval": {"__var": 86400.0, "__type": "timedelta"}}
self.assertEquals(get_schedule_interval(pipeline_data), "1 day, 0:00:00")
self.assertEqual(get_schedule_interval(pipeline_data), "1 day, 0:00:00")

pipeline_data = {
"timetable": {
"__type": "airflow.timetables.simple.OnceTimetable",
"__var": {},
}
}
self.assertEquals(get_schedule_interval(pipeline_data), "@once")
self.assertEqual(get_schedule_interval(pipeline_data), "@once")

pipeline_data = {
"timetable": {
"__type": "airflow.timetables.interval.CronDataIntervalTimetable",
"__var": {"expression": "*/2 * * * *", "timezone": "UTC"},
}
}
self.assertEquals(get_schedule_interval(pipeline_data), "*/2 * * * *")
self.assertEqual(get_schedule_interval(pipeline_data), "*/2 * * * *")
2 changes: 1 addition & 1 deletion ingestion/tests/unit/utils/test_deprecation.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def test_deprecation_warning(self) -> None:
self.deprecated_call()

# Verify the result
self.assertEquals(len(warn), 1)
self.assertEqual(len(warn), 1)
self.assertTrue(issubclass(warn[0].category, DeprecationWarning))
self.assertTrue("This is a deprecation" in str(warn[0].message))
self.assertTrue("x.y.z" in str(warn[0].message))
8 changes: 4 additions & 4 deletions ingestion/tests/unit/utils/test_stored_procedures.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,28 +21,28 @@ class StoredProceduresTests(TestCase):

def test_get_procedure_name_from_call(self):
"""Check that we properly parse CALL queries"""
self.assertEquals(
self.assertEqual(
get_procedure_name_from_call(
query_text="CALL db.schema.procedure_name(...)",
),
"procedure_name",
)

self.assertEquals(
self.assertEqual(
get_procedure_name_from_call(
query_text="CALL schema.procedure_name(...)",
),
"procedure_name",
)

self.assertEquals(
self.assertEqual(
get_procedure_name_from_call(
query_text="CALL procedure_name(...)",
),
"procedure_name",
)

self.assertEquals(
self.assertEqual(
get_procedure_name_from_call(
query_text="CALL DB.SCHEMA.PROCEDURE_NAME(...)",
),
Expand Down
10 changes: 4 additions & 6 deletions ingestion/tests/unit/workflow/test_base_workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,14 +163,12 @@ def test_workflow_executes(self):
@pytest.mark.order(2)
def test_workflow_status(self):
# Everything is processed properly in the Source
self.assertEquals(
self.workflow.source.status.records, ["0", "1", "2", "3", "4"]
)
self.assertEquals(len(self.workflow.source.status.failures), 0)
self.assertEqual(self.workflow.source.status.records, ["0", "1", "2", "3", "4"])
self.assertEqual(len(self.workflow.source.status.failures), 0)

# We catch one error in the Sink
self.assertEquals(len(self.workflow.steps[0].status.records), 4)
self.assertEquals(len(self.workflow.steps[0].status.failures), 1)
self.assertEqual(len(self.workflow.steps[0].status.records), 4)
self.assertEqual(len(self.workflow.steps[0].status.failures), 1)

@pytest.mark.order(3)
def test_workflow_raise_status(self):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,8 @@ Configure and schedule Deltalake metadata and profiler workflows from the OpenMe

## Requirements

{%inlineCallout icon="description" bold="OpenMetadata 0.12 or later" href="/deployment"%}
To deploy OpenMetadata, check the Deployment guides.
{%/inlineCallout%}
Deltalake requires Python 3.8, 3.9, or 3.10 to run. We do not yet support the Delta connector
for Python 3.11.

## Metadata Ingestion

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,11 +38,8 @@ Configure and schedule Deltalake metadata and profiler workflows from the OpenMe

## Requirements

{%inlineCallout icon="description" bold="OpenMetadata 0.12 or later" href="/deployment"%}
To deploy OpenMetadata, check the Deployment guides.
{%/inlineCallout%}


Deltalake requires Python 3.8, 3.9, or 3.10 to run. We do not yet support the Delta connector
for Python 3.11.

### Python Requirements

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -233,7 +233,7 @@ public abstract class EntityResourceTest<T extends EntityInterface, K extends Cr
public static final String DATA_CONSUMER_ROLE_NAME = "DataConsumer";

public static final String ENTITY_LINK_MATCH_ERROR =
"[entityLink must match \"^(?U)<#E::\\w+::[\\w'\\- .&/:+\"\\\\()$#%]+>$\"]";
"[entityLink must match \"(?U)^<#E::\\w+::[\\w'\\- .&/:+\"\\\\()$#%]+>$\"]";

// Random unicode string generator to test entity name accepts all the unicode characters
protected static final RandomStringGenerator RANDOM_STRING_GENERATOR =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,7 @@ void post_feedWithInvalidAbout_4xx() {
// Create thread without addressed to entity in the request
CreateThread create = create().withFrom(USER.getName()).withAbout("<>"); // Invalid EntityLink

String failureReason = "[about must match \"^(?U)<#E::\\w+::[\\w'\\- .&/:+\"\\\\()$#%]+>$\"]";
String failureReason = "[about must match \"(?U)^<#E::\\w+::[\\w'\\- .&/:+\"\\\\()$#%]+>$\"]";
assertResponseContains(() -> createThread(create, USER_AUTH_HEADERS), BAD_REQUEST, failureReason);

create.withAbout("<#E::>"); // Invalid EntityLink - missing entityType and entityId
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -351,7 +351,7 @@ void testImportInvalidCsv() {
Awaitility.await().atMost(4, TimeUnit.SECONDS).until(() -> true);
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
String[] expectedRows = {
resultsHeader, getFailedRecord(record, "[name must match \"\"^(?U)[\\w'\\- .&()%]+$\"\"]")
resultsHeader, getFailedRecord(record, "[name must match \"\"(?U)^[\\w'\\- .&()%]+$\"\"]")
};
assertRows(result, expectedRows);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ public void post_entityCreateWithInvalidName_400() {
// Names can't start with capital letter, can't have space, hyphen, apostrophe
String[] tests = {"a bc", "a-bc", "a'b"};

String error = "[name must match \"^(?U)[\\w]+$\"]";
String error = "[name must match \"(?U)^[\\w]+$\"]";
CreateType create = createRequest("placeHolder", "", "", null);
for (String test : tests) {
LOG.info("Testing with the name {}", test);
Expand Down
Loading
Loading