diff --git a/bootstrap/sql/migrations/native/1.6.0/mysql/postDataMigrationSQLScript.sql b/bootstrap/sql/migrations/native/1.6.0/mysql/postDataMigrationSQLScript.sql index 94575132ae6b..f0c7f6123705 100644 --- a/bootstrap/sql/migrations/native/1.6.0/mysql/postDataMigrationSQLScript.sql +++ b/bootstrap/sql/migrations/native/1.6.0/mysql/postDataMigrationSQLScript.sql @@ -4,4 +4,13 @@ INNER JOIN test_case tc ON dqdts.entityFQNHash = tc.fqnHash SET dqdts.json = JSON_SET(dqdts.json, '$.testCaseFQN', tc.json->'$.fullyQualifiedName', '$.id', (SELECT UUID()) -); \ No newline at end of file +); + +-- Add id column to data_quality_data_time_series table +-- after we have added the id values to the records +ALTER TABLE data_quality_data_time_series +ADD COLUMN id VARCHAR(36) GENERATED ALWAYS AS (json ->> '$.id') STORED NOT NULL, +ADD CONSTRAINT UNIQUE (id); + +-- Create index on id column +CREATE INDEX data_quality_data_time_series_id_index ON data_quality_data_time_series (id); \ No newline at end of file diff --git a/bootstrap/sql/migrations/native/1.6.0/mysql/schemaChanges.sql b/bootstrap/sql/migrations/native/1.6.0/mysql/schemaChanges.sql index af2f0ae4d9cf..e69de29bb2d1 100644 --- a/bootstrap/sql/migrations/native/1.6.0/mysql/schemaChanges.sql +++ b/bootstrap/sql/migrations/native/1.6.0/mysql/schemaChanges.sql @@ -1,5 +0,0 @@ -ALTER TABLE data_quality_data_time_series -ADD COLUMN id VARCHAR(36) GENERATED ALWAYS AS (json ->> '$.id') STORED NOT NULL, -ADD CONSTRAINT UNIQUE (id); - -CREATE INDEX data_quality_data_time_series_id_index ON data_quality_data_time_series (id); \ No newline at end of file diff --git a/bootstrap/sql/migrations/native/1.6.0/postgres/postDataMigrationSQLScript.sql b/bootstrap/sql/migrations/native/1.6.0/postgres/postDataMigrationSQLScript.sql index 9c0ca1c378cd..e4acef81dd2d 100644 --- a/bootstrap/sql/migrations/native/1.6.0/postgres/postDataMigrationSQLScript.sql +++ b/bootstrap/sql/migrations/native/1.6.0/postgres/postDataMigrationSQLScript.sql @@ -6,3 +6,12 @@ SET json = jsonb_set( ) FROM test_case tc WHERE dqdts.entityfqnHash = tc.fqnHash; + +-- Add id column to data_quality_data_time_series table +-- after we have added the id values to the records +ALTER TABLE data_quality_data_time_series +ADD COLUMN id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED, +ADD CONSTRAINT id_unique UNIQUE (id); + +-- Create index on id column +CREATE INDEX IF NOT EXISTS data_quality_data_time_series_id_index ON data_quality_data_time_series (id); diff --git a/bootstrap/sql/migrations/native/1.6.0/postgres/schemaChanges.sql b/bootstrap/sql/migrations/native/1.6.0/postgres/schemaChanges.sql index 58b8e0b1a4e8..e69de29bb2d1 100644 --- a/bootstrap/sql/migrations/native/1.6.0/postgres/schemaChanges.sql +++ b/bootstrap/sql/migrations/native/1.6.0/postgres/schemaChanges.sql @@ -1,5 +0,0 @@ -ALTER TABLE data_quality_data_time_series -ADD COLUMN id VARCHAR(36) GENERATED ALWAYS AS (json ->> 'id') STORED, -ADD CONSTRAINT id_unique UNIQUE (id); - -CREATE INDEX IF NOT EXISTS data_quality_data_time_series_id_index ON data_quality_data_time_series (id); diff --git a/ingestion/setup.py b/ingestion/setup.py index aa5bd5aa27cf..a3618aef4f00 100644 --- a/ingestion/setup.py +++ b/ingestion/setup.py @@ -45,7 +45,7 @@ "sqlalchemy-databricks": "sqlalchemy-databricks~=0.1", "databricks-sdk": "databricks-sdk>=0.18.0,<0.20.0", "trino": "trino[sqlalchemy]", - "spacy": "spacy~=3.7", + "spacy": "spacy<3.8", "looker-sdk": "looker-sdk>=22.20.0", "lkml": "lkml~=1.3", "tableau": "tableau-api-lib~=0.1", @@ -207,11 
+207,8 @@ *COMMONS["datalake"], }, "datalake-s3": { - # requires aiobotocore - # https://github.com/fsspec/s3fs/blob/9bf99f763edaf7026318e150c4bd3a8d18bb3a00/requirements.txt#L1 - # however, the latest version of `s3fs` conflicts its `aiobotocore` dep with `boto3`'s dep on `botocore`. - # Leaving this marked to the automatic resolution to speed up installation. - "s3fs", + # vendoring 'boto3' to keep all dependencies aligned (s3fs, boto3, botocore, aiobotocore) + "s3fs[boto3]", *COMMONS["datalake"], }, "deltalake": {"delta-spark<=2.3.0", "deltalake~=0.17"}, @@ -343,7 +340,6 @@ "coverage", # Install GE because it's not in the `all` plugin VERSIONS["great-expectations"], - "moto~=5.0", "basedpyright~=1.14", "pytest==7.0.0", "pytest-cov", diff --git a/ingestion/src/metadata/cli/app.py b/ingestion/src/metadata/cli/app.py index 7bd888401944..b42eb136219b 100644 --- a/ingestion/src/metadata/cli/app.py +++ b/ingestion/src/metadata/cli/app.py @@ -32,6 +32,7 @@ def run_app(config_path: Path) -> None: try: config_dict = load_config_file(config_path) + # no logging for config because apps might have custom secrets workflow = ApplicationWorkflow.create(config_dict) except Exception as exc: logger.error(f"Error running the application {exc}") diff --git a/ingestion/src/metadata/cli/dataquality.py b/ingestion/src/metadata/cli/dataquality.py index 4433b274d94b..6b472bee5616 100644 --- a/ingestion/src/metadata/cli/dataquality.py +++ b/ingestion/src/metadata/cli/dataquality.py @@ -20,7 +20,7 @@ from metadata.generated.schema.entity.services.ingestionPipelines.ingestionPipeline import ( PipelineType, ) -from metadata.utils.logger import cli_logger +from metadata.utils.logger import cli_logger, redacted_config from metadata.workflow.data_quality import TestSuiteWorkflow from metadata.workflow.workflow_init_error_handler import WorkflowInitErrorHandler @@ -37,6 +37,9 @@ def run_test(config_path: Path) -> None: workflow_config_dict = None try: workflow_config_dict = load_config_file(config_path) + logger.debug( + "Using workflow config:\n%s", redacted_config(workflow_config_dict) + ) workflow = TestSuiteWorkflow.create(workflow_config_dict) except Exception as exc: logger.debug(traceback.format_exc()) diff --git a/ingestion/src/metadata/cli/ingest.py b/ingestion/src/metadata/cli/ingest.py index e52b7354076b..68fbd12c6a91 100644 --- a/ingestion/src/metadata/cli/ingest.py +++ b/ingestion/src/metadata/cli/ingest.py @@ -20,7 +20,7 @@ from metadata.generated.schema.entity.services.ingestionPipelines.ingestionPipeline import ( PipelineType, ) -from metadata.utils.logger import cli_logger +from metadata.utils.logger import cli_logger, redacted_config from metadata.workflow.metadata import MetadataWorkflow from metadata.workflow.workflow_init_error_handler import WorkflowInitErrorHandler @@ -37,6 +37,7 @@ def run_ingest(config_path: Path) -> None: config_dict = None try: config_dict = load_config_file(config_path) + logger.debug("Using workflow config:\n%s", redacted_config(config_dict)) workflow = MetadataWorkflow.create(config_dict) except Exception as exc: logger.debug(traceback.format_exc()) diff --git a/ingestion/src/metadata/cli/lineage.py b/ingestion/src/metadata/cli/lineage.py index f2246fa1cfaa..3d79cc98beff 100644 --- a/ingestion/src/metadata/cli/lineage.py +++ b/ingestion/src/metadata/cli/lineage.py @@ -27,7 +27,7 @@ from metadata.generated.schema.metadataIngestion.workflow import WorkflowConfig from metadata.ingestion.ometa.ometa_api import OpenMetadata from metadata.utils.constants import UTF_8 -from 
metadata.utils.logger import cli_logger +from metadata.utils.logger import cli_logger, redacted_config from metadata.workflow.workflow_init_error_handler import WorkflowInitErrorHandler logger = cli_logger() @@ -52,6 +52,7 @@ def run_lineage(config_path: Path) -> None: config_dict = None try: config_dict = load_config_file(config_path) + logger.debug("Using workflow config:\n%s", redacted_config(config_dict)) workflow = LineageWorkflow.model_validate(config_dict) except Exception as exc: diff --git a/ingestion/src/metadata/cli/profile.py b/ingestion/src/metadata/cli/profile.py index 51053f8f6311..000fdf96fc99 100644 --- a/ingestion/src/metadata/cli/profile.py +++ b/ingestion/src/metadata/cli/profile.py @@ -20,7 +20,7 @@ from metadata.generated.schema.entity.services.ingestionPipelines.ingestionPipeline import ( PipelineType, ) -from metadata.utils.logger import cli_logger +from metadata.utils.logger import cli_logger, redacted_config from metadata.workflow.profiler import ProfilerWorkflow from metadata.workflow.workflow_init_error_handler import WorkflowInitErrorHandler @@ -37,6 +37,9 @@ def run_profiler(config_path: Path) -> None: workflow_config_dict = None try: workflow_config_dict = load_config_file(config_path) + logger.debug( + "Using workflow config:\n%s", redacted_config(workflow_config_dict) + ) workflow = ProfilerWorkflow.create(workflow_config_dict) except Exception as exc: logger.debug(traceback.format_exc()) diff --git a/ingestion/src/metadata/cli/usage.py b/ingestion/src/metadata/cli/usage.py index b4a969939f2c..ebbffe8a5e9b 100644 --- a/ingestion/src/metadata/cli/usage.py +++ b/ingestion/src/metadata/cli/usage.py @@ -20,7 +20,7 @@ from metadata.generated.schema.entity.services.ingestionPipelines.ingestionPipeline import ( PipelineType, ) -from metadata.utils.logger import cli_logger +from metadata.utils.logger import cli_logger, redacted_config from metadata.workflow.usage import UsageWorkflow from metadata.workflow.workflow_init_error_handler import WorkflowInitErrorHandler @@ -37,6 +37,7 @@ def run_usage(config_path: Path) -> None: config_dict = None try: config_dict = load_config_file(config_path) + logger.debug("Using workflow config:\n%s", redacted_config(config_dict)) workflow = UsageWorkflow.create(config_dict) except Exception as exc: logger.debug(traceback.format_exc()) diff --git a/ingestion/src/metadata/data_quality/source/test_suite.py b/ingestion/src/metadata/data_quality/source/test_suite.py index f04eed72e1f6..08ebdec500d3 100644 --- a/ingestion/src/metadata/data_quality/source/test_suite.py +++ b/ingestion/src/metadata/data_quality/source/test_suite.py @@ -22,6 +22,7 @@ from metadata.generated.schema.entity.services.ingestionPipelines.status import ( StackTraceError, ) +from metadata.generated.schema.entity.services.serviceType import ServiceType from metadata.generated.schema.metadataIngestion.testSuitePipeline import ( TestSuitePipeline, ) @@ -36,6 +37,8 @@ from metadata.ingestion.api.steps import Source from metadata.ingestion.ometa.ometa_api import OpenMetadata from metadata.utils import fqn +from metadata.utils.constants import CUSTOM_CONNECTOR_PREFIX +from metadata.utils.importer import import_source_class from metadata.utils.logger import test_suite_logger logger = test_suite_logger() @@ -73,7 +76,7 @@ def _get_table_entity(self) -> Optional[Table]: table: Table = self.metadata.get_by_name( entity=Table, fqn=self.source_config.entityFullyQualifiedName.root, - fields=["tableProfilerConfig", "testSuite"], + fields=["tableProfilerConfig", "testSuite", 
"serviceType"], ) return table @@ -104,8 +107,16 @@ def test_connection(self) -> None: def _iter(self) -> Iterable[Either[TableAndTests]]: table: Table = self._get_table_entity() - if table: + source_type = table.serviceType.value.lower() + if source_type.startswith(CUSTOM_CONNECTOR_PREFIX): + logger.warning( + "Data quality tests might not work as expected with custom sources" + ) + else: + import_source_class( + service_type=ServiceType.Database, source_type=source_type + ) yield from self._process_table_suite(table) else: diff --git a/ingestion/src/metadata/ingestion/ometa/mixins/es_mixin.py b/ingestion/src/metadata/ingestion/ometa/mixins/es_mixin.py index 2add6520d248..62f946c09394 100644 --- a/ingestion/src/metadata/ingestion/ometa/mixins/es_mixin.py +++ b/ingestion/src/metadata/ingestion/ometa/mixins/es_mixin.py @@ -16,12 +16,15 @@ import functools import json import traceback -from typing import Generic, Iterable, List, Optional, Set, Type, TypeVar +from typing import Generic, Iterable, Iterator, List, Optional, Set, Type, TypeVar +from urllib.parse import quote_plus -from pydantic import BaseModel +from pydantic import Field +from typing_extensions import Annotated from metadata.generated.schema.entity.data.container import Container from metadata.generated.schema.entity.data.query import Query +from metadata.ingestion.models.custom_pydantic import BaseModel from metadata.ingestion.ometa.client import REST, APIError from metadata.ingestion.ometa.utils import quote from metadata.utils.elasticsearch import ES_INDEX_MAP @@ -32,6 +35,42 @@ T = TypeVar("T", bound=BaseModel) +class TotalModel(BaseModel): + """Elasticsearch total model""" + + relation: str + value: int + + +class HitsModel(BaseModel): + """Elasticsearch hits model""" + + index: Annotated[str, Field(description="Index name", alias="_index")] + type: Annotated[str, Field(description="Type of the document", alias="_type")] + id: Annotated[str, Field(description="Document ID", alias="_id")] + score: Annotated[ + Optional[float], Field(description="Score of the document", alias="_score") + ] + source: Annotated[dict, Field(description="Document source", alias="_source")] + sort: Annotated[ + List[str], + Field(description="Sort field. Used internally to get the next page FQN"), + ] + + +class ESHits(BaseModel): + """Elasticsearch hits model""" + + total: Annotated[TotalModel, Field(description="Total matched elements")] + hits: Annotated[List[HitsModel], Field(description="List of matched elements")] + + +class ESResponse(BaseModel): + """Elasticsearch response model""" + + hits: ESHits + + class ESMixin(Generic[T]): """ OpenMetadata API methods related to Elasticsearch. 
@@ -46,6 +85,12 @@ class ESMixin(Generic[T]): "&size={size}&index={index}" ) + # sort_field needs to be unique for the pagination to work, so we can use the FQN + paginate_query = ( + "/search/query?q=&size={size}&deleted=false{filter}&index={index}" + "&sort_field=fullyQualifiedName{after}" + ) + @functools.lru_cache(maxsize=512) def _search_es_entity( self, @@ -252,3 +297,65 @@ def es_get_queries_with_lineage(self, service_name: str) -> Optional[Set[str]]: logger.debug(traceback.format_exc()) logger.warning(f"Unknown error extracting results from ES query [{err}]") return None + + def paginate_es( + self, + entity: Type[T], + query_filter: Optional[str] = None, + size: int = 100, + fields: Optional[List[str]] = None, + ) -> Iterator[T]: + """Paginate through the ES results, ignoring individual errors""" + after: Optional[str] = None + error_pages = 0 + query = functools.partial( + self.paginate_query.format, + index=ES_INDEX_MAP[entity.__name__], + filter="&query_filter=" + quote_plus(query_filter) if query_filter else "", + size=size, + ) + while True: + query_string = query( + after="&search_after=" + quote_plus(after) if after else "" + ) + response = self._get_es_response(query_string) + + # Allow 3 errors getting pages before getting out of the loop + if not response: + error_pages += 1 + if error_pages < 3: + continue + else: + break + + # Get the data + for hit in response.hits.hits: + try: + yield self.get_by_name( + entity=entity, + fqn=hit.source["fullyQualifiedName"], + fields=fields, + nullable=False, # Raise an error if we don't find the Entity + ) + except Exception as exc: + logger.warning( + f"Error while getting {hit.source['fullyQualifiedName']} - {exc}" + ) + + # Get next page + last_hit = response.hits.hits[-1] if response.hits.hits else None + if not last_hit or not last_hit.sort: + logger.info("No more pages to fetch") + break + + after = ",".join(last_hit.sort) + + def _get_es_response(self, query_string: str) -> Optional[ESResponse]: + """Get the Elasticsearch response""" + try: + response = self.client.get(query_string) + return ESResponse.model_validate(response) + except Exception as exc: + logger.debug(traceback.format_exc()) + logger.warning(f"Error while getting ES response: {exc}") + return None diff --git a/ingestion/src/metadata/ingestion/source/database/redshift/metadata.py b/ingestion/src/metadata/ingestion/source/database/redshift/metadata.py index 232ea159733f..270eae010920 100644 --- a/ingestion/src/metadata/ingestion/source/database/redshift/metadata.py +++ b/ingestion/src/metadata/ingestion/source/database/redshift/metadata.py @@ -83,6 +83,7 @@ _get_schema_column_info, get_columns, get_table_comment, + get_view_definition, ) from metadata.ingestion.source.database.stored_procedures_mixin import ( QueryByProcedure, @@ -122,6 +123,7 @@ PGDialect._get_column_info = _get_pg_column_info # pylint: disable=protected-access RedshiftDialect.get_all_table_comments = get_all_table_comments RedshiftDialect.get_table_comment = get_table_comment +RedshiftDialect.get_view_definition = get_view_definition RedshiftDialect._get_all_relation_info = ( # pylint: disable=protected-access _get_all_relation_info ) diff --git a/ingestion/src/metadata/ingestion/source/database/redshift/queries.py b/ingestion/src/metadata/ingestion/source/database/redshift/queries.py index 95cfe84f7bba..f35cd6d1678d 100644 --- a/ingestion/src/metadata/ingestion/source/database/redshift/queries.py +++ b/ingestion/src/metadata/ingestion/source/database/redshift/queries.py @@ -236,30 +236,6 @@ 
# hence we are appending "create view . as " to select query # to generate the column level lineage REDSHIFT_GET_ALL_RELATIONS = """ - WITH view_defs AS ( - SELECT - c.oid, - pg_catalog.pg_get_viewdef(c.oid, true) AS view_definition, - n.nspname, - c.relname - FROM - pg_catalog.pg_class c - LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace - WHERE - c.relkind = 'v' - ), - adjusted_view_defs AS ( - SELECT - oid, - CASE - WHEN view_definition LIKE '%WITH NO SCHEMA BINDING%' THEN - REGEXP_REPLACE(view_definition, 'create view [^ ]+ as (.*WITH NO SCHEMA BINDING;?)', '\\1') - ELSE - 'CREATE VIEW ' || nspname || '.' || relname || ' AS ' || view_definition - END AS view_definition - FROM - view_defs - ) SELECT c.relkind, n.oid as "schema_oid", @@ -271,13 +247,12 @@ AS "diststyle", c.relowner AS "owner_id", u.usename AS "owner_name", - avd.view_definition + CAST(pg_catalog.pg_get_viewdef(c.oid, true) AS TEXT) AS "view_definition", pg_catalog.array_to_string(c.relacl, '\n') AS "privileges" FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace JOIN pg_catalog.pg_user u ON u.usesysid = c.relowner - LEFT JOIN adjusted_view_defs avd ON avd.oid = c.oid WHERE c.relkind IN ('r', 'v', 'm', 'S', 'f') AND n.nspname !~ '^pg_' {schema_clause} {table_clause} UNION diff --git a/ingestion/src/metadata/ingestion/source/database/redshift/utils.py b/ingestion/src/metadata/ingestion/source/database/redshift/utils.py index 8082d4de5fe6..82bd02c36e67 100644 --- a/ingestion/src/metadata/ingestion/source/database/redshift/utils.py +++ b/ingestion/src/metadata/ingestion/source/database/redshift/utils.py @@ -395,3 +395,22 @@ def _get_all_relation_info(self, connection, **kw): # pylint: disable=unused-ar key = RelationKey(rel.relname, rel.schema, connection) relations[key] = rel return relations + + +@reflection.cache +def get_view_definition(self, connection, view_name, schema=None, **kw): + """Return view definition. + Given a :class:`.Connection`, a string `view_name`, + and an optional string `schema`, return the view definition. + + Overrides interface + :meth:`~sqlalchemy.engine.interfaces.Dialect.get_view_definition`. 
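+
+    Note: Redshift late-binding views keep a ``WITH NO SCHEMA BINDING``
+    suffix in the ``pg_get_viewdef`` output; it is stripped below, and a
+    ``CREATE VIEW`` prefix is added when missing, so the definition can be
+    parsed for lineage.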
+    """
+    view = self._get_redshift_relation(connection, view_name, schema, **kw)
+    pattern = re.compile("WITH NO SCHEMA BINDING", re.IGNORECASE)
+    view_definition = str(sa.text(pattern.sub("", view.view_definition)))
+    if not view_definition.startswith("create"):
+        view_definition = (
+            f"CREATE VIEW {view.schema}.{view.relname} AS {view_definition}"
+        )
+    return view_definition
diff --git a/ingestion/src/metadata/profiler/processor/core.py b/ingestion/src/metadata/profiler/processor/core.py
index ec9db6da4581..f10054a3696e 100644
--- a/ingestion/src/metadata/profiler/processor/core.py
+++ b/ingestion/src/metadata/profiler/processor/core.py
@@ -603,7 +603,7 @@ def get_profile(self) -> CreateTableProfileRequest:
 
         if self._system_results:
             system_profile = [
-                SystemProfile(**system_result)
+                SystemProfile.model_validate(system_result)
                 for system_result in self._system_results
             ]
         else:
diff --git a/ingestion/src/metadata/utils/constants.py b/ingestion/src/metadata/utils/constants.py
index fcf212d04f8e..cbb1b7b5f3e9 100644
--- a/ingestion/src/metadata/utils/constants.py
+++ b/ingestion/src/metadata/utils/constants.py
@@ -108,3 +108,5 @@
 ENTITY_REFERENCE_TYPE_MAP = {
     value.__name__: key for key, value in ENTITY_REFERENCE_CLASS_MAP.items()
 }
+
+CUSTOM_CONNECTOR_PREFIX = "custom"
diff --git a/ingestion/src/metadata/utils/importer.py b/ingestion/src/metadata/utils/importer.py
index 23b99ffc8002..12c03eb2ecef 100644
--- a/ingestion/src/metadata/utils/importer.py
+++ b/ingestion/src/metadata/utils/importer.py
@@ -27,6 +27,8 @@
 from metadata.generated.schema.metadataIngestion.workflow import Sink as WorkflowSink
 from metadata.ingestion.api.steps import BulkSink, Processor, Sink, Source, Stage
 from metadata.utils.class_helper import get_service_type_from_source_type
+from metadata.utils.client_version import get_client_version
+from metadata.utils.constants import CUSTOM_CONNECTOR_PREFIX
 from metadata.utils.logger import utils_logger
 from metadata.utils.singleton import Singleton
@@ -43,6 +45,38 @@ class DynamicImportException(Exception):
     Raise it when having issues dynamically importing objects
     """
 
+    def __init__(self, module: str, key: str = None, cause: Exception = None):
+        self.module = module
+        self.key = key
+        self.cause = cause
+
+    def __str__(self):
+        import_path = self.module
+        if self.key:
+            import_path += f".{self.key}"
+        return f"Cannot import {import_path} due to {self.cause}"
+
+
+class MissingPluginException(Exception):
+    """
+    An exception that captures a missing openmetadata-ingestion plugin for a specific connector.
+    """
+
+    def __init__(self, plugin: str):
+        self.plugin = plugin
+
+    def __str__(self):
+        try:
+            version = "==" + get_client_version()
+        except Exception:
+            logger.warning("unable to get client version")
+            logger.debug(traceback.format_exc())
+            version = ""
+        return (
+            f"You might be missing the plugin [{self.plugin}].
Try:\n" + f'pip install "openmetadata-ingestion[{self.plugin}]{version}"' + ) + def get_module_dir(type_: str) -> str: """ @@ -93,13 +127,13 @@ def import_from_module(key: str) -> Type[Any]: Dynamically import an object from a module path """ + module_name, obj_name = key.rsplit(MODULE_SEPARATOR, 1) try: - module_name, obj_name = key.rsplit(MODULE_SEPARATOR, 1) obj = getattr(importlib.import_module(module_name), obj_name) return obj except Exception as err: logger.debug(traceback.format_exc()) - raise DynamicImportException(f"Cannot load object from {key} due to {err}") + raise DynamicImportException(module=module_name, key=obj_name, cause=err) # module building strings read better with .format instead of f-strings @@ -200,7 +234,7 @@ def import_connection_fn(connection: BaseModel, function_name: str) -> Callable: # module building strings read better with .format instead of f-strings # pylint: disable=consider-using-f-string - if connection.type.value.lower().startswith("custom"): + if connection.type.value.lower().startswith(CUSTOM_CONNECTOR_PREFIX): python_class_parts = connection.sourcePythonClass.rsplit(".", 1) python_module_path = ".".join(python_class_parts[:-1]) @@ -261,9 +295,7 @@ def import_side_effects(self, *modules): SideEffectsLoader.modules.add(module.__name__) except Exception as err: logger.debug(traceback.format_exc()) - raise DynamicImportException( - f"Cannot load object from {module} due to {err}" - ) + raise DynamicImportException(module=module, cause=err) else: logger.debug(f"Module {module} already imported") diff --git a/ingestion/src/metadata/utils/logger.py b/ingestion/src/metadata/utils/logger.py index 15f724a287bf..c33eb3b96d19 100644 --- a/ingestion/src/metadata/utils/logger.py +++ b/ingestion/src/metadata/utils/logger.py @@ -13,10 +13,11 @@ """ import logging +from copy import deepcopy from enum import Enum from functools import singledispatch from types import DynamicClassAttribute -from typing import Optional, Union +from typing import Dict, Optional, Union from metadata.data_quality.api.models import ( TableAndTests, @@ -37,6 +38,8 @@ ) logging.basicConfig(format=BASE_LOGGING_FORMAT, datefmt="%Y-%m-%d %H:%M:%S") +REDACTED_KEYS = {"serviceConnection", "securityConfig"} + class Loggers(Enum): """ @@ -170,10 +173,14 @@ def set_loggers_level(level: Union[int, str] = logging.INFO): def log_ansi_encoded_string( - color: Optional[ANSI] = None, bold: bool = False, message: str = "" + color: Optional[ANSI] = None, + bold: bool = False, + message: str = "", + level=logging.INFO, ): - utils_logger().info( - f"{ANSI.BOLD.value if bold else ''}{color.value if color else ''}{message}{ANSI.ENDC.value}" + utils_logger().log( + level=level, + msg=f"{ANSI.BOLD.value if bold else ''}{color.value if color else ''}{message}{ANSI.ENDC.value}", ) @@ -260,3 +267,21 @@ def _(record: OMetaPipelineStatus) -> str: def _(record: PatchRequest) -> str: """Get the log of the new entity""" return get_log_name(record.new_entity) + + +def redacted_config(config: Dict[str, Union[str, dict]]) -> Dict[str, Union[str, dict]]: + config_copy = deepcopy(config) + + def traverse_and_modify(obj): + if isinstance(obj, dict): + for key, value in obj.items(): + if key in REDACTED_KEYS: + obj[key] = "REDACTED" + else: + traverse_and_modify(value) + elif isinstance(obj, list): + for item in obj: + traverse_and_modify(item) + + traverse_and_modify(config_copy) + return config_copy diff --git a/ingestion/src/metadata/workflow/ingestion.py b/ingestion/src/metadata/workflow/ingestion.py index 
19b1a5f739e2..cfa78c1259ed 100644 --- a/ingestion/src/metadata/workflow/ingestion.py +++ b/ingestion/src/metadata/workflow/ingestion.py @@ -21,7 +21,7 @@ """ import traceback from abc import ABC, abstractmethod -from typing import List, Tuple, cast +from typing import List, Tuple, Type, cast from metadata.config.common import WorkflowExecutionError from metadata.generated.schema.entity.services.connections.metadata.openMetadataConnection import ( @@ -46,6 +46,13 @@ get_service_class_from_service_type, get_service_type_from_source_type, ) +from metadata.utils.constants import CUSTOM_CONNECTOR_PREFIX +from metadata.utils.importer import ( + DynamicImportException, + MissingPluginException, + import_from_module, + import_source_class, +) from metadata.utils.logger import ingestion_logger from metadata.workflow.base import BaseWorkflow, InvalidWorkflowJSONException from metadata.workflow.workflow_status_mixin import SUCCESS_THRESHOLD_VALUE @@ -218,3 +225,22 @@ def validate(self): raise WorkflowExecutionError( f"Profiler is not supported for the service connection: {self.config.source.serviceConnection}" ) + + def import_source_class(self) -> Type[Source]: + source_type = self.config.source.type.lower() + try: + return ( + import_from_module( + self.config.source.serviceConnection.root.config.sourcePythonClass + ) + if source_type.startswith(CUSTOM_CONNECTOR_PREFIX) + else import_source_class( + service_type=self.service_type, source_type=source_type + ) + ) + except DynamicImportException as e: + if source_type.startswith(CUSTOM_CONNECTOR_PREFIX): + raise e + logger.debug(traceback.format_exc()) + logger.error(f"Failed to import source of type '{source_type}'") + raise MissingPluginException(source_type) diff --git a/ingestion/src/metadata/workflow/metadata.py b/ingestion/src/metadata/workflow/metadata.py index b440db980288..204299bb5db9 100644 --- a/ingestion/src/metadata/workflow/metadata.py +++ b/ingestion/src/metadata/workflow/metadata.py @@ -14,11 +14,7 @@ from metadata.config.common import WorkflowExecutionError from metadata.ingestion.api.steps import Sink, Source -from metadata.utils.importer import ( - import_from_module, - import_sink_class, - import_source_class, -) +from metadata.utils.importer import import_sink_class from metadata.utils.logger import ingestion_logger from metadata.workflow.ingestion import IngestionWorkflow @@ -47,15 +43,7 @@ def _get_source(self) -> Source: "configuration here: https://docs.open-metadata.org/connectors" ) - source_class = ( - import_from_module( - self.config.source.serviceConnection.root.config.sourcePythonClass - ) - if source_type.startswith("custom") - else import_source_class( - service_type=self.service_type, source_type=source_type - ) - ) + source_class = self.import_source_class() pipeline_name = ( self.ingestion_pipeline.fullyQualifiedName.root diff --git a/ingestion/src/metadata/workflow/profiler.py b/ingestion/src/metadata/workflow/profiler.py index b0363fbc9878..c6987fb0ef13 100644 --- a/ingestion/src/metadata/workflow/profiler.py +++ b/ingestion/src/metadata/workflow/profiler.py @@ -44,6 +44,7 @@ def __init__(self, config: OpenMetadataWorkflowConfig): def _get_source_class(self): if self.config.source.serviceName: + self.import_source_class() return OpenMetadataSource logger.info( "Database Service name not provided, we will scan all the tables " diff --git a/ingestion/src/metadata/workflow/usage.py b/ingestion/src/metadata/workflow/usage.py index aeab26a13ec7..e1d9368f0336 100644 --- a/ingestion/src/metadata/workflow/usage.py +++ 
b/ingestion/src/metadata/workflow/usage.py
@@ -16,9 +16,7 @@
 from metadata.ingestion.api.steps import BulkSink, Processor, Source, Stage
 from metadata.utils.importer import (
     import_bulk_sink_type,
-    import_from_module,
     import_processor_class,
-    import_source_class,
     import_stage_class,
 )
 from metadata.utils.logger import ingestion_logger
@@ -51,16 +49,7 @@ def _get_source(self) -> Source:
                 "configuration here: https://docs.open-metadata.org/connectors"
             )
 
-        source_class = (
-            import_from_module(
-                self.config.source.serviceConnection.root.config.sourcePythonClass
-            )
-            if source_type.startswith("custom")
-            else import_source_class(
-                service_type=self.service_type, source_type=source_type
-            )
-        )
-
+        source_class = self.import_source_class()
         source: Source = source_class.create(
             self.config.source.model_dump(), self.metadata
         )
diff --git a/ingestion/src/metadata/workflow/workflow_init_error_handler.py b/ingestion/src/metadata/workflow/workflow_init_error_handler.py
index ff1cffa26e0f..0e7c614762f3 100644
--- a/ingestion/src/metadata/workflow/workflow_init_error_handler.py
+++ b/ingestion/src/metadata/workflow/workflow_init_error_handler.py
@@ -12,6 +12,7 @@
 """
 Module handles the init error messages from different workflows
 """
+import logging
 import traceback
 from pathlib import Path
 from typing import Any, Dict, Optional, Type, Union
@@ -25,7 +26,7 @@
     ParsingConfigurationError,
 )
 from metadata.utils.constants import UTF_8
-from metadata.utils.logger import ANSI, log_ansi_encoded_string
+from metadata.utils.logger import ANSI, log_ansi_encoded_string, utils_logger
 
 EXAMPLES_WORKFLOW_PATH: Path = Path(__file__).parent / "../examples" / "workflows"
@@ -74,10 +75,10 @@ def print_init_error(
                 source_type_name, pipeline_type
             )
         else:
+            utils_logger().debug(traceback.format_exc())
             WorkflowInitErrorHandler._print_error_msg(
                 f"\nError initializing {pipeline_type.name}: {exc}"
             )
-            WorkflowInitErrorHandler._print_error_msg(traceback.format_exc())
 
         WorkflowInitErrorHandler._print_more_info(pipeline_type)
@@ -151,4 +152,15 @@ def _print_error_msg(msg: str) -> None:
         """
         Print message with error style
         """
-        log_ansi_encoded_string(color=ANSI.BRIGHT_RED, bold=False, message=f"{msg}")
+        log_ansi_encoded_string(
+            color=ANSI.BRIGHT_RED, bold=False, message=f"{msg}", level=logging.ERROR
+        )
+
+    @staticmethod
+    def _print_debug_msg(msg: str) -> None:
+        """
+        Print message with debug style
+        """
+        log_ansi_encoded_string(
+            color=ANSI.YELLOW, bold=False, message=f"{msg}", level=logging.DEBUG
+        )
diff --git a/ingestion/tests/integration/conftest.py b/ingestion/tests/integration/conftest.py
index 3c987e5aa68f..81f19a2ea878 100644
--- a/ingestion/tests/integration/conftest.py
+++ b/ingestion/tests/integration/conftest.py
@@ -15,7 +15,8 @@
 from metadata.workflow.ingestion import IngestionWorkflow
 
 if not sys.version_info >= (3, 9):
-    collect_ignore = ["trino", "kafka"]
+    # these tests use testcontainers, which is not supported on Python 3.8
+    collect_ignore = ["trino", "kafka", "datalake"]
 
 
 @pytest.fixture(scope="session", autouse=True)
diff --git a/ingestion/tests/integration/containers.py b/ingestion/tests/integration/containers.py
index 9483f2468a4d..3bf46b799c18 100644
--- a/ingestion/tests/integration/containers.py
+++ b/ingestion/tests/integration/containers.py
@@ -53,7 +53,7 @@ class MinioContainerConfigs:
     access_key: str = "minio"
     secret_key: str = "password"
     port: int = 9000
-    container_name: str = "test-minio"
+    container_name: Optional[str] = None
     exposed_port: Optional[int] = None
 
     def 
with_exposed_port(self, container): diff --git a/ingestion/tests/integration/datalake/conftest.py b/ingestion/tests/integration/datalake/conftest.py index 1ed88fa8ffb6..337bea1081af 100644 --- a/ingestion/tests/integration/datalake/conftest.py +++ b/ingestion/tests/integration/datalake/conftest.py @@ -14,16 +14,16 @@ import os from copy import deepcopy -import boto3 import pytest -from moto import mock_aws from metadata.generated.schema.entity.services.databaseService import DatabaseService from metadata.workflow.data_quality import TestSuiteWorkflow from metadata.workflow.metadata import MetadataWorkflow from metadata.workflow.profiler import ProfilerWorkflow -BUCKET_NAME = "MyBucket" +from ..containers import MinioContainerConfigs, get_minio_container + +BUCKET_NAME = "my-bucket" INGESTION_CONFIG = { "source": { @@ -77,7 +77,7 @@ "sourceConfig": { "config": { "type": "TestSuite", - "entityFullyQualifiedName": 'datalake_for_integration_tests.default.MyBucket."users.csv"', + "entityFullyQualifiedName": f'datalake_for_integration_tests.default.{BUCKET_NAME}."users.csv"', } }, }, @@ -128,31 +128,19 @@ } -@pytest.fixture(scope="module", autouse=True) -def aws(): - with mock_aws(): - yield boto3.client("s3", region_name="us-east-1") +@pytest.fixture(scope="session") +def minio_container(): + with get_minio_container(MinioContainerConfigs()) as container: + yield container @pytest.fixture(scope="class", autouse=True) -def setup_s3(request) -> None: +def setup_s3(minio_container) -> None: # Mock our S3 bucket and ingest a file - boto3.DEFAULT_SESSION = None - request.cls.s3_client = boto3.client( - "s3", - region_name="us-west-1", - ) - s3 = boto3.resource( - "s3", - region_name="us-west-1", - aws_access_key_id="fake_access_key", - aws_secret_access_key="fake_secret_key", - ) - request.cls.s3_client.create_bucket( - Bucket=BUCKET_NAME, - CreateBucketConfiguration={"LocationConstraint": "us-west-1"}, - ) - s3.meta.client.head_bucket(Bucket=BUCKET_NAME) + client = minio_container.get_client() + if client.bucket_exists(BUCKET_NAME): + return + client.make_bucket(BUCKET_NAME) current_dir = os.path.dirname(__file__) resources_dir = os.path.join(current_dir, "resources") @@ -161,23 +149,31 @@ def setup_s3(request) -> None: for path, _, files in os.walk(resources_dir) for filename in files ] - - request.cls.s3_keys = [] - for path in resources_paths: key = os.path.relpath(path, resources_dir) - request.cls.s3_keys.append(key) - request.cls.s3_client.upload_file(Filename=path, Bucket=BUCKET_NAME, Key=key) - yield - bucket = s3.Bucket(BUCKET_NAME) - for key in bucket.objects.all(): - key.delete() - bucket.delete() + client.fput_object(BUCKET_NAME, key, path) + return + + +@pytest.fixture(scope="class") +def ingestion_config(minio_container): + ingestion_config = deepcopy(INGESTION_CONFIG) + ingestion_config["source"]["serviceConnection"]["config"]["configSource"].update( + { + "securityConfig": { + "awsAccessKeyId": minio_container.access_key, + "awsSecretAccessKey": minio_container.secret_key, + "awsRegion": "us-west-1", + "endPointURL": f"http://localhost:{minio_container.get_exposed_port(minio_container.port)}", + } + } + ) + return ingestion_config @pytest.fixture(scope="class") -def run_ingestion(metadata): - ingestion_workflow = MetadataWorkflow.create(INGESTION_CONFIG) +def run_ingestion(metadata, ingestion_config): + ingestion_workflow = MetadataWorkflow.create(ingestion_config) ingestion_workflow.execute() ingestion_workflow.raise_from_status() ingestion_workflow.stop() @@ -188,28 +184,31 @@ def 
run_ingestion(metadata): metadata.delete(DatabaseService, db_service.id, recursive=True, hard_delete=True) -@pytest.fixture -def run_test_suite_workflow(run_ingestion): - ingestion_workflow = TestSuiteWorkflow.create(DATA_QUALITY_CONFIG) +@pytest.fixture(scope="class") +def run_test_suite_workflow(run_ingestion, ingestion_config): + workflow_config = deepcopy(DATA_QUALITY_CONFIG) + workflow_config["source"]["serviceConnection"] = ingestion_config["source"][ + "serviceConnection" + ] + ingestion_workflow = TestSuiteWorkflow.create(workflow_config) ingestion_workflow.execute() ingestion_workflow.raise_from_status() ingestion_workflow.stop() -@pytest.fixture(scope="session") -def profiler_workflow_config(workflow_config): - config = deepcopy(INGESTION_CONFIG) - config["source"]["sourceConfig"]["config"].update( +@pytest.fixture(scope="class") +def profiler_workflow_config(ingestion_config, workflow_config): + ingestion_config["source"]["sourceConfig"]["config"].update( { "type": "Profiler", } ) - config["processor"] = { + ingestion_config["processor"] = { "type": "orm-profiler", "config": {}, } - config["workflowConfig"] = workflow_config - return config + ingestion_config["workflowConfig"] = workflow_config + return ingestion_config @pytest.fixture() diff --git a/ingestion/tests/integration/orm_profiler/resources/profiler_test_.csv b/ingestion/tests/integration/datalake/resources/profiler_test_.csv similarity index 100% rename from ingestion/tests/integration/orm_profiler/resources/profiler_test_.csv rename to ingestion/tests/integration/datalake/resources/profiler_test_.csv diff --git a/ingestion/tests/integration/datalake/test_datalake_profiler_e2e.py b/ingestion/tests/integration/datalake/test_datalake_profiler_e2e.py new file mode 100644 index 000000000000..1a8244348a43 --- /dev/null +++ b/ingestion/tests/integration/datalake/test_datalake_profiler_e2e.py @@ -0,0 +1,311 @@ +# Copyright 2021 Collate +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Test Datalake Profiler workflow + +To run this we need OpenMetadata server up and running. 
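+(the conftest.py fixtures in this directory spin up a MinIO container to
+stand in for S3, so no AWS credentials are required)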
+ +No sample data is required beforehand +""" +import pytest + +from ingestion.tests.integration.datalake.conftest import BUCKET_NAME +from metadata.generated.schema.entity.data.table import ColumnProfile, Table +from metadata.utils.time_utils import ( + get_beginning_of_day_timestamp_mill, + get_end_of_day_timestamp_mill, +) +from metadata.workflow.profiler import ProfilerWorkflow +from metadata.workflow.workflow_output_handler import WorkflowResultStatus + + +@pytest.fixture(scope="class", autouse=True) +def before_each(run_ingestion): + pass + + +class TestDatalakeProfilerTestE2E: + """datalake profiler E2E test""" + + def test_datalake_profiler_workflow(self, ingestion_config, metadata): + ingestion_config["source"]["sourceConfig"]["config"].update( + { + "type": "Profiler", + } + ) + ingestion_config["processor"] = { + "type": "orm-profiler", + "config": {}, + } + + profiler_workflow = ProfilerWorkflow.create(ingestion_config) + profiler_workflow.execute() + status = profiler_workflow.result_status() + profiler_workflow.stop() + + assert status == WorkflowResultStatus.SUCCESS + + table_profile = metadata.get_profile_data( + f'{ingestion_config["source"]["serviceName"]}.default.{BUCKET_NAME}."profiler_test_.csv"', + get_beginning_of_day_timestamp_mill(), + get_end_of_day_timestamp_mill(), + ) + + column_profile = metadata.get_profile_data( + f'{ingestion_config["source"]["serviceName"]}.default.{BUCKET_NAME}."profiler_test_.csv".first_name', + get_beginning_of_day_timestamp_mill(), + get_end_of_day_timestamp_mill(), + profile_type=ColumnProfile, + ) + + assert table_profile.entities + assert column_profile.entities + + def test_values_partitioned_datalake_profiler_workflow( + self, metadata, ingestion_config + ): + """Test partitioned datalake profiler workflow""" + ingestion_config["source"]["sourceConfig"]["config"].update( + { + "type": "Profiler", + } + ) + ingestion_config["processor"] = { + "type": "orm-profiler", + "config": { + "tableConfig": [ + { + "fullyQualifiedName": f'{ingestion_config["source"]["serviceName"]}.default.{BUCKET_NAME}."profiler_test_.csv"', + "partitionConfig": { + "enablePartitioning": "true", + "partitionColumnName": "first_name", + "partitionIntervalType": "COLUMN-VALUE", + "partitionValues": ["John"], + }, + } + ] + }, + } + + profiler_workflow = ProfilerWorkflow.create(ingestion_config) + profiler_workflow.execute() + status = profiler_workflow.result_status() + profiler_workflow.stop() + + assert status == WorkflowResultStatus.SUCCESS + + table = metadata.get_by_name( + entity=Table, + fqn=f'{ingestion_config["source"]["serviceName"]}.default.{BUCKET_NAME}."profiler_test_.csv"', + fields=["tableProfilerConfig"], + nullable=False, + ) + + profile = metadata.get_latest_table_profile(table.fullyQualifiedName).profile + + assert profile.rowCount == 1.0 + + def test_datetime_partitioned_datalake_profiler_workflow( + self, ingestion_config, metadata + ): + """Test partitioned datalake profiler workflow""" + ingestion_config["source"]["sourceConfig"]["config"].update( + { + "type": "Profiler", + } + ) + ingestion_config["processor"] = { + "type": "orm-profiler", + "config": { + "tableConfig": [ + { + "fullyQualifiedName": f'{ingestion_config["source"]["serviceName"]}.default.{BUCKET_NAME}."profiler_test_.csv"', + "partitionConfig": { + "enablePartitioning": "true", + "partitionColumnName": "birthdate", + "partitionIntervalType": "TIME-UNIT", + "partitionIntervalUnit": "YEAR", + "partitionInterval": 35, + }, + } + ], + }, + } + + profiler_workflow = 
ProfilerWorkflow.create(ingestion_config) + profiler_workflow.execute() + status = profiler_workflow.result_status() + profiler_workflow.stop() + + assert status == WorkflowResultStatus.SUCCESS + + table = metadata.get_by_name( + entity=Table, + fqn=f'{ingestion_config["source"]["serviceName"]}.default.{BUCKET_NAME}."profiler_test_.csv"', + fields=["tableProfilerConfig"], + ) + + profile = metadata.get_latest_table_profile(table.fullyQualifiedName).profile + + assert profile.rowCount == 2.0 + + def test_integer_range_partitioned_datalake_profiler_workflow( + self, ingestion_config, metadata + ): + """Test partitioned datalake profiler workflow""" + ingestion_config["source"]["sourceConfig"]["config"].update( + { + "type": "Profiler", + } + ) + ingestion_config["processor"] = { + "type": "orm-profiler", + "config": { + "tableConfig": [ + { + "fullyQualifiedName": f'{ingestion_config["source"]["serviceName"]}.default.{BUCKET_NAME}."profiler_test_.csv"', + "profileSample": 100, + "partitionConfig": { + "enablePartitioning": "true", + "partitionColumnName": "age", + "partitionIntervalType": "INTEGER-RANGE", + "partitionIntegerRangeStart": 35, + "partitionIntegerRangeEnd": 44, + }, + } + ], + }, + } + + profiler_workflow = ProfilerWorkflow.create(ingestion_config) + profiler_workflow.execute() + status = profiler_workflow.result_status() + profiler_workflow.stop() + + assert status == WorkflowResultStatus.SUCCESS + + table = metadata.get_by_name( + entity=Table, + fqn=f'{ingestion_config["source"]["serviceName"]}.default.{BUCKET_NAME}."profiler_test_.csv"', + fields=["tableProfilerConfig"], + ) + + profile = metadata.get_latest_table_profile(table.fullyQualifiedName).profile + + assert profile.rowCount == 2.0 + + def test_datalake_profiler_workflow_with_custom_profiler_config( + self, metadata, ingestion_config + ): + """Test custom profiler config return expected sample and metric computation""" + profiler_metrics = [ + "MIN", + "MAX", + "MEAN", + "MEDIAN", + ] + id_metrics = ["MIN", "MAX"] + non_metric_values = ["name", "timestamp"] + + ingestion_config["source"]["sourceConfig"]["config"].update( + { + "type": "Profiler", + } + ) + ingestion_config["processor"] = { + "type": "orm-profiler", + "config": { + "profiler": { + "name": "ingestion_profiler", + "metrics": profiler_metrics, + }, + "tableConfig": [ + { + "fullyQualifiedName": f'{ingestion_config["source"]["serviceName"]}.default.{BUCKET_NAME}."profiler_test_.csv"', + "columnConfig": { + "includeColumns": [ + {"columnName": "id", "metrics": id_metrics}, + {"columnName": "age"}, + ] + }, + } + ], + }, + } + + profiler_workflow = ProfilerWorkflow.create(ingestion_config) + profiler_workflow.execute() + status = profiler_workflow.result_status() + profiler_workflow.stop() + + assert status == WorkflowResultStatus.SUCCESS + + table = metadata.get_by_name( + entity=Table, + fqn=f'{ingestion_config["source"]["serviceName"]}.default.{BUCKET_NAME}."profiler_test_.csv"', + fields=["tableProfilerConfig"], + ) + + id_profile = metadata.get_profile_data( + f'{ingestion_config["source"]["serviceName"]}.default.{BUCKET_NAME}."profiler_test_.csv".id', + get_beginning_of_day_timestamp_mill(), + get_end_of_day_timestamp_mill(), + profile_type=ColumnProfile, + ).entities + + latest_id_profile = max(id_profile, key=lambda o: o.timestamp.root) + + id_metric_ln = 0 + for metric_name, metric in latest_id_profile: + if metric_name.upper() in id_metrics: + assert metric is not None + id_metric_ln += 1 + else: + assert metric is None if metric_name not in 
non_metric_values else True + + assert id_metric_ln == len(id_metrics) + + age_profile = metadata.get_profile_data( + f'{ingestion_config["source"]["serviceName"]}.default.{BUCKET_NAME}."profiler_test_.csv".age', + get_beginning_of_day_timestamp_mill(), + get_end_of_day_timestamp_mill(), + profile_type=ColumnProfile, + ).entities + + latest_age_profile = max(age_profile, key=lambda o: o.timestamp.root) + + age_metric_ln = 0 + for metric_name, metric in latest_age_profile: + if metric_name.upper() in profiler_metrics: + assert metric is not None + age_metric_ln += 1 + else: + assert metric is None if metric_name not in non_metric_values else True + + assert age_metric_ln == len(profiler_metrics) + + latest_exc_timestamp = latest_age_profile.timestamp.root + first_name_profile = metadata.get_profile_data( + f'{ingestion_config["source"]["serviceName"]}.default.{BUCKET_NAME}."profiler_test_.csv".first_name_profile', + get_beginning_of_day_timestamp_mill(), + get_end_of_day_timestamp_mill(), + profile_type=ColumnProfile, + ).entities + + assert not [ + p for p in first_name_profile if p.timestamp.root == latest_exc_timestamp + ] + + sample_data = metadata.get_sample_data(table) + assert sorted([c.root for c in sample_data.sampleData.columns]) == sorted( + ["id", "age"] + ) diff --git a/ingestion/tests/integration/datalake/test_ingestion.py b/ingestion/tests/integration/datalake/test_ingestion.py index 58c1847fee07..1bfff44f1c7c 100644 --- a/ingestion/tests/integration/datalake/test_ingestion.py +++ b/ingestion/tests/integration/datalake/test_ingestion.py @@ -13,6 +13,7 @@ import pytest +from ingestion.tests.integration.datalake.conftest import BUCKET_NAME from metadata.generated.schema.entity.data.table import DataType, Table from metadata.ingestion.ometa.models import EntityList from metadata.ingestion.ometa.ometa_api import OpenMetadata @@ -37,11 +38,15 @@ def test_ingestion(self, run_ingestion): ) # type: ignore entities = resp.entities - assert len(entities) == 4 + assert len(entities) == 5 names = [entity.name.root for entity in entities] - assert {"names.json", "names.jsonl", "new_users.parquet", "users.csv"} == set( - names - ) + assert { + "names.json", + "names.jsonl", + "new_users.parquet", + "users.csv", + "profiler_test_.csv", + } == set(names) for entity in entities: columns = entity.columns @@ -53,7 +58,7 @@ def test_profiler(self, run_profiler): """Also excluding the test for parquet files until the above is fixed""" csv_ = self.metadata.get_by_name( entity=Table, - fqn='datalake_for_integration_tests.default.MyBucket."users.csv"', + fqn=f'datalake_for_integration_tests.default.{BUCKET_NAME}."users.csv"', fields=["tableProfilerConfig"], ) # parquet_ = self.metadata.get_by_name( @@ -63,13 +68,13 @@ def test_profiler(self, run_profiler): # ) json_ = self.metadata.get_by_name( entity=Table, - fqn='datalake_for_integration_tests.default.MyBucket."names.json"', + fqn=f'datalake_for_integration_tests.default.{BUCKET_NAME}."names.json"', fields=["tableProfilerConfig"], ) jsonl_ = self.metadata.get_by_name( entity=Table, - fqn='datalake_for_integration_tests.default.MyBucket."names.jsonl"', + fqn=f'datalake_for_integration_tests.default.{BUCKET_NAME}."names.jsonl"', fields=["tableProfilerConfig"], ) diff --git a/ingestion/tests/integration/ometa/test_ometa_es_api.py b/ingestion/tests/integration/ometa/test_ometa_es_api.py index c922d7266850..a66d57c0a9a2 100644 --- a/ingestion/tests/integration/ometa/test_ometa_es_api.py +++ b/ingestion/tests/integration/ometa/test_ometa_es_api.py @@ 
-15,7 +15,9 @@ import time import uuid from unittest import TestCase +from unittest.mock import patch +import pytest from requests.utils import quote from metadata.generated.schema.api.data.createDatabase import CreateDatabaseRequest @@ -46,10 +48,12 @@ from metadata.generated.schema.security.client.openMetadataJWTClientConfig import ( OpenMetadataJWTClientConfig, ) -from metadata.generated.schema.type.basic import SqlQuery +from metadata.generated.schema.type.basic import EntityName, SqlQuery from metadata.ingestion.ometa.ometa_api import OpenMetadata from metadata.utils import fqn +from ..integration_base import get_create_entity + class OMetaESTest(TestCase): """ @@ -295,3 +299,67 @@ def test_get_queries_with_lineage(self): """Check the payload from ES""" res = self.metadata.es_get_queries_with_lineage(self.service.name.root) self.assertIn(self.checksum, res) + + def test_paginate_no_filter(self): + """We can paginate all the data""" + # Since the test can run in parallel with other tables being there, we just + # want to check we are actually getting some results + for asset in self.metadata.paginate_es(entity=Table, size=2): + assert asset + break + + def test_paginate_with_errors(self): + """We don't want to stop the ES yields just because a single Entity has an error""" + # 1. First, prepare some tables + for name in [f"table_{i}" for i in range(10)]: + self.metadata.create_or_update( + data=get_create_entity( + entity=Table, + name=EntityName(name), + reference=self.create_schema_entity.fullyQualifiedName, + ) + ) + + # 2. We'll fetch the entities, but we need to force a failure to ensure we can recover + error_name = fqn._build( + self.service_entity.name.root, + self.create_db_entity.name.root, + self.create_schema_entity.name.root, + "table_5", + ) + ok_name = fqn._build( + self.service_entity.name.root, + self.create_db_entity.name.root, + self.create_schema_entity.name.root, + "table_6", + ) + + rest_client = self.metadata.client + original_get = rest_client.get + with patch.object(rest_client, "get", wraps=rest_client.get) as mock_get: + + def side_effect(path: str, data=None): + # In case we pass filters as well, use `in path` rather than == + if f"/tables/name/{error_name}" in path: + raise RuntimeError("Error") + return original_get(path, data) + + mock_get.side_effect = side_effect + + # Validate we are raising the error + with pytest.raises(RuntimeError): + self.metadata.get_by_name(entity=Table, fqn=error_name) + + # This works + self.metadata.get_by_name(entity=Table, fqn=ok_name) + + query_filter = ( + '{"query":{"bool":{"must":[{"bool":{"should":[{"term":' + f'{{"service.displayName.keyword":"{self.service_entity.name.root}"}}}}]}}}}]}}}}}}' + ) + assets = list( + self.metadata.paginate_es( + entity=Table, query_filter=query_filter, size=2 + ) + ) + assert len(assets) == 10 diff --git a/ingestion/tests/integration/orm_profiler/test_datalake_profiler_e2e.py b/ingestion/tests/integration/orm_profiler/test_datalake_profiler_e2e.py deleted file mode 100644 index 7106be330ee0..000000000000 --- a/ingestion/tests/integration/orm_profiler/test_datalake_profiler_e2e.py +++ /dev/null @@ -1,440 +0,0 @@ -# Copyright 2021 Collate -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Test Datalake Profiler workflow - -To run this we need OpenMetadata server up and running. - -No sample data is required beforehand -""" - -import os -from copy import deepcopy -from pathlib import Path -from unittest import TestCase - -import boto3 -import botocore -from moto import mock_aws - -from metadata.generated.schema.entity.data.table import ColumnProfile, Table -from metadata.generated.schema.entity.services.connections.metadata.openMetadataConnection import ( - OpenMetadataConnection, -) -from metadata.generated.schema.entity.services.databaseService import DatabaseService -from metadata.generated.schema.security.client.openMetadataJWTClientConfig import ( - OpenMetadataJWTClientConfig, -) -from metadata.ingestion.ometa.ometa_api import OpenMetadata -from metadata.utils.time_utils import ( - get_beginning_of_day_timestamp_mill, - get_end_of_day_timestamp_mill, -) -from metadata.workflow.metadata import MetadataWorkflow -from metadata.workflow.profiler import ProfilerWorkflow -from metadata.workflow.workflow_output_handler import WorkflowResultStatus - -SERVICE_NAME = Path(__file__).stem -REGION = "us-west-1" -BUCKET_NAME = "MyBucket" -INGESTION_CONFIG = { - "source": { - "type": "datalake", - "serviceName": SERVICE_NAME, - "serviceConnection": { - "config": { - "type": "Datalake", - "configSource": { - "securityConfig": { - "awsAccessKeyId": "fake_access_key", - "awsSecretAccessKey": "fake_secret_key", - "awsRegion": REGION, - } - }, - "bucketName": f"{BUCKET_NAME}", - } - }, - "sourceConfig": {"config": {"type": "DatabaseMetadata"}}, - }, - "sink": {"type": "metadata-rest", "config": {}}, - "workflowConfig": { - "openMetadataServerConfig": { - "hostPort": "http://localhost:8585/api", - "authProvider": "openmetadata", - "securityConfig": { - "jwtToken": "eyJraWQiOiJHYjM4OWEtOWY3Ni1nZGpzLWE5MmotMDI0MmJrOTQzNTYiLCJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJzdWIiOiJhZG1pbiIsImlzQm90IjpmYWxzZSwiaXNzIjoib3Blbi1tZXRhZGF0YS5vcmciLCJpYXQiOjE2NjM5Mzg0NjIsImVtYWlsIjoiYWRtaW5Ab3Blbm1ldGFkYXRhLm9yZyJ9.tS8um_5DKu7HgzGBzS1VTA5uUjKWOCU0B_j08WXBiEC0mr0zNREkqVfwFDD-d24HlNEbrqioLsBuFRiwIWKc1m_ZlVQbG7P36RUxhuv2vbSp80FKyNM-Tj93FDzq91jsyNmsQhyNv_fNr3TXfzzSPjHt8Go0FMMP66weoKMgW2PbXlhVKwEuXUHyakLLzewm9UMeQaEiRzhiTMU3UkLXcKbYEJJvfNFcLwSl9W8JCO_l0Yj3ud-qt_nQYEZwqW6u5nfdQllN133iikV4fM5QZsMCnm8Rq1mvLR0y9bmJiD7fwM1tmJ791TUWqmKaTnP49U493VanKpUAfzIiOiIbhg" - }, - } - }, -} - - -@mock_aws -class DatalakeProfilerTestE2E(TestCase): - """datalake profiler E2E test""" - - @classmethod - def setUpClass(cls) -> None: - server_config = OpenMetadataConnection( - hostPort="http://localhost:8585/api", - authProvider="openmetadata", - securityConfig=OpenMetadataJWTClientConfig( - 
jwtToken="eyJraWQiOiJHYjM4OWEtOWY3Ni1nZGpzLWE5MmotMDI0MmJrOTQzNTYiLCJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJzdWIiOiJhZG1pbiIsImlzQm90IjpmYWxzZSwiaXNzIjoib3Blbi1tZXRhZGF0YS5vcmciLCJpYXQiOjE2NjM5Mzg0NjIsImVtYWlsIjoiYWRtaW5Ab3Blbm1ldGFkYXRhLm9yZyJ9.tS8um_5DKu7HgzGBzS1VTA5uUjKWOCU0B_j08WXBiEC0mr0zNREkqVfwFDD-d24HlNEbrqioLsBuFRiwIWKc1m_ZlVQbG7P36RUxhuv2vbSp80FKyNM-Tj93FDzq91jsyNmsQhyNv_fNr3TXfzzSPjHt8Go0FMMP66weoKMgW2PbXlhVKwEuXUHyakLLzewm9UMeQaEiRzhiTMU3UkLXcKbYEJJvfNFcLwSl9W8JCO_l0Yj3ud-qt_nQYEZwqW6u5nfdQllN133iikV4fM5QZsMCnm8Rq1mvLR0y9bmJiD7fwM1tmJ791TUWqmKaTnP49U493VanKpUAfzIiOiIbhg" - ), - ) # type: ignore - cls.metadata = OpenMetadata(server_config) - - def setUp(self) -> None: - # Mock our S3 bucket and ingest a file - boto3.DEFAULT_SESSION = None - self.client = boto3.client( - "s3", - region_name=REGION, - ) - - # check that we are not running our test against a real bucket - try: - s3 = boto3.resource( - "s3", - region_name=REGION, - aws_access_key_id="fake_access_key", - aws_secret_access_key="fake_secret_key", - ) - s3.meta.client.head_bucket(Bucket=BUCKET_NAME) - except botocore.exceptions.ClientError: - pass - else: - err = f"{BUCKET_NAME} should not exist." - raise EnvironmentError(err) - self.client.create_bucket( - Bucket=BUCKET_NAME, - CreateBucketConfiguration={"LocationConstraint": REGION}, - ) - current_dir = os.path.dirname(__file__) - resources_dir = os.path.join(current_dir, "resources") - - resources_paths = [ - os.path.join(path, filename) - for path, _, files in os.walk(resources_dir) - for filename in files - ] - - self.s3_keys = [] - - for path in resources_paths: - key = os.path.relpath(path, resources_dir) - self.s3_keys.append(key) - self.client.upload_file(Filename=path, Bucket=BUCKET_NAME, Key=key) - - # Ingest our S3 data - ingestion_workflow = MetadataWorkflow.create(INGESTION_CONFIG) - ingestion_workflow.execute() - ingestion_workflow.raise_from_status() - ingestion_workflow.print_status() - ingestion_workflow.stop() - - def test_datalake_profiler_workflow(self): - workflow_config = deepcopy(INGESTION_CONFIG) - workflow_config["source"]["sourceConfig"]["config"].update( - { - "type": "Profiler", - } - ) - workflow_config["processor"] = { - "type": "orm-profiler", - "config": {}, - } - - profiler_workflow = ProfilerWorkflow.create(workflow_config) - profiler_workflow.execute() - status = profiler_workflow.result_status() - profiler_workflow.stop() - - assert status == WorkflowResultStatus.SUCCESS - - table_profile = self.metadata.get_profile_data( - f'{SERVICE_NAME}.default.MyBucket."profiler_test_.csv"', - get_beginning_of_day_timestamp_mill(), - get_end_of_day_timestamp_mill(), - ) - - column_profile = self.metadata.get_profile_data( - f'{SERVICE_NAME}.default.MyBucket."profiler_test_.csv".first_name', - get_beginning_of_day_timestamp_mill(), - get_end_of_day_timestamp_mill(), - profile_type=ColumnProfile, - ) - - assert table_profile.entities - assert column_profile.entities - - def test_values_partitioned_datalake_profiler_workflow(self): - """Test partitioned datalake profiler workflow""" - workflow_config = deepcopy(INGESTION_CONFIG) - workflow_config["source"]["sourceConfig"]["config"].update( - { - "type": "Profiler", - } - ) - workflow_config["processor"] = { - "type": "orm-profiler", - "config": { - "tableConfig": [ - { - "fullyQualifiedName": f'{SERVICE_NAME}.default.MyBucket."profiler_test_.csv"', - "partitionConfig": { - "enablePartitioning": "true", - "partitionColumnName": "first_name", - "partitionIntervalType": "COLUMN-VALUE", - 
"partitionValues": ["John"], - }, - } - ] - }, - } - - profiler_workflow = ProfilerWorkflow.create(workflow_config) - profiler_workflow.execute() - status = profiler_workflow.result_status() - profiler_workflow.stop() - - assert status == WorkflowResultStatus.SUCCESS - - table = self.metadata.get_by_name( - entity=Table, - fqn=f'{SERVICE_NAME}.default.MyBucket."profiler_test_.csv"', - fields=["tableProfilerConfig"], - ) - - profile = self.metadata.get_latest_table_profile( - table.fullyQualifiedName - ).profile - - assert profile.rowCount == 1.0 - - def test_datetime_partitioned_datalake_profiler_workflow(self): - """Test partitioned datalake profiler workflow""" - workflow_config = deepcopy(INGESTION_CONFIG) - workflow_config["source"]["sourceConfig"]["config"].update( - { - "type": "Profiler", - } - ) - workflow_config["processor"] = { - "type": "orm-profiler", - "config": { - "tableConfig": [ - { - "fullyQualifiedName": f'{SERVICE_NAME}.default.MyBucket."profiler_test_.csv"', - "partitionConfig": { - "enablePartitioning": "true", - "partitionColumnName": "birthdate", - "partitionIntervalType": "TIME-UNIT", - "partitionIntervalUnit": "YEAR", - "partitionInterval": 35, - }, - } - ], - }, - } - - profiler_workflow = ProfilerWorkflow.create(workflow_config) - profiler_workflow.execute() - status = profiler_workflow.result_status() - profiler_workflow.stop() - - assert status == WorkflowResultStatus.SUCCESS - - table = self.metadata.get_by_name( - entity=Table, - fqn=f'{SERVICE_NAME}.default.MyBucket."profiler_test_.csv"', - fields=["tableProfilerConfig"], - ) - - profile = self.metadata.get_latest_table_profile( - table.fullyQualifiedName - ).profile - - assert profile.rowCount == 2.0 - - def test_integer_range_partitioned_datalake_profiler_workflow(self): - """Test partitioned datalake profiler workflow""" - workflow_config = deepcopy(INGESTION_CONFIG) - workflow_config["source"]["sourceConfig"]["config"].update( - { - "type": "Profiler", - } - ) - workflow_config["processor"] = { - "type": "orm-profiler", - "config": { - "tableConfig": [ - { - "fullyQualifiedName": f'{SERVICE_NAME}.default.MyBucket."profiler_test_.csv"', - "profileSample": 100, - "partitionConfig": { - "enablePartitioning": "true", - "partitionColumnName": "age", - "partitionIntervalType": "INTEGER-RANGE", - "partitionIntegerRangeStart": 35, - "partitionIntegerRangeEnd": 44, - }, - } - ], - }, - } - - profiler_workflow = ProfilerWorkflow.create(workflow_config) - profiler_workflow.execute() - status = profiler_workflow.result_status() - profiler_workflow.stop() - - assert status == WorkflowResultStatus.SUCCESS - - table = self.metadata.get_by_name( - entity=Table, - fqn=f'{SERVICE_NAME}.default.MyBucket."profiler_test_.csv"', - fields=["tableProfilerConfig"], - ) - - profile = self.metadata.get_latest_table_profile( - table.fullyQualifiedName - ).profile - - assert profile.rowCount == 2.0 - - def test_datalake_profiler_workflow_with_custom_profiler_config(self): - """Test custom profiler config return expected sample and metric computation""" - profiler_metrics = [ - "MIN", - "MAX", - "MEAN", - "MEDIAN", - ] - id_metrics = ["MIN", "MAX"] - non_metric_values = ["name", "timestamp"] - - workflow_config = deepcopy(INGESTION_CONFIG) - workflow_config["source"]["sourceConfig"]["config"].update( - { - "type": "Profiler", - } - ) - workflow_config["processor"] = { - "type": "orm-profiler", - "config": { - "profiler": { - "name": "ingestion_profiler", - "metrics": profiler_metrics, - }, - "tableConfig": [ - { - 
"fullyQualifiedName": f'{SERVICE_NAME}.default.MyBucket."profiler_test_.csv"', - "columnConfig": { - "includeColumns": [ - {"columnName": "id", "metrics": id_metrics}, - {"columnName": "age"}, - ] - }, - } - ], - }, - } - - profiler_workflow = ProfilerWorkflow.create(workflow_config) - profiler_workflow.execute() - status = profiler_workflow.result_status() - profiler_workflow.stop() - - assert status == WorkflowResultStatus.SUCCESS - - table = self.metadata.get_by_name( - entity=Table, - fqn=f'{SERVICE_NAME}.default.MyBucket."profiler_test_.csv"', - fields=["tableProfilerConfig"], - ) - - id_profile = self.metadata.get_profile_data( - f'{SERVICE_NAME}.default.MyBucket."profiler_test_.csv".id', - get_beginning_of_day_timestamp_mill(), - get_end_of_day_timestamp_mill(), - profile_type=ColumnProfile, - ).entities - - latest_id_profile = max(id_profile, key=lambda o: o.timestamp.root) - - id_metric_ln = 0 - for metric_name, metric in latest_id_profile: - if metric_name.upper() in id_metrics: - assert metric is not None - id_metric_ln += 1 - else: - assert metric is None if metric_name not in non_metric_values else True - - assert id_metric_ln == len(id_metrics) - - age_profile = self.metadata.get_profile_data( - f'{SERVICE_NAME}.default.MyBucket."profiler_test_.csv".age', - get_beginning_of_day_timestamp_mill(), - get_end_of_day_timestamp_mill(), - profile_type=ColumnProfile, - ).entities - - latest_age_profile = max(age_profile, key=lambda o: o.timestamp.root) - - age_metric_ln = 0 - for metric_name, metric in latest_age_profile: - if metric_name.upper() in profiler_metrics: - assert metric is not None - age_metric_ln += 1 - else: - assert metric is None if metric_name not in non_metric_values else True - - assert age_metric_ln == len(profiler_metrics) - - latest_exc_timestamp = latest_age_profile.timestamp.root - first_name_profile = self.metadata.get_profile_data( - f'{SERVICE_NAME}.default.MyBucket."profiler_test_.csv".first_name_profile', - get_beginning_of_day_timestamp_mill(), - get_end_of_day_timestamp_mill(), - profile_type=ColumnProfile, - ).entities - - assert not [ - p for p in first_name_profile if p.timestamp.root == latest_exc_timestamp - ] - - sample_data = self.metadata.get_sample_data(table) - assert sorted([c.root for c in sample_data.sampleData.columns]) == sorted( - ["id", "age"] - ) - - def tearDown(self): - s3 = boto3.resource( - "s3", - region_name=REGION, - ) - bucket = s3.Bucket(BUCKET_NAME) - for key in bucket.objects.all(): - key.delete() - bucket.delete() - - service_id = str( - self.metadata.get_by_name(entity=DatabaseService, fqn=SERVICE_NAME).id.root - ) - - self.metadata.delete( - entity=DatabaseService, - entity_id=service_id, - recursive=True, - hard_delete=True, - ) diff --git a/ingestion/tests/integration/sources/database/delta_lake/conftest.py b/ingestion/tests/integration/sources/database/delta_lake/conftest.py index 3fc5ac318910..a11a33076c92 100644 --- a/ingestion/tests/integration/sources/database/delta_lake/conftest.py +++ b/ingestion/tests/integration/sources/database/delta_lake/conftest.py @@ -36,7 +36,7 @@ def with_exposed_port(self, minio): ] = f"http://localhost:{self.minio_config.exposed_port}" -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def deltalake_storage_environment(): config = DeltaLakeStorageTestConfig() minio = get_minio_container(config.minio_config) diff --git a/ingestion/tests/unit/data_quality/source/test_test_suite.py b/ingestion/tests/unit/data_quality/source/test_test_suite.py index 
d909026a4f3e..822f125a8468 100644 --- a/ingestion/tests/unit/data_quality/source/test_test_suite.py +++ b/ingestion/tests/unit/data_quality/source/test_test_suite.py @@ -5,6 +5,9 @@ from metadata.data_quality.source.test_suite import TestSuiteSource from metadata.generated.schema.entity.data.table import Table +from metadata.generated.schema.entity.services.databaseService import ( + DatabaseServiceType, +) from metadata.generated.schema.metadataIngestion.workflow import ( OpenMetadataWorkflowConfig, ) @@ -76,6 +79,7 @@ def test_source_config(parameters, expected, monkeypatch): name="test_table", columns=[], testSuite=MOCK_ENTITY_REFERENCE, + serviceType=DatabaseServiceType.Postgres, ) mock_metadata.list_all_entities.return_value = [ TestCase( diff --git a/ingestion/tests/unit/profiler/pandas/test_custom_metrics.py b/ingestion/tests/unit/profiler/pandas/test_custom_metrics.py index 0270750c702f..cac724d17e1a 100644 --- a/ingestion/tests/unit/profiler/pandas/test_custom_metrics.py +++ b/ingestion/tests/unit/profiler/pandas/test_custom_metrics.py @@ -18,10 +18,7 @@ from unittest.mock import patch from uuid import uuid4 -import boto3 -import botocore import pandas as pd -from moto import mock_aws from metadata.generated.schema.entity.data.table import Column as EntityColumn from metadata.generated.schema.entity.data.table import ColumnName, DataType, Table @@ -43,7 +40,6 @@ REGION = "us-west-1" -@mock_aws class MetricsTest(TestCase): """ Run checks on different metrics @@ -103,45 +99,6 @@ class MetricsTest(TestCase): ) def setUp(self): - # Mock our S3 bucket and ingest a file - boto3.DEFAULT_SESSION = None - self.client = boto3.client( - "s3", - region_name=REGION, - ) - - # check that we are not running our test against a real bucket - try: - s3 = boto3.resource( - "s3", - region_name=REGION, - aws_access_key_id="fake_access_key", - aws_secret_access_key="fake_secret_key", - ) - s3.meta.client.head_bucket(Bucket=BUCKET_NAME) - except botocore.exceptions.ClientError: - pass - else: - err = f"{BUCKET_NAME} should not exist." 
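# (Same real-bucket guard as in the deleted datalake E2E test above. The whole
# moto/S3 scaffolding goes away because these pandas metrics tests now patch
# metadata.mixins.pandas.pandas_mixin.fetch_dataframe to serve the in-memory
# dataframes directly, as the surviving part of setUp below shows.)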
- raise EnvironmentError(err) - self.client.create_bucket( - Bucket=BUCKET_NAME, - CreateBucketConfiguration={"LocationConstraint": REGION}, - ) - - resources_paths = [ - os.path.join(path, filename) - for path, _, files in os.walk(self.resources_dir) - for filename in files - ] - - self.s3_keys = [] - - for path in resources_paths: - key = os.path.relpath(path, self.resources_dir) - self.s3_keys.append(key) - self.client.upload_file(Filename=path, Bucket=BUCKET_NAME, Key=key) - with patch( "metadata.mixins.pandas.pandas_mixin.fetch_dataframe", return_value=self.dfs, diff --git a/ingestion/tests/unit/utils/test_logger.py b/ingestion/tests/unit/utils/test_logger.py new file mode 100644 index 000000000000..432e220161e9 --- /dev/null +++ b/ingestion/tests/unit/utils/test_logger.py @@ -0,0 +1,31 @@ +from metadata.utils.logger import redacted_config + + +def test_safe_config_logger(): + example_obj = { + "serviceConnection": "some_value", + "securityConfig": "another_value", + "nested": { + "serviceConnection": "another_value", + "list": [ + {"serviceConnection": "value_in_list"}, + {"otherField": "other_value"}, + {"securityConfig": "security_value"}, + ], + }, + } + + result = redacted_config(example_obj) + expected = { + "serviceConnection": "REDACTED", + "securityConfig": "REDACTED", + "nested": { + "serviceConnection": "REDACTED", + "list": [ + {"serviceConnection": "REDACTED"}, + {"otherField": "other_value"}, + {"securityConfig": "REDACTED"}, + ], + }, + } + assert result == expected diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/apps/bundles/changeEvent/email/EmailPublisher.java b/openmetadata-service/src/main/java/org/openmetadata/service/apps/bundles/changeEvent/email/EmailPublisher.java index dc882e2963a0..cc31a5c0ff37 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/apps/bundles/changeEvent/email/EmailPublisher.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/apps/bundles/changeEvent/email/EmailPublisher.java @@ -31,8 +31,8 @@ import org.openmetadata.service.formatter.decorators.EmailMessageDecorator; import org.openmetadata.service.formatter.decorators.MessageDecorator; import org.openmetadata.service.jdbi3.CollectionDAO; -import org.openmetadata.service.util.EmailUtil; import org.openmetadata.service.util.JsonUtils; +import org.openmetadata.service.util.email.EmailUtil; @Slf4j public class EmailPublisher implements Destination { diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/apps/bundles/insights/DataInsightsReportApp.java b/openmetadata-service/src/main/java/org/openmetadata/service/apps/bundles/insights/DataInsightsReportApp.java index 6ca4ccbe0cdc..998b1fae172a 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/apps/bundles/insights/DataInsightsReportApp.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/apps/bundles/insights/DataInsightsReportApp.java @@ -6,6 +6,7 @@ import static org.openmetadata.service.apps.scheduler.AppScheduler.APP_NAME; import static org.openmetadata.service.util.SubscriptionUtil.getAdminsData; import static org.openmetadata.service.util.Utilities.getMonthAndDateFromEpoch; +import static org.openmetadata.service.util.email.TemplateConstants.DATA_INSIGHT_REPORT_TEMPLATE; import java.io.IOException; import java.util.HashMap; @@ -41,10 +42,10 @@ import org.openmetadata.service.jdbi3.ListFilter; import org.openmetadata.service.search.SearchClient; import org.openmetadata.service.search.SearchRepository; -import 
org.openmetadata.service.util.EmailUtil; import org.openmetadata.service.util.JsonUtils; import org.openmetadata.service.util.ResultList; import org.openmetadata.service.util.Utilities; +import org.openmetadata.service.util.email.EmailUtil; import org.openmetadata.service.workflows.searchIndex.PaginatedEntitiesSource; import org.quartz.JobExecutionContext; @@ -142,7 +143,7 @@ private void sendReportsToTeams(SearchClient searchClient, TimeConfig timeConfig ownershipTemplate, tierTemplate, EmailUtil.getDataInsightReportSubject(), - EmailUtil.DATA_INSIGHT_REPORT_TEMPLATE); + DATA_INSIGHT_REPORT_TEMPLATE); } catch (Exception ex) { LOG.error( "[DataInsightReport] Failed for Team: {}, Reason : {}", @@ -177,7 +178,7 @@ private void sendToAdmins(SearchClient searchClient, TimeConfig timeConfig) { ownershipTemplate, tierTemplate, EmailUtil.getDataInsightReportSubject(), - EmailUtil.DATA_INSIGHT_REPORT_TEMPLATE); + DATA_INSIGHT_REPORT_TEMPLATE); } catch (Exception ex) { LOG.error("[DataInsightReport] Failed for Admin, Reason : {}", ex.getMessage(), ex); } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/EmailMessageDecorator.java b/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/EmailMessageDecorator.java index 1bda0605d4a7..f9345f7d27dd 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/EmailMessageDecorator.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/EmailMessageDecorator.java @@ -14,7 +14,7 @@ package org.openmetadata.service.formatter.decorators; import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; -import static org.openmetadata.service.util.EmailUtil.getSmtpSettings; +import static org.openmetadata.service.util.email.EmailUtil.getSmtpSettings; import java.util.ArrayList; import java.util.Collections; diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/GChatMessageDecorator.java b/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/GChatMessageDecorator.java index d3cad6e90d50..4fbf0c6ba39f 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/GChatMessageDecorator.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/GChatMessageDecorator.java @@ -14,7 +14,7 @@ package org.openmetadata.service.formatter.decorators; import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; -import static org.openmetadata.service.util.EmailUtil.getSmtpSettings; +import static org.openmetadata.service.util.email.EmailUtil.getSmtpSettings; import java.util.ArrayList; import java.util.List; diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/MSTeamsMessageDecorator.java b/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/MSTeamsMessageDecorator.java index a977e829a785..68e432a5ca44 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/MSTeamsMessageDecorator.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/MSTeamsMessageDecorator.java @@ -14,7 +14,7 @@ package org.openmetadata.service.formatter.decorators; import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; -import static org.openmetadata.service.util.EmailUtil.getSmtpSettings; +import static org.openmetadata.service.util.email.EmailUtil.getSmtpSettings; 
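// Part of the EmailUtil move to org.openmetadata.service.util.email: the same
// one-line import swap is applied to EmailMessageDecorator and
// GChatMessageDecorator above and to SlackMessageDecorator below.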
import java.util.ArrayList; import java.util.List; diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/MessageDecorator.java b/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/MessageDecorator.java index 8c02df3d95e5..89d9e53a455b 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/MessageDecorator.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/MessageDecorator.java @@ -387,6 +387,15 @@ default OutgoingMessage createThreadMessage(String publisherName, ChangeEvent ev String.format( "Ends At : %s", getDateString(thread.getAnnouncement().getEndTime()))); } + case ENTITY_DELETED -> { + headerMessage = + String.format( + "[%s] **@%s** posted an update on **Announcement**", + publisherName, thread.getUpdatedBy()); + attachmentList.add( + String.format( + "Announcement Deleted: %s", thread.getAnnouncement().getDescription())); + } } } } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/SlackMessageDecorator.java b/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/SlackMessageDecorator.java index 42066979825c..27607275cd02 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/SlackMessageDecorator.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/formatter/decorators/SlackMessageDecorator.java @@ -14,7 +14,7 @@ package org.openmetadata.service.formatter.decorators; import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; -import static org.openmetadata.service.util.EmailUtil.getSmtpSettings; +import static org.openmetadata.service.util.email.EmailUtil.getSmtpSettings; import java.util.ArrayList; import java.util.List; diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DocumentRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DocumentRepository.java index 323f97be73a6..b49499d4ee1c 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DocumentRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DocumentRepository.java @@ -30,10 +30,10 @@ import org.openmetadata.service.exception.EntityNotFoundException; import org.openmetadata.service.resources.docstore.DocStoreResource; import org.openmetadata.service.resources.settings.SettingsCache; -import org.openmetadata.service.util.DefaultTemplateProvider; import org.openmetadata.service.util.EntityUtil.Fields; import org.openmetadata.service.util.JsonUtils; -import org.openmetadata.service.util.TemplateProvider; +import org.openmetadata.service.util.email.DefaultTemplateProvider; +import org.openmetadata.service.util.email.TemplateProvider; @Slf4j public class DocumentRepository extends EntityRepository { diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/SystemRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/SystemRepository.java index 598049453b2a..6f268bebbc03 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/SystemRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/SystemRepository.java @@ -99,13 +99,8 @@ public Settings getConfigWithKey(String key) { if (fetchedSettings == null) { return null; } - if (fetchedSettings.getConfigType() == SettingsType.EMAIL_CONFIGURATION) { - SmtpSettings emailConfig = 
(SmtpSettings) fetchedSettings.getConfigValue(); - emailConfig.setPassword("***********"); - fetchedSettings.setConfigValue(emailConfig); - } - return fetchedSettings; + return fetchedSettings; } catch (Exception ex) { LOG.error("Error while trying fetch Settings ", ex); } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/docstore/DocStoreResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/docstore/DocStoreResource.java index a5f0409ae1c5..fb69f12fec28 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/docstore/DocStoreResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/docstore/DocStoreResource.java @@ -64,9 +64,9 @@ import org.openmetadata.service.resources.Collection; import org.openmetadata.service.resources.EntityResource; import org.openmetadata.service.security.Authorizer; -import org.openmetadata.service.util.DefaultTemplateProvider; import org.openmetadata.service.util.JsonUtils; import org.openmetadata.service.util.ResultList; +import org.openmetadata.service.util.email.DefaultTemplateProvider; @Slf4j @Path("/v1/docStore") diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/search/SearchResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/search/SearchResource.java index 1624e75fd4a2..5a272ced6721 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/search/SearchResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/search/SearchResource.java @@ -127,6 +127,11 @@ public Response search( @DefaultValue("10") @QueryParam("size") int size, + @Parameter( + description = + "When paginating, specify the search_after values. 
Use it as search_after=<value1>,<value2>,...")
+          @QueryParam("search_after")
+          String searchAfter,
       @Parameter(
           description =
               "Sort the search results by field, available fields to "
@@ -196,6 +201,7 @@ public Response search(
             .domains(domains)
             .applyDomainFilter(
                 !subjectContext.isAdmin() && subjectContext.hasAnyRole(DOMAIN_ONLY_ACCESS_ROLE))
+            .searchAfter(searchAfter)
             .build();
     return searchRepository.search(request);
   }
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/ingestionpipelines/IngestionPipelineResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/ingestionpipelines/IngestionPipelineResource.java
index bcd4aebe4946..25461f404133 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/ingestionpipelines/IngestionPipelineResource.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/services/ingestionpipelines/IngestionPipelineResource.java
@@ -21,7 +21,6 @@ import static org.openmetadata.service.jdbi3.IngestionPipelineRepository.validateProfileSample;
 
 import io.swagger.v3.oas.annotations.ExternalDocumentation;
-import io.swagger.v3.oas.annotations.Hidden;
 import io.swagger.v3.oas.annotations.Operation;
 import io.swagger.v3.oas.annotations.Parameter;
 import io.swagger.v3.oas.annotations.media.Content;
@@ -93,7 +92,6 @@
 @Tag(
     name = "Ingestion Pipelines",
     description = "APIs related pipelines/workflows created by the system to ingest metadata.")
-@Hidden
 @Produces(MediaType.APPLICATION_JSON)
 @Consumes(MediaType.APPLICATION_JSON)
 @Collection(name = "IngestionPipelines")
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/system/SystemResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/system/SystemResource.java
index d327ebc568d1..743632c80256 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/system/SystemResource.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/system/SystemResource.java
@@ -46,8 +46,8 @@
 import org.openmetadata.service.resources.Collection;
 import org.openmetadata.service.security.Authorizer;
 import org.openmetadata.service.security.JwtFilter;
-import org.openmetadata.service.util.EmailUtil;
 import org.openmetadata.service.util.ResultList;
+import org.openmetadata.service.util.email.EmailUtil;
 
 @Path("/v1/system")
 @Tag(name = "System", description = "APIs related to System configuration and settings.")
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/ClassificationResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/ClassificationResource.java
index ccee98f4de26..18a60fa1a986 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/ClassificationResource.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/ClassificationResource.java
@@ -48,7 +48,6 @@
 import org.openmetadata.schema.api.classification.CreateClassification;
 import org.openmetadata.schema.api.data.RestoreEntity;
 import org.openmetadata.schema.entity.classification.Classification;
-import org.openmetadata.schema.entity.data.Table;
 import org.openmetadata.schema.type.EntityHistory;
 import org.openmetadata.schema.type.Include;
 import org.openmetadata.schema.type.MetadataOperation;
@@ -440,7 +439,7 @@ public Response delete(
             content =
                 @Content(
                     mediaType = "application/json",
-                    schema = @Schema(implementation 
= Table.class))) + schema = @Schema(implementation = Classification.class))) }) public Response restore( @Context UriInfo uriInfo, diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/UserResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/UserResource.java index 8a6e406c5fdc..999cdd326afc 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/UserResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/UserResource.java @@ -27,12 +27,12 @@ import static org.openmetadata.service.jdbi3.UserRepository.AUTH_MECHANISM_FIELD; import static org.openmetadata.service.secrets.ExternalSecretsManager.NULL_SECRET_STRING; import static org.openmetadata.service.security.jwt.JWTTokenGenerator.getExpiryDate; -import static org.openmetadata.service.util.EmailUtil.getSmtpSettings; import static org.openmetadata.service.util.UserUtil.getRoleListFromUser; import static org.openmetadata.service.util.UserUtil.getRolesFromAuthorizationToken; import static org.openmetadata.service.util.UserUtil.getUser; import static org.openmetadata.service.util.UserUtil.reSyncUserRolesFromToken; import static org.openmetadata.service.util.UserUtil.validateAndGetRolesRef; +import static org.openmetadata.service.util.email.EmailUtil.getSmtpSettings; import at.favre.lib.crypto.bcrypt.BCrypt; import freemarker.template.TemplateException; @@ -146,7 +146,6 @@ import org.openmetadata.service.security.policyevaluator.OperationContext; import org.openmetadata.service.security.policyevaluator.ResourceContext; import org.openmetadata.service.security.saml.JwtTokenCacheManager; -import org.openmetadata.service.util.EmailUtil; import org.openmetadata.service.util.EntityUtil; import org.openmetadata.service.util.EntityUtil.Fields; import org.openmetadata.service.util.JsonUtils; @@ -154,6 +153,8 @@ import org.openmetadata.service.util.RestUtil.PutResponse; import org.openmetadata.service.util.ResultList; import org.openmetadata.service.util.TokenUtil; +import org.openmetadata.service.util.email.EmailUtil; +import org.openmetadata.service.util.email.TemplateConstants; @Slf4j @Path("/v1/users") @@ -644,7 +645,7 @@ private void sendInviteMailToUserForBasicAuth(UriInfo uriInfo, User user, Create authHandler.sendInviteMailToUser( uriInfo, user, - String.format("Welcome to %s", EmailUtil.getEmailingEntity()), + String.format("Welcome to %s", EmailUtil.getSmtpSettings().getEmailingEntity()), create.getCreatePasswordType(), create.getPassword()); } catch (Exception ex) { @@ -1074,7 +1075,7 @@ public Response generateResetPasswordLink(@Context UriInfo uriInfo, @Valid Email uriInfo, registeredUser, EmailUtil.getPasswordResetSubject(), - EmailUtil.PASSWORD_RESET_TEMPLATE_FILE); + TemplateConstants.RESET_LINK_TEMPLATE); } catch (Exception ex) { LOG.error("Error in sending mail for reset password" + ex.getMessage()); return Response.status(424).entity(new ErrorMessage(424, EMAIL_SENDING_ISSUE)).build(); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/search/SearchIndexUtils.java b/openmetadata-service/src/main/java/org/openmetadata/service/search/SearchIndexUtils.java index 8f3bd137416b..edede39ad542 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/search/SearchIndexUtils.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/search/SearchIndexUtils.java @@ -4,13 +4,17 @@ import java.util.Arrays; import java.util.Collections; import 
java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
 import javax.json.JsonArray;
+import javax.json.JsonNumber;
 import javax.json.JsonObject;
+import javax.json.JsonString;
+import javax.json.JsonValue;
 import org.openmetadata.schema.tests.DataQualityReport;
 import org.openmetadata.schema.tests.Datum;
 import org.openmetadata.schema.tests.type.DataQualityReportMetadata;
@@ -191,10 +195,17 @@ private static void traverseAggregationResults(
         .forEach(
             bucket -> {
               JsonObject bucketObject = (JsonObject) bucket;
-              Optional<String> bucketKey = Optional.of(bucketObject.getString("key"));
-
-              bucketKey.ifPresentOrElse(
-                  s -> nodeData.put(dimensions.get(0), s),
+              Optional<JsonValue> val = Optional.of(bucketObject.get("key"));
+
+              val.ifPresentOrElse(
+                  s -> {
+                    switch (s.getValueType()) {
+                      case NUMBER -> nodeData.put(
+                          dimensions.get(0), String.valueOf((JsonNumber) s));
+                      default -> nodeData.put(
+                          dimensions.get(0), ((JsonString) s).getString());
+                    }
+                  },
                   () -> nodeData.put(dimensions.get(0), null));
 
               // Traverse the next level of the aggregation tree.
@@ -257,21 +268,36 @@ public static Map buildAggregationString(String aggregation) {
       Map<String, String> aggregationMap = new HashMap<>();
       String[] parts = nested[i].split(":");
-      for (String part : parts) {
-        String[] kvPairs = part.split("=");
-        if (kvPairs[0].equals("field")) {
-          aggregationString
-              .append("\"")
-              .append(kvPairs[0])
-              .append("\":\"")
-              .append(kvPairs[1])
-              .append("\"");
+      Iterator<String> partsIterator = Arrays.stream(parts).iterator();
+
+      while (partsIterator.hasNext()) {
+        String part = partsIterator.next();
+        if (!partsIterator.hasNext()) {
+          // last element = key=value pairs of the aggregation
+          String[] subParts = part.split("&");
+          Arrays.stream(subParts)
+              .forEach(
+                  subPart -> {
+                    String[] kvPairs = subPart.split("=");
+                    aggregationString
+                        .append("\"")
+                        .append(kvPairs[0])
+                        .append("\":\"")
+                        .append(kvPairs[1])
+                        .append("\"");
+                    aggregationMap.put(kvPairs[0], kvPairs[1]);
+                    // add comma if not the last element
+                    if (Arrays.asList(subParts).indexOf(subPart) < subParts.length - 1)
+                      aggregationString.append(",");
+                  });
           aggregationString.append("}");
         } else {
+          String[] kvPairs = part.split("=");
           aggregationString.append("\"").append(kvPairs[1]).append("\":{");
+          aggregationMap.put(kvPairs[0], kvPairs[1]);
         }
-        aggregationMap.put(kvPairs[0], kvPairs[1]);
       }
+
       if (i < nested.length - 1) {
         aggregationString.append(",\"aggs\":{");
       }
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/search/SearchRequest.java b/openmetadata-service/src/main/java/org/openmetadata/service/search/SearchRequest.java
index b64df29b8d3a..db280ec279cd 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/search/SearchRequest.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/search/SearchRequest.java
@@ -1,7 +1,10 @@
 package org.openmetadata.service.search;
 
+import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty;
+
 import java.util.List;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import lombok.Getter;
 import lombok.Setter;
 import org.openmetadata.schema.type.EntityReference;
@@ -25,6 +28,7 @@ public class SearchRequest {
   private final boolean applyDomainFilter;
   private final List<EntityReference> domains;
   private final boolean getHierarchy;
+  private final Object[] searchAfter;
 
   public SearchRequest(ElasticSearchRequestBuilder builder) {
     this.query = builder.query;
@@ -43,6 +47,7 
@@ public SearchRequest(ElasticSearchRequestBuilder builder) {
     this.getHierarchy = builder.getHierarchy;
     this.domains = builder.domains;
     this.applyDomainFilter = builder.applyDomainFilter;
+    this.searchAfter = builder.searchAfter;
   }
 
   // Builder class for ElasticSearchRequest
@@ -64,6 +69,7 @@ public static class ElasticSearchRequestBuilder {
     private boolean getHierarchy;
     private boolean applyDomainFilter;
     private List<EntityReference> domains;
+    private Object[] searchAfter;
 
     public ElasticSearchRequestBuilder(String query, int size, String index) {
       this.query = query;
@@ -139,6 +145,14 @@ public ElasticSearchRequestBuilder domains(List references) {
       return this;
     }
 
+    public ElasticSearchRequestBuilder searchAfter(String searchAfter) {
+      this.searchAfter = null;
+      if (!nullOrEmpty(searchAfter)) {
+        this.searchAfter = Stream.of(searchAfter.split(",")).toArray(Object[]::new);
+      }
+      return this;
+    }
+
     public SearchRequest build() {
       return new SearchRequest(this);
     }
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/search/elasticsearch/ElasticSearchClient.java b/openmetadata-service/src/main/java/org/openmetadata/service/search/elasticsearch/ElasticSearchClient.java
index ea108833e050..47989d82a556 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/search/elasticsearch/ElasticSearchClient.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/search/elasticsearch/ElasticSearchClient.java
@@ -376,6 +376,10 @@ public Response search(SearchRequest request) throws IOException {
       }
     }
 
+    if (!nullOrEmpty(request.getSearchAfter())) {
+      searchSourceBuilder.searchAfter(request.getSearchAfter());
+    }
+
     /* For backward-compatibility we continue supporting the deleted argument, this should be removed in future versions */
     if (request
         .getIndex()
@@ -1005,6 +1009,15 @@ public static List buildAggregation(JsonObject aggregations)
               AggregationBuilders.avg(key).field(avgAggregation.getString("field"));
           aggregationBuilders.add(avgAggregationBuilder);
           break;
+        case "date_histogram":
+          JsonObject dateHistogramAggregation = aggregation.getJsonObject(aggregationType);
+          String calendarInterval = dateHistogramAggregation.getString("calendar_interval");
+          DateHistogramAggregationBuilder dateHistogramAggregationBuilder =
+              AggregationBuilders.dateHistogram(key)
+                  .field(dateHistogramAggregation.getString("field"))
+                  .calendarInterval(new DateHistogramInterval(calendarInterval));
+          aggregationBuilders.add(dateHistogramAggregationBuilder);
+          break;
         case "nested":
           JsonObject nestedAggregation = aggregation.getJsonObject("nested");
           AggregationBuilder nestedAggregationBuilder =
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/security/auth/BasicAuthenticator.java b/openmetadata-service/src/main/java/org/openmetadata/service/security/auth/BasicAuthenticator.java
index 11daa6041f9d..e6c2041c5f60 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/security/auth/BasicAuthenticator.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/security/auth/BasicAuthenticator.java
@@ -37,9 +37,17 @@
 import static org.openmetadata.service.exception.CatalogExceptionMessage.TOKEN_EXPIRED;
 import static org.openmetadata.service.exception.CatalogExceptionMessage.TOKEN_EXPIRY_ERROR;
 import static org.openmetadata.service.resources.teams.UserResource.USER_PROTECTED_FIELDS;
-import static org.openmetadata.service.util.EmailUtil.getSmtpSettings;
 import static org.openmetadata.service.util.UserUtil.getRoleListFromUser;
 import static 
org.openmetadata.service.util.UserUtil.getUser; +import static org.openmetadata.service.util.email.EmailUtil.getSmtpSettings; +import static org.openmetadata.service.util.email.EmailUtil.sendAccountStatus; +import static org.openmetadata.service.util.email.TemplateConstants.APPLICATION_LOGIN_LINK; +import static org.openmetadata.service.util.email.TemplateConstants.ENTITY; +import static org.openmetadata.service.util.email.TemplateConstants.INVITE_CREATE_PASSWORD_TEMPLATE; +import static org.openmetadata.service.util.email.TemplateConstants.INVITE_RANDOM_PASSWORD_TEMPLATE; +import static org.openmetadata.service.util.email.TemplateConstants.PASSWORD; +import static org.openmetadata.service.util.email.TemplateConstants.SUPPORT_URL; +import static org.openmetadata.service.util.email.TemplateConstants.USERNAME; import at.favre.lib.crypto.bcrypt.BCrypt; import freemarker.template.TemplateException; @@ -81,12 +89,12 @@ import org.openmetadata.service.security.AuthenticationException; import org.openmetadata.service.security.SecurityUtil; import org.openmetadata.service.security.jwt.JWTTokenGenerator; -import org.openmetadata.service.util.EmailUtil; import org.openmetadata.service.util.EntityUtil; import org.openmetadata.service.util.JsonUtils; import org.openmetadata.service.util.PasswordUtil; import org.openmetadata.service.util.RestUtil.PutResponse; import org.openmetadata.service.util.TokenUtil; +import org.openmetadata.service.util.email.EmailUtil; @Slf4j public class BasicAuthenticator implements AuthenticatorHandler { @@ -253,7 +261,7 @@ public void resetUserPasswordWithToken(UriInfo uriInfo, PasswordResetRequest req // Update user about Password Change try { - EmailUtil.sendAccountStatus(storedUser, "Update Password", "Change Successful"); + sendAccountStatus(storedUser, "Update Password", "Change Successful"); } catch (TemplateException ex) { LOG.error("Error in sending Password Change Mail to User. Reason : " + ex.getMessage(), ex); throw new CustomExceptionMessage(424, FAILED_SEND_EMAIL, EMAIL_SENDING_ISSUE); @@ -311,7 +319,7 @@ public void changeUserPwdWithOldPwd( sendInviteMailToUser( uriInfo, response.getEntity(), - String.format("%s: Password Update", EmailUtil.getEmailingEntity()), + String.format("%s: Password Update", getSmtpSettings().getEmailingEntity()), ADMIN_CREATE, request.getNewPassword()); } @@ -328,14 +336,14 @@ public void sendInviteMailToUser( switch (requestType) { case ADMIN_CREATE -> { Map templatePopulator = new HashMap<>(); - templatePopulator.put(EmailUtil.ENTITY, EmailUtil.getEmailingEntity()); - templatePopulator.put(EmailUtil.SUPPORT_URL, EmailUtil.getSupportUrl()); - templatePopulator.put(EmailUtil.USERNAME, user.getName()); - templatePopulator.put(EmailUtil.PASSWORD, pwd); - templatePopulator.put(EmailUtil.APPLICATION_LOGIN_LINK, EmailUtil.getOMUrl()); + templatePopulator.put(ENTITY, getSmtpSettings().getEmailingEntity()); + templatePopulator.put(SUPPORT_URL, getSmtpSettings().getSupportUrl()); + templatePopulator.put(USERNAME, user.getName()); + templatePopulator.put(PASSWORD, pwd); + templatePopulator.put(APPLICATION_LOGIN_LINK, getSmtpSettings().getOpenMetadataUrl()); try { EmailUtil.sendMail( - subject, templatePopulator, user.getEmail(), EmailUtil.INVITE_RANDOM_PWD, true); + subject, templatePopulator, user.getEmail(), INVITE_RANDOM_PASSWORD_TEMPLATE, true); } catch (TemplateException ex) { LOG.error( "Failed in sending Mail to user [{}]. 
Reason : {}", @@ -345,7 +353,7 @@ public void sendInviteMailToUser( } } case USER_CREATE -> sendPasswordResetLink( - uriInfo, user, subject, EmailUtil.INVITE_CREATE_PWD); + uriInfo, user, subject, INVITE_CREATE_PASSWORD_TEMPLATE); default -> LOG.error("Invalid Password Create Type"); } } @@ -478,7 +486,7 @@ public void recordFailedLoginAttempt(String providedIdentity, User storedUser) loginAttemptCache.recordFailedLogin(providedIdentity); int failedLoginAttempt = loginAttemptCache.getUserFailedLoginCount(providedIdentity); if (failedLoginAttempt == SecurityUtil.getLoginConfiguration().getMaxLoginFailAttempts()) { - EmailUtil.sendAccountStatus( + sendAccountStatus( storedUser, "Multiple Failed Login Attempts.", String.format( diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/security/auth/LdapAuthenticator.java b/openmetadata-service/src/main/java/org/openmetadata/service/security/auth/LdapAuthenticator.java index 62911784fe93..005910e0de45 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/security/auth/LdapAuthenticator.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/security/auth/LdapAuthenticator.java @@ -68,11 +68,11 @@ import org.openmetadata.service.security.AuthenticationException; import org.openmetadata.service.security.SecurityUtil; import org.openmetadata.service.security.jwt.JWTTokenGenerator; -import org.openmetadata.service.util.EmailUtil; import org.openmetadata.service.util.JsonUtils; import org.openmetadata.service.util.LdapUtil; import org.openmetadata.service.util.TokenUtil; import org.openmetadata.service.util.UserUtil; +import org.openmetadata.service.util.email.EmailUtil; import org.springframework.beans.BeanUtils; import org.springframework.util.CollectionUtils; diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/util/SubscriptionUtil.java b/openmetadata-service/src/main/java/org/openmetadata/service/util/SubscriptionUtil.java index 359ea9771f5d..f763cce4c4d6 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/util/SubscriptionUtil.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/util/SubscriptionUtil.java @@ -372,22 +372,31 @@ public static Set getTargetsForAlert( handleConversationNotification(category, type, event)); // TODO: For Announcement, Immediate Consumer needs to be Notified (find information from // Lineage) + case Announcement -> { + receiverUrls.addAll(buildReceivers(action, category, type, event, event.getEntityId())); + } } } else { EntityInterface entityInterface = getEntity(event); - receiverUrls.addAll( - buildReceiversListFromActions( - action, - category, - type, - Entity.getCollectionDAO(), - entityInterface.getId(), - event.getEntityType())); + receiverUrls.addAll(buildReceivers(action, category, type, event, entityInterface.getId())); } return receiverUrls; } + private static Set buildReceivers( + SubscriptionAction action, + SubscriptionDestination.SubscriptionCategory category, + SubscriptionDestination.SubscriptionType type, + ChangeEvent event, + UUID id) { + Set result = new HashSet<>(); + result.addAll( + buildReceiversListFromActions( + action, category, type, Entity.getCollectionDAO(), id, event.getEntityType())); + return result; + } + public static List getTargetsForWebhookAlert( SubscriptionAction action, SubscriptionDestination.SubscriptionCategory category, diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/util/UserUtil.java 
b/openmetadata-service/src/main/java/org/openmetadata/service/util/UserUtil.java index 07e7bab59a2a..01fe23b3a6c4 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/util/UserUtil.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/util/UserUtil.java @@ -51,6 +51,7 @@ import org.openmetadata.service.security.jwt.JWTTokenGenerator; import org.openmetadata.service.util.EntityUtil.Fields; import org.openmetadata.service.util.RestUtil.PutResponse; +import org.openmetadata.service.util.email.EmailUtil; @Slf4j public final class UserUtil { diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/util/DefaultTemplateProvider.java b/openmetadata-service/src/main/java/org/openmetadata/service/util/email/DefaultTemplateProvider.java similarity index 72% rename from openmetadata-service/src/main/java/org/openmetadata/service/util/DefaultTemplateProvider.java rename to openmetadata-service/src/main/java/org/openmetadata/service/util/email/DefaultTemplateProvider.java index 42c7ffe34d96..a288ca701945 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/util/DefaultTemplateProvider.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/util/email/DefaultTemplateProvider.java @@ -1,12 +1,12 @@ -package org.openmetadata.service.util; +package org.openmetadata.service.util.email; + +import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; import freemarker.template.Configuration; import freemarker.template.Template; import java.io.IOException; import java.io.StringReader; import java.util.HashSet; -import java.util.List; -import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -15,7 +15,6 @@ import org.openmetadata.schema.email.EmailTemplate; import org.openmetadata.schema.email.EmailTemplatePlaceholder; import org.openmetadata.schema.email.TemplateValidationResponse; -import org.openmetadata.schema.entities.docStore.Document; import org.openmetadata.service.Entity; import org.openmetadata.service.jdbi3.DocumentRepository; @@ -32,7 +31,7 @@ public DefaultTemplateProvider() { public Template getTemplate(String templateName) throws IOException { EmailTemplate emailTemplate = documentRepository.fetchEmailTemplateByName(templateName); String template = emailTemplate.getTemplate(); - if (template == null || template.isEmpty()) { + if (nullOrEmpty(template)) { throw new IOException("Template content not found for template: " + templateName); } @@ -40,33 +39,6 @@ public Template getTemplate(String templateName) throws IOException { templateName, new StringReader(template), new Configuration(Configuration.VERSION_2_3_31)); } - public Map> getDocumentPlaceHolders() { - List documents = documentRepository.fetchAllEmailTemplates(); - - return documents.stream() - .collect( - Collectors.toMap( - Document::getName, - document -> { - EmailTemplate emailTemplate = - JsonUtils.convertValue(document.getData(), EmailTemplate.class); - return emailTemplate.getPlaceHolders(); - })); - } - - public Map> getPlaceholdersFromTemplate() { - List listOfDocuments = documentRepository.fetchAllEmailTemplates(); - - return listOfDocuments.stream() - .collect( - Collectors.toMap( - Document::getName, - document -> - extractPlaceholders( - JsonUtils.convertValue(document.getData(), EmailTemplate.class) - .getTemplate()))); - } - @Override public TemplateValidationResponse validateEmailTemplate(String docName, String actualContent) { Set expectedPlaceholders = diff --git 
a/openmetadata-service/src/main/java/org/openmetadata/service/util/EmailUtil.java b/openmetadata-service/src/main/java/org/openmetadata/service/util/email/EmailUtil.java similarity index 64% rename from openmetadata-service/src/main/java/org/openmetadata/service/util/EmailUtil.java rename to openmetadata-service/src/main/java/org/openmetadata/service/util/email/EmailUtil.java index a6420a86cd2d..a87018d79810 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/util/EmailUtil.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/util/email/EmailUtil.java @@ -11,8 +11,32 @@ * limitations under the License. */ -package org.openmetadata.service.util; - +package org.openmetadata.service.util.email; + +import static org.openmetadata.service.util.email.TemplateConstants.ACCOUNT_ACTIVITY_CHANGE_TEMPLATE; +import static org.openmetadata.service.util.email.TemplateConstants.ACCOUNT_STATUS_SUBJECT; +import static org.openmetadata.service.util.email.TemplateConstants.ACTION_KEY; +import static org.openmetadata.service.util.email.TemplateConstants.ACTION_STATUS_KEY; +import static org.openmetadata.service.util.email.TemplateConstants.APPLICATION_LOGIN_LINK; +import static org.openmetadata.service.util.email.TemplateConstants.CHANGE_EVENT_TEMPLATE; +import static org.openmetadata.service.util.email.TemplateConstants.CHANGE_EVENT_UPDATE; +import static org.openmetadata.service.util.email.TemplateConstants.DEFAULT_EXPIRATION_TIME; +import static org.openmetadata.service.util.email.TemplateConstants.EMAIL_IGNORE_MSG; +import static org.openmetadata.service.util.email.TemplateConstants.EMAIL_VERIFICATION_LINKKEY; +import static org.openmetadata.service.util.email.TemplateConstants.EMAIL_VERIFICATION_SUBJECT; +import static org.openmetadata.service.util.email.TemplateConstants.EMAIL_VERIFICATION_TEMPLATE; +import static org.openmetadata.service.util.email.TemplateConstants.ENTITY; +import static org.openmetadata.service.util.email.TemplateConstants.EXPIRATION_TIME_KEY; +import static org.openmetadata.service.util.email.TemplateConstants.INVITE_RANDOM_PASSWORD_TEMPLATE; +import static org.openmetadata.service.util.email.TemplateConstants.INVITE_SUBJECT; +import static org.openmetadata.service.util.email.TemplateConstants.PASSWORD; +import static org.openmetadata.service.util.email.TemplateConstants.PASSWORD_RESET_SUBJECT; +import static org.openmetadata.service.util.email.TemplateConstants.REPORT_SUBJECT; +import static org.openmetadata.service.util.email.TemplateConstants.SUPPORT_URL; +import static org.openmetadata.service.util.email.TemplateConstants.TASK_SUBJECT; +import static org.openmetadata.service.util.email.TemplateConstants.TEST_EMAIL_SUBJECT; +import static org.openmetadata.service.util.email.TemplateConstants.TEST_MAIL_TEMPLATE; +import static org.openmetadata.service.util.email.TemplateConstants.USERNAME; import static org.simplejavamail.api.mailer.config.TransportStrategy.SMTP; import static org.simplejavamail.api.mailer.config.TransportStrategy.SMTPS; import static org.simplejavamail.api.mailer.config.TransportStrategy.SMTP_TLS; @@ -22,6 +46,7 @@ import java.io.IOException; import java.io.StringWriter; import java.text.SimpleDateFormat; +import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Map; @@ -46,46 +71,8 @@ @Slf4j public class EmailUtil { - public static final String USERNAME = "userName"; - public static final String ENTITY = "entity"; - public static final String SUPPORT_URL = "supportUrl"; - // Email 
Verification - private static final String EMAIL_VERIFICATION_SUBJECT = - "%s: Verify your Email Address (Action Required)"; - public static final String EMAIL_VERIFICATION_LINKKEY = "userEmailTokenVerificationLink"; - public static final String EMAIL_VERIFICATION_TEMPLATE = "email-verification"; - // Password Reset Link - private static final String PASSWORD_RESET_SUBJECT = "%s: Reset your Password"; - public static final String PASSWORD_RESET_LINKKEY = "userResetPasswordLink"; - public static final String EXPIRATION_TIME_KEY = "expirationTime"; - public static final String DEFAULT_EXPIRATION_TIME = "60"; - public static final String PASSWORD = "password"; - public static final String APPLICATION_LOGIN_LINK = "applicationLoginLink"; - public static final String PASSWORD_RESET_TEMPLATE_FILE = "reset-link"; - // Account Change Status - private static final String ACCOUNT_STATUS_SUBJECT = "%s: Change in Account Status"; - public static final String ACTION_KEY = "action"; - public static final String ACTION_STATUS_KEY = "actionStatus"; - public static final String ACCOUNT_STATUS_TEMPLATE_FILE = "account-activity-change"; - private static final String INVITE_SUBJECT = "Welcome to %s"; - private static final String CHANGE_EVENT_UPDATE = "[%s] - Change Event Update from %s"; - - private static final String TASK_SUBJECT = "%s : Task Assignment Notification"; - public static final String INVITE_RANDOM_PWD = "invite-randompwd"; - - public static final String CHANGE_EVENT_TEMPLATE = "changeEvent"; - public static final String INVITE_CREATE_PWD = "invite-createPassword"; - public static final String TASK_NOTIFICATION_TEMPLATE = "taskAssignment"; - private static final String REPORT_SUBJECT = "%s: Data Insights Weekly - %s"; - public static final String DATA_INSIGHT_REPORT_TEMPLATE = "dataInsightReport"; - public static final String TEST_EMAIL_TEMPLATE = "testMail"; - public static final String TEST_EMAIL_SUBJECT = "%s : Test Email"; - private static SmtpSettings storedSmtpSettings; private static Mailer mailer; - - private static final String EMAIL_IGNORE_MSG = - "Email was not sent to {} as SMTP setting is not enabled"; - + private static SmtpSettings storedSmtpSettings; private static TemplateProvider templateProvider; static { @@ -93,7 +80,6 @@ public class EmailUtil { initializeTemplateProvider(); } - // initialize template provider private static void initializeTemplateProvider() { templateProvider = new DefaultTemplateProvider(); } @@ -102,7 +88,7 @@ private EmailUtil() { try { getSmtpSettings(); initializeTemplateProvider(); - LOG.info("Email Util cache is initialized"); + LOG.info("Email Util Cache is initialized"); } catch (Exception ex) { LOG.warn("[MAILER] Smtp Configurations are missing : Reason {} ", ex.getMessage(), ex); } @@ -137,18 +123,22 @@ private static Mailer createMailer(SmtpSettings smtpServerSettings) { public static void sendAccountStatus(User user, String action, String status) throws IOException, TemplateException { + if (Boolean.TRUE.equals(getSmtpSettings().getEnableSmtpServer())) { - Map templatePopulator = new HashMap<>(); - templatePopulator.put(ENTITY, getEmailingEntity()); - templatePopulator.put(SUPPORT_URL, getSupportUrl()); - templatePopulator.put(USERNAME, user.getName()); - templatePopulator.put(ACTION_KEY, action); - templatePopulator.put(ACTION_STATUS_KEY, status); + Map templatePopulator = + new TemplatePopulatorBuilder() + .add(ENTITY, getSmtpSettings().getEmailingEntity()) + .add(SUPPORT_URL, getSmtpSettings().getSupportUrl()) + .add(USERNAME, 
user.getName()) + .add(ACTION_KEY, action) + .add(ACTION_STATUS_KEY, status) + .build(); + sendMail( getAccountStatusChangeSubject(), templatePopulator, user.getEmail(), - ACCOUNT_STATUS_TEMPLATE_FILE, + ACCOUNT_ACTIVITY_CHANGE_TEMPLATE, true); } else { LOG.warn(EMAIL_IGNORE_MSG, user.getEmail()); @@ -158,12 +148,16 @@ public static void sendAccountStatus(User user, String action, String status) public static void sendEmailVerification(String emailVerificationLink, User user) throws IOException, TemplateException { if (Boolean.TRUE.equals(getSmtpSettings().getEnableSmtpServer())) { - Map templatePopulator = new HashMap<>(); - templatePopulator.put(ENTITY, getEmailingEntity()); - templatePopulator.put(SUPPORT_URL, getSupportUrl()); - templatePopulator.put(USERNAME, user.getName()); - templatePopulator.put(EMAIL_VERIFICATION_LINKKEY, emailVerificationLink); - templatePopulator.put(EXPIRATION_TIME_KEY, "24"); + + Map templatePopulator = + new TemplatePopulatorBuilder() + .add(ENTITY, getSmtpSettings().getEmailingEntity()) + .add(SUPPORT_URL, getSmtpSettings().getSupportUrl()) + .add(USERNAME, user.getName()) + .add(EMAIL_VERIFICATION_LINKKEY, emailVerificationLink) + .add(EXPIRATION_TIME_KEY, "24") + .build(); + sendMail( getEmailVerificationSubject(), templatePopulator, @@ -179,12 +173,15 @@ public static void sendPasswordResetLink( String passwordResetLink, User user, String subject, String templateFilePath) throws IOException, TemplateException { if (Boolean.TRUE.equals(getSmtpSettings().getEnableSmtpServer())) { - Map templatePopulator = new HashMap<>(); - templatePopulator.put(ENTITY, getEmailingEntity()); - templatePopulator.put(SUPPORT_URL, getSupportUrl()); - templatePopulator.put(USERNAME, user.getName()); - templatePopulator.put(PASSWORD_RESET_LINKKEY, passwordResetLink); - templatePopulator.put(EXPIRATION_TIME_KEY, DEFAULT_EXPIRATION_TIME); + + Map templatePopulator = + new TemplatePopulatorBuilder() + .add(ENTITY, getSmtpSettings().getEmailingEntity()) + .add(SUPPORT_URL, getSmtpSettings().getSupportUrl()) + .add(USERNAME, user.getName()) + .add(EMAIL_VERIFICATION_LINKKEY, passwordResetLink) + .add(EXPIRATION_TIME_KEY, DEFAULT_EXPIRATION_TIME) + .build(); sendMail(subject, templatePopulator, user.getEmail(), templateFilePath, true); } else { @@ -201,15 +198,18 @@ public static void sendTaskAssignmentNotificationToUser( String templateFilePath) throws IOException, TemplateException { if (Boolean.TRUE.equals(getSmtpSettings().getEnableSmtpServer())) { - Map templatePopulator = new HashMap<>(); - templatePopulator.put("assignee", assigneeName); - templatePopulator.put("createdBy", thread.getCreatedBy()); - templatePopulator.put("taskName", thread.getMessage()); - templatePopulator.put("taskStatus", thread.getTask().getStatus().toString()); - templatePopulator.put("taskType", thread.getTask().getType().toString()); - templatePopulator.put("fieldOldValue", thread.getTask().getOldValue()); - templatePopulator.put("fieldNewValue", thread.getTask().getSuggestion()); - templatePopulator.put("taskLink", taskLink); + + Map templatePopulator = + new TemplatePopulatorBuilder() + .add("assignee", assigneeName) + .add("createdBy", thread.getCreatedBy()) + .add("taskName", thread.getMessage()) + .add("taskStatus", thread.getTask().getStatus().toString()) + .add("taskType", thread.getTask().getType().toString()) + .add("fieldOldValue", thread.getTask().getOldValue()) + .add("fieldNewValue", thread.getTask().getSuggestion()) + .add("taskLink", taskLink) + .build(); sendMail(subject, 
templatePopulator, email, templateFilePath, true); } else { @@ -267,20 +267,24 @@ public static void sendMail(Email email, boolean async) { } } - public static void sendInviteMailToAdmin(User user, String pwd) { + public static void sendInviteMailToAdmin(User user, String password) { if (Boolean.TRUE.equals(getSmtpSettings().getEnableSmtpServer())) { - Map<String, Object> templatePopulator = new HashMap<>(); - templatePopulator.put(EmailUtil.ENTITY, EmailUtil.getEmailingEntity()); - templatePopulator.put(EmailUtil.SUPPORT_URL, EmailUtil.getSupportUrl()); - templatePopulator.put(EmailUtil.USERNAME, user.getName()); - templatePopulator.put(EmailUtil.PASSWORD, pwd); - templatePopulator.put(EmailUtil.APPLICATION_LOGIN_LINK, EmailUtil.getOMUrl()); + + Map<String, Object> templatePopulator = + new TemplatePopulatorBuilder() + .add(ENTITY, getSmtpSettings().getEmailingEntity()) + .add(SUPPORT_URL, getSmtpSettings().getSupportUrl()) + .add(USERNAME, user.getName()) + .add(PASSWORD, password) + .add(APPLICATION_LOGIN_LINK, getSmtpSettings().getOpenMetadataUrl()) + .build(); + try { EmailUtil.sendMail( EmailUtil.getEmailInviteSubject(), templatePopulator, user.getEmail(), - EmailUtil.INVITE_RANDOM_PWD, + INVITE_RANDOM_PASSWORD_TEMPLATE, true); } catch (Exception ex) { LOG.error( @@ -294,22 +298,27 @@ public static void sendInviteMailToAdmin(User user, String pwd) { public static void sendChangeEventMail( String publisherName, String receiverMail, EmailMessage emailMessaged) { if (Boolean.TRUE.equals(getSmtpSettings().getEnableSmtpServer())) { - Map<String, Object> templatePopulator = new HashMap<>(); - templatePopulator.put(EmailUtil.USERNAME, receiverMail.split("@")[0]); - templatePopulator.put("updatedBy", emailMessaged.getUpdatedBy()); - templatePopulator.put("entityUrl", emailMessaged.getEntityUrl()); + StringBuilder buff = new StringBuilder(); for (String cmessage : emailMessaged.getChangeMessage()) { buff.append(cmessage); buff.append("\n"); } - templatePopulator.put("changeMessage", buff.toString()); + + Map<String, Object> templatePopulator = + new TemplatePopulatorBuilder() + .add(USERNAME, receiverMail.split("@")[0]) + .add("updatedBy", emailMessaged.getUpdatedBy()) + .add("entityUrl", emailMessaged.getEntityUrl()) + .add("changeMessage", buff.toString()) + .build(); + try { EmailUtil.sendMail( EmailUtil.getChangeEventTemplate(publisherName), templatePopulator, receiverMail, - EmailUtil.CHANGE_EVENT_TEMPLATE, + CHANGE_EVENT_TEMPLATE, true); } catch (Exception ex) { LOG.error( @@ -332,16 +341,21 @@ public static void sendDataInsightEmailNotificationToUser( String templateFilePath) throws IOException, TemplateException { if (Boolean.TRUE.equals(getSmtpSettings().getEnableSmtpServer())) { - Map<String, Object> templatePopulator = new HashMap<>(); - templatePopulator.put("startDate", startDate); - templatePopulator.put("endDate", endDate); - templatePopulator.put("totalAssetObj", totalAssetObj); - templatePopulator.put("descriptionObj", descriptionObj); - templatePopulator.put("ownershipObj", ownerShipObj); - templatePopulator.put("tierObj", tierObj); - templatePopulator.put( - "viewReportUrl", - String.format("%s/data-insights/data-assets", getSmtpSettings().getOpenMetadataUrl())); + + Map<String, Object> templatePopulator = + new TemplatePopulatorBuilder() + .add("startDate", startDate) + .add("endDate", endDate) + .add("totalAssetObj", totalAssetObj) + .add("descriptionObj", descriptionObj) + .add("ownershipObj", ownerShipObj) + .add("tierObj", tierObj) + .add( + "viewReportUrl", + String.format( + "%s/data-insights/data-assets", getSmtpSettings().getOpenMetadataUrl())) + .build(); + 
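// Aside: a minimal usage sketch of the TemplatePopulatorBuilder this patch introduces
// (defined at the bottom of EmailUtil, below). The keys and values here are illustrative
// placeholders, not taken from the patch:
//
//   Map<String, Object> populator =
//       new TemplatePopulatorBuilder()
//           .add(USERNAME, "jane.doe")
//           .add(EXPIRATION_TIME_KEY, DEFAULT_EXPIRATION_TIME)
//           .build();
//
//   // build() wraps the map in Collections.unmodifiableMap, so direct mutation fails:
//   populator.put("extra", "x"); // throws UnsupportedOperationException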
sendMailToMultiple(subject, templatePopulator, emails, templateFilePath); } else { LOG.warn(EMAIL_IGNORE_MSG, emails.toString()); @@ -351,11 +365,15 @@ public static void sendDataInsightEmailNotificationToUser( public static void sendTestEmail(String email, boolean async) throws IOException, TemplateException { if (Boolean.TRUE.equals(getSmtpSettings().getEnableSmtpServer())) { - Map<String, Object> templatePopulator = new HashMap<>(); - templatePopulator.put("userName", email.split("@")[0]); - templatePopulator.put("entity", getSmtpSettings().getEmailingEntity()); - templatePopulator.put("supportUrl", getSmtpSettings().getSupportUrl()); - sendMail(getTestEmailSubject(), templatePopulator, email, TEST_EMAIL_TEMPLATE, async); + + Map<String, Object> templatePopulator = + new TemplatePopulatorBuilder() + .add("userName", email.split("@")[0]) + .add("entity", getSmtpSettings().getEmailingEntity()) + .add("supportUrl", getSmtpSettings().getSupportUrl()) + .build(); + + sendMail(getTestEmailSubject(), templatePopulator, email, TEST_MAIL_TEMPLATE, async); } else { LOG.warn(EMAIL_IGNORE_MSG, email); } @@ -404,18 +422,6 @@ public static String getDataInsightReportSubject() { new SimpleDateFormat("dd-MM-yy").format(new Date())); } - public static String getEmailingEntity() { - return getSmtpSettings().getEmailingEntity(); - } - - public static String getSupportUrl() { - return getSmtpSettings().getSupportUrl(); - } - - public static String getOMUrl() { - return getSmtpSettings().getOpenMetadataUrl(); - } - public static SmtpSettings getSmtpSettings() { SmtpSettings emailConfig = SettingsCache.getSetting(SettingsType.EMAIL_CONFIGURATION, SmtpSettings.class); @@ -426,16 +432,27 @@ public static SmtpSettings getSmtpSettings() { return emailConfig; } - /** - * Check if given email address is valid - * - * @param email email address - * @return true if valid, false otherwise - */ public static Boolean isValidEmail(String email) { if (StringUtils.isBlank(email)) { return false; } return email.matches("^[\\w-\\.]+@([\\w-]+\\.)+[\\w-]{2,4}$"); } + + static class TemplatePopulatorBuilder { + private final Map<String, Object> templatePopulator; + + public TemplatePopulatorBuilder() { + this.templatePopulator = new HashMap<>(); + } + + public TemplatePopulatorBuilder add(String key, Object value) { + templatePopulator.put(key, value); + return this; + } + + public Map<String, Object> build() { + return Collections.unmodifiableMap(templatePopulator); + } + } } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/util/email/TemplateConstants.java b/openmetadata-service/src/main/java/org/openmetadata/service/util/email/TemplateConstants.java new file mode 100644 index 000000000000..495bcacd4e95 --- /dev/null +++ b/openmetadata-service/src/main/java/org/openmetadata/service/util/email/TemplateConstants.java @@ -0,0 +1,46 @@ +package org.openmetadata.service.util.email; + +public class TemplateConstants { + public static final String USERNAME = "userName"; + public static final String ENTITY = "entity"; + public static final String SUPPORT_URL = "supportUrl"; + + // templates + public static final String ACCOUNT_ACTIVITY_CHANGE_TEMPLATE = "account-activity-change"; + public static final String CHANGE_EVENT_TEMPLATE = "changeEvent"; + public static final String DATA_INSIGHT_REPORT_TEMPLATE = "dataInsightReport"; + public static final String EMAIL_VERIFICATION_TEMPLATE = "email-verification"; + public static final String INVITE_CREATE_PASSWORD_TEMPLATE = "invite-createPassword"; + public static final String INVITE_RANDOM_PASSWORD_TEMPLATE = 
"invite-randompwd"; + public static final String RESET_LINK_TEMPLATE = "reset-link"; + public static final String TASK_ASSIGNMENT_TEMPLATE = "taskAssignment"; + public static final String TEST_MAIL_TEMPLATE = "testMail"; + public static final String TEST_RESULT_STATUS_TEMPLATE = "testResultStatusTemplate"; + + // Email Verification + public static final String EMAIL_VERIFICATION_SUBJECT = + "%s: Verify your Email Address (Action Required)"; + public static final String EMAIL_VERIFICATION_LINKKEY = "userEmailTokenVerificationLink"; + + // Password Reset Link + public static final String PASSWORD_RESET_SUBJECT = "%s: Reset your Password"; + public static final String PASSWORD_RESET_LINKKEY = "userResetPasswordLink"; + public static final String EXPIRATION_TIME_KEY = "expirationTime"; + public static final String DEFAULT_EXPIRATION_TIME = "60"; + public static final String PASSWORD = "password"; + public static final String APPLICATION_LOGIN_LINK = "applicationLoginLink"; + + // Account Change Status + public static final String ACCOUNT_STATUS_SUBJECT = "%s: Change in Account Status"; + public static final String ACTION_KEY = "action"; + public static final String ACTION_STATUS_KEY = "actionStatus"; + public static final String INVITE_SUBJECT = "Welcome to %s"; + public static final String CHANGE_EVENT_UPDATE = "[%s] - Change Event Update from %s"; + + public static final String TASK_SUBJECT = "%s : Task Assignment Notification"; + + public static final String REPORT_SUBJECT = "%s: Data Insights Weekly - %s"; + public static final String TEST_EMAIL_SUBJECT = "%s : Test Email"; + public static final String EMAIL_IGNORE_MSG = + "Email was not sent to {} as SMTP setting is not enabled"; +} diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/util/TemplateProvider.java b/openmetadata-service/src/main/java/org/openmetadata/service/util/email/TemplateProvider.java similarity index 65% rename from openmetadata-service/src/main/java/org/openmetadata/service/util/TemplateProvider.java rename to openmetadata-service/src/main/java/org/openmetadata/service/util/email/TemplateProvider.java index e757659aa343..71fbd983b364 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/util/TemplateProvider.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/util/email/TemplateProvider.java @@ -1,10 +1,7 @@ -package org.openmetadata.service.util; +package org.openmetadata.service.util.email; import freemarker.template.Template; import java.io.IOException; -import java.util.Map; -import java.util.Set; -import org.openmetadata.schema.email.EmailTemplatePlaceholder; import org.openmetadata.schema.email.TemplateValidationResponse; public interface TemplateProvider { @@ -18,10 +15,4 @@ public interface TemplateProvider { * - "missingParameters" (List): If validation fails, lists the placeholders that are missing. */ TemplateValidationResponse validateEmailTemplate(String docName, String actualContent); - - /** - * Maps each template's name to a list of - * {@link EmailTemplatePlaceholder}s extracted from the template data. 
- */ - Map> getDocumentPlaceHolders(); } diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/docstore/DocStoreResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/docstore/DocStoreResourceTest.java index 76409827e887..6315fc6df753 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/docstore/DocStoreResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/docstore/DocStoreResourceTest.java @@ -8,7 +8,6 @@ import static org.openmetadata.service.Entity.DOCUMENT; import static org.openmetadata.service.Entity.PERSONA; import static org.openmetadata.service.exception.CatalogExceptionMessage.permissionNotAllowed; -import static org.openmetadata.service.util.EmailUtil.EMAIL_VERIFICATION_TEMPLATE; import static org.openmetadata.service.util.EntityUtil.fieldUpdated; import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS; import static org.openmetadata.service.util.TestUtils.TEST_AUTH_HEADERS; @@ -17,6 +16,7 @@ import static org.openmetadata.service.util.TestUtils.assertListNotNull; import static org.openmetadata.service.util.TestUtils.assertResponse; import static org.openmetadata.service.util.TestUtils.put; +import static org.openmetadata.service.util.email.TemplateConstants.EMAIL_VERIFICATION_TEMPLATE; import java.io.IOException; import java.util.ArrayList; diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/dqtests/TestCaseResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/dqtests/TestCaseResourceTest.java index 1ffe67e5df02..564394829114 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/dqtests/TestCaseResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/dqtests/TestCaseResourceTest.java @@ -28,6 +28,7 @@ import static org.openmetadata.schema.type.ColumnDataType.BIGINT; import static org.openmetadata.schema.type.MetadataOperation.EDIT_TESTS; import static org.openmetadata.service.Entity.ADMIN_USER_NAME; +import static org.openmetadata.service.Entity.getSearchRepository; import static org.openmetadata.service.exception.CatalogExceptionMessage.permissionNotAllowed; import static org.openmetadata.service.jdbi3.TestCaseRepository.FAILED_ROWS_SAMPLE_EXTENSION; import static org.openmetadata.service.security.SecurityUtil.authHeaders; @@ -75,6 +76,7 @@ import org.openmetadata.schema.api.tests.CreateTestSuite; import org.openmetadata.schema.entity.data.Table; import org.openmetadata.schema.entity.feed.Thread; +import org.openmetadata.schema.tests.DataQualityReport; import org.openmetadata.schema.tests.ResultSummary; import org.openmetadata.schema.tests.TestCase; import org.openmetadata.schema.tests.TestCaseParameterValue; @@ -101,6 +103,8 @@ import org.openmetadata.service.resources.databases.TableResourceTest; import org.openmetadata.service.resources.feeds.FeedResourceTest; import org.openmetadata.service.resources.feeds.MessageParser; +import org.openmetadata.service.search.SearchIndexUtils; +import org.openmetadata.service.search.SearchRepository; import org.openmetadata.service.search.indexes.TestCaseIndex; import org.openmetadata.service.search.models.IndexMapping; import org.openmetadata.service.util.JsonUtils; @@ -2751,6 +2755,37 @@ void createUpdate_DynamicAssertionTests(TestInfo testInfo) throws IOException { assertFalse(testCase.getUseDynamicAssertion()); } + @Test + void 
aggregate_testCaseResults(TestInfo testInfo) throws IOException, ParseException { + // Set up tests + SearchRepository searchRepository = getSearchRepository(); + CreateTestCase create = createRequest(testInfo); + create + .withEntityLink(TABLE_COLUMN_LINK) + .withTestSuite(TEST_SUITE1.getFullyQualifiedName()) + .withTestDefinition(TEST_DEFINITION3.getFullyQualifiedName()) + .withParameterValues( + List.of(new TestCaseParameterValue().withValue("100").withName("missingCountValue"))); + TestCase testCase = createAndCheckEntity(create, ADMIN_AUTH_HEADERS); + for (int i = 1; i < 10; i++) { + TestCaseResult testCaseResult = + new TestCaseResult() + .withResult("tested") + .withTestCaseStatus(TestCaseStatus.Success) + .withTimestamp(TestUtils.dateToTimestamp("2021-09-0%s".formatted(i))); + putTestCaseResult(testCase.getFullyQualifiedName(), testCaseResult, ADMIN_AUTH_HEADERS); + } + + // Test aggregation + String aggregationQuery = + "bucketName=dates:aggType=date_histogram:field=timestamp&calendar_interval=1d,bucketName=dimension:aggType=terms:field=testDefinition.dataQualityDimension"; + Map aggregationString = + SearchIndexUtils.buildAggregationString(aggregationQuery); + DataQualityReport dataQualityReport = + searchRepository.genericAggregation(null, "testCaseResult", aggregationString); + assertNotNull(dataQualityReport.getData()); + } + @Test void createTestCaseResults_wrongTs(TestInfo testInfo) throws IOException, HttpResponseException { CreateTestCase create = createRequest(testInfo); diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/MSTeamsCallbackResource.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/MSTeamsCallbackResource.java index e1a4ab98bedc..3dcd4e11dae6 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/MSTeamsCallbackResource.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/MSTeamsCallbackResource.java @@ -1,6 +1,6 @@ package org.openmetadata.service.resources.events; -import static org.openmetadata.service.util.EmailUtil.getSmtpSettings; +import static org.openmetadata.service.util.email.EmailUtil.getSmtpSettings; import javax.ws.rs.Consumes; import javax.ws.rs.Path; diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/SlackCallbackResource.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/SlackCallbackResource.java index ae53f252df76..ecbc6e077f52 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/SlackCallbackResource.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/events/SlackCallbackResource.java @@ -1,7 +1,7 @@ package org.openmetadata.service.resources.events; import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; -import static org.openmetadata.service.util.EmailUtil.getSmtpSettings; +import static org.openmetadata.service.util.email.EmailUtil.getSmtpSettings; import javax.ws.rs.Consumes; import javax.ws.rs.Path; diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/searchindex/SearchIndexResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/searchindex/SearchIndexResourceTest.java index 8756a499a9be..a29aa7d93b64 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/searchindex/SearchIndexResourceTest.java +++ 
b/openmetadata-service/src/test/java/org/openmetadata/service/resources/searchindex/SearchIndexResourceTest.java @@ -447,6 +447,22 @@ void testBuildAggregationString(TestInfo testInfo) { expectedAggregationString = "\"entityLinks\":{\"terms\":{\"field\":\"entityLinks.nonNormalized\"},\"aggs\":{\"minPrice\":{\"min\":{\"field\":\"price.adjusted\"}}}}"; assertEquals(expectedAggregationString, actualAggregationstring.get("aggregationStr")); + + // Date histogram aggregation + aggregationString = + "bucketName=dates:aggType=date_histogram:field=timestamp&calendar_interval=2d"; + actualAggregationstring = SearchIndexUtils.buildAggregationString(aggregationString); + expectedAggregationString = + "\"dates\":{\"date_histogram\":{\"field\":\"timestamp\",\"calendar_interval\":\"2d\"}}"; + assertEquals(expectedAggregationString, actualAggregationstring.get("aggregationStr")); + + // Date histogram aggregation with sub aggregation + aggregationString = + "bucketName=dates:aggType=date_histogram:field=timestamp&calendar_interval=2d,bucketName=minPrice:aggType=min:field=price.adjusted"; + actualAggregationstring = SearchIndexUtils.buildAggregationString(aggregationString); + expectedAggregationString = + "\"dates\":{\"date_histogram\":{\"field\":\"timestamp\",\"calendar_interval\":\"2d\"},\"aggs\":{\"minPrice\":{\"min\":{\"field\":\"price.adjusted\"}}}}"; + assertEquals(expectedAggregationString, actualAggregationstring.get("aggregationStr")); } @Test @@ -515,6 +531,12 @@ void testNewAggregation(TestInfo testInfo) throws IOException { Map m = datum.getAdditionalProperties(); assertTrue(m.containsKey("fullyQualifiedName")); }); + + aggregationQuery = + "bucketName=dates:aggType=date_histogram:field=timestamp&calendar_interval=1d,bucketName=dimension:aggType=terms:field=testDefinition.dataQualityDimension"; + aggregationString = SearchIndexUtils.buildAggregationString(aggregationQuery); + dataQualityReport = + searchRepository.genericAggregation(null, "testCaseResult", aggregationString); } @Override diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/system/SystemResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/system/SystemResourceTest.java index 97e781c09a57..7069a22491a8 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/system/SystemResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/system/SystemResourceTest.java @@ -183,10 +183,10 @@ void testSystemConfigs() throws HttpResponseException { // Test Email Config Settings emailSettings = getSystemConfig(SettingsType.EMAIL_CONFIGURATION); SmtpSettings smtp = JsonUtils.convertValue(emailSettings.getConfigValue(), SmtpSettings.class); - // Password for Email is always sent in hidden + // Password for Email is encrypted using Fernet SmtpSettings expected = config.getSmtpSettings(); - expected.setPassword("***********"); - assertEquals(expected, smtp); + expected.setPassword(smtp.getPassword()); + assertEquals(config.getSmtpSettings(), smtp); // Test Custom Ui Theme Preference Config Settings uiThemeConfigWrapped = getSystemConfig(SettingsType.CUSTOM_UI_THEME_PREFERENCE); diff --git a/openmetadata-ui/src/main/resources/ui/cypress/e2e/Pages/MyData.spec.ts b/openmetadata-ui/src/main/resources/ui/cypress/e2e/Pages/MyData.spec.ts deleted file mode 100644 index c3c70a29aad1..000000000000 --- a/openmetadata-ui/src/main/resources/ui/cypress/e2e/Pages/MyData.spec.ts +++ /dev/null @@ -1,389 +0,0 @@ -/* - * Copyright 2024 
Collate. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { - interceptURL, - uuid, - verifyResponseStatusCode, -} from '../../common/common'; -import { - createSingleLevelEntity, - generateRandomContainer, - generateRandomDashboard, - generateRandomMLModel, - generateRandomPipeline, - generateRandomTable, - generateRandomTopic, - hardDeleteService, -} from '../../common/EntityUtils'; -import { createEntityTableViaREST } from '../../common/Utils/Entity'; -import { getToken } from '../../common/Utils/LocalStorage'; -import { generateRandomUser } from '../../common/Utils/Owner'; -import { - DATABASE_SERVICE, - SINGLE_LEVEL_SERVICE, -} from '../../constants/EntityConstant'; -import { SERVICE_CATEGORIES } from '../../constants/service.constants'; - -const user1 = generateRandomUser(); -let user1Id = ''; -const user2 = generateRandomUser(); -let user2Id = ''; - -// generate schema for 20 tables -const tables = Array(20) - .fill(undefined) - .map(() => generateRandomTable()); - -const entities = { - table: { - request: { - url: '/api/v1/tables', - body1: generateRandomTable(), - body2: generateRandomTable(), - }, - id1: '', - id2: '', - }, - topic: { - request: { - url: '/api/v1/topics', - body1: generateRandomTopic(), - body2: generateRandomTopic(), - }, - id1: '', - id2: '', - }, - dashboard: { - request: { - url: '/api/v1/dashboards', - body1: generateRandomDashboard(), - body2: generateRandomDashboard(), - }, - id1: '', - id2: '', - }, - pipeline: { - request: { - url: '/api/v1/pipelines', - body1: generateRandomPipeline(), - body2: generateRandomPipeline(), - }, - id1: '', - id2: '', - }, - mlmodel: { - request: { - url: '/api/v1/mlmodels', - body1: generateRandomMLModel(), - body2: generateRandomMLModel(), - }, - id1: '', - id2: '', - }, - container: { - request: { - url: '/api/v1/containers', - body1: generateRandomContainer(), - body2: generateRandomContainer(), - }, - id1: '', - id2: '', - }, -}; -const team = { - name: `cy-test-team-${uuid()}`, - id: '', -}; - -const verifyEntities = ({ url }) => { - interceptURL('GET', url, 'getEntities'); - cy.get('[data-testid="pagination"] .ant-btn-default') - .scrollIntoView() - .click(); - - // change pagination size to 25 - cy.get('[role="menu"] [value="25"]').click(); - verifyResponseStatusCode('@getEntities', 200); - - // verify all tables are present - tables.forEach((table) => { - cy.get( - `[data-testid="table-data-card_${table.databaseSchema}.${table.name}"]` - ) - .scrollIntoView() - .should('be.exist'); - }); -}; - -const updateOwnerAndVerify = ({ url, body, type, entityName, newOwner }) => { - interceptURL('GET', '/api/v1/users/loggedInUser?*', 'loggedInUser'); - interceptURL( - 'GET', - '/api/v1/feed?type=Conversation&filterType=OWNER_OR_FOLLOWS&userId=*', - 'feedData' - ); - cy.getAllLocalStorage().then((data) => { - const token = getToken(data); - cy.request({ - method: 'PATCH', - url, - headers: { - Authorization: `Bearer ${token}`, - 'Content-Type': 'application/json-patch+json', - }, - body, - }).then(() => { - 
cy.get('[id*="tab-mentions"]').click(); - cy.get('[data-testid="no-data-placeholder-container"]').should( - 'be.visible' - ); - cy.get('[id*="tab-all"]').click(); - verifyResponseStatusCode('@feedData', 200); - cy.get('[data-testid="message-container"]').first().as('message'); - cy.get('@message') - .find('[data-testid="entityType"]') - .should('contain', type); - cy.get('@message') - .find('[data-testid="entitylink"]') - .should('contain', entityName); - cy.get('@message') - .find('[data-testid="viewer-container"]') - .should('contain', `Added owner: ${newOwner}`); - }); - }); -}; - -const prepareData = () => { - cy.login(); - cy.getAllLocalStorage().then((data) => { - const token = getToken(data); - SINGLE_LEVEL_SERVICE.forEach((data) => { - createSingleLevelEntity({ - token, - ...data, - entity: [], - }); - }); - // create user - cy.request({ - method: 'POST', - url: `/api/v1/users/signup`, - headers: { Authorization: `Bearer ${token}` }, - body: user1, - }).then((response) => { - user1Id = response.body.id; - - // create team - cy.request({ - method: 'GET', - url: `/api/v1/teams/name/Organization`, - headers: { Authorization: `Bearer ${token}` }, - }).then((teamResponse) => { - cy.request({ - method: 'POST', - url: `/api/v1/teams`, - headers: { Authorization: `Bearer ${token}` }, - body: { - name: team.name, - displayName: team.name, - teamType: 'Group', - parents: [teamResponse.body.id], - users: [response.body.id], - }, - }).then((teamResponse) => { - team.id = teamResponse.body.id; - }); - }); - - // create database service - createEntityTableViaREST({ - token, - ...DATABASE_SERVICE, - tables: [], - }); - - // generate 20 tables with newly created user as owner - tables.forEach((table) => { - cy.request({ - method: 'POST', - url: `/api/v1/tables`, - headers: { Authorization: `Bearer ${token}` }, - body: { ...table, owners: [{ id: response.body.id, type: 'user' }] }, - }).then((tableResponse) => { - cy.request({ - method: 'PUT', - url: `/api/v1/tables/${tableResponse.body.id}/followers`, - headers: { - Authorization: `Bearer ${token}`, - 'Content-Type': 'application/json', - }, - body: JSON.stringify(response.body.id), - }); - }); - }); - }); - - cy.request({ - method: 'POST', - url: `/api/v1/users/signup`, - headers: { Authorization: `Bearer ${token}` }, - body: user2, - }).then((response) => { - user2Id = response.body.id; - }); - - Object.entries(entities).forEach(([key, value]) => { - cy.request({ - method: 'POST', - headers: { Authorization: `Bearer ${token}` }, - url: value.request.url, - body: value.request.body1, - }).then((response) => { - entities[key].id1 = response.body.id; - }); - cy.request({ - method: 'POST', - headers: { Authorization: `Bearer ${token}` }, - url: value.request.url, - body: value.request.body2, - }).then((response) => { - entities[key].id2 = response.body.id; - }); - }); - }); - cy.logout(); -}; - -const cleanUp = () => { - cy.login(); - cy.getAllLocalStorage().then((data) => { - const token = getToken(data); - hardDeleteService({ - token, - serviceFqn: DATABASE_SERVICE.service.name, - serviceType: SERVICE_CATEGORIES.DATABASE_SERVICES, - }); - SINGLE_LEVEL_SERVICE.forEach((data) => { - hardDeleteService({ - token, - serviceFqn: data.service.name, - serviceType: data.serviceType, - }); - }); - [user1Id, user2Id].forEach((id) => { - cy.request({ - method: 'DELETE', - url: `/api/v1/users/${id}?hardDelete=true&recursive=false`, - headers: { Authorization: `Bearer ${token}` }, - }); - }); - cy.request({ - method: 'DELETE', - url: 
`/api/v1/teams/${team.id}?hardDelete=true&recursive=true`, - headers: { Authorization: `Bearer ${token}` }, - }); - }); -}; - -describe('My Data page', { tags: 'DataAssets' }, () => { - before(prepareData); - after(cleanUp); - - it('Verify my data widget', () => { - // login with newly created user - cy.login(user1.email, user1.password); - cy.get('[data-testid="my-data-widget"]').scrollIntoView(); - - // verify total count - cy.get('[data-testid="my-data-total-count"]') - .invoke('text') - .should('eq', '(20)'); - cy.get( - '[data-testid="my-data-widget"] [data-testid="view-all-link"]' - ).click(); - verifyEntities({ - url: '/api/v1/search/query?q=*&index=all&from=0&size=25', - }); - - cy.logout(); - }); - - it('Verify following widget', () => { - // login with newly created user - cy.login(user1.email, user1.password); - cy.get('[data-testid="following-widget"]').scrollIntoView(); - - // verify total count - cy.get('[data-testid="following-data-total-count"]') - .invoke('text') - .should('eq', '(20)'); - cy.get('[data-testid="following-data"]').click(); - verifyEntities({ - url: '/api/v1/search/query?q=*followers:*&index=all&from=0&size=25', - }); - cy.logout(); - }); - - // Todo: Fix the following tests once new layout is implemented for feed https://github.com/open-metadata/OpenMetadata/issues/13871 @Ashish8689 @aniketkatkar97 - - it.skip('Verify user as owner feed widget', () => { - // login with newly created user - cy.login(user2.email, user2.password); - cy.get('[data-testid="no-data-placeholder-container"]') - .scrollIntoView() - .should( - 'contain', - // eslint-disable-next-line max-len - "Right now, there are no updates in the data assets you own or follow. Haven't explored yet? Dive in and claim ownership or follow the data assets that interest you to stay informed about their latest activities!" 
- ); - - Object.entries(entities).forEach(([key, value]) => { - updateOwnerAndVerify({ - url: `${value.request.url}/${value.id1}`, - body: [ - { - op: 'add', - path: '/owner', - value: { id: user2Id, type: 'user' }, - }, - ], - type: key, - entityName: value.request.body1.name, - newOwner: `${user2.firstName}${user2.lastName}`, - }); - }); - cy.logout(); - }); - - it.skip('Verify team as owner feed widget', () => { - // login with newly created user - cy.login(user1.email, user1.password); - - Object.entries(entities).forEach(([key, value]) => { - updateOwnerAndVerify({ - url: `${value.request.url}/${value.id2}`, - body: [ - { - op: 'add', - path: '/owner', - value: { id: team.id, type: 'team' }, - }, - ], - type: key, - entityName: value.request.body2.name, - newOwner: team.name, - }); - }); - cy.logout(); - }); -}); diff --git a/openmetadata-ui/src/main/resources/ui/cypress/e2e/Pages/Permission.spec.ts b/openmetadata-ui/src/main/resources/ui/cypress/e2e/Pages/Permission.spec.ts index 6b472b8f9645..196dec909cd8 100644 --- a/openmetadata-ui/src/main/resources/ui/cypress/e2e/Pages/Permission.spec.ts +++ b/openmetadata-ui/src/main/resources/ui/cypress/e2e/Pages/Permission.spec.ts @@ -417,7 +417,11 @@ describe('Permissions', { tags: 'Settings' }, () => { serviceName: DATABASE_SERVICE.service.name, entity: EntityType.Table, }); - interceptURL('GET', '/api/v1/dataQuality/testCases?fields=*', 'testCase'); + interceptURL( + 'GET', + '/api/v1/dataQuality/testCases/search/list?fields=*', + 'testCase' + ); cy.get('[data-testid="profiler"]').click(); cy.get('[data-testid="profiler-tab-left-panel"]') .contains('Data Quality') diff --git a/openmetadata-ui/src/main/resources/ui/playwright/e2e/Features/IncidentManager.spec.ts b/openmetadata-ui/src/main/resources/ui/playwright/e2e/Features/IncidentManager.spec.ts index b103e2b0c69b..12914ee56eea 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/e2e/Features/IncidentManager.spec.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/e2e/Features/IncidentManager.spec.ts @@ -230,7 +230,7 @@ test.describe('Incident Manager', () => { await test.step('Resolve task from incident list page', async () => { await visitProfilerTab(page, table1); const testCaseResponse = page.waitForResponse( - '/api/v1/dataQuality/testCases?fields=*' + '/api/v1/dataQuality/testCases/search/list?fields=*' ); await page .getByTestId('profiler-tab-left-panel') @@ -276,7 +276,7 @@ test.describe('Incident Manager', () => { await test.step('Task should be closed', async () => { await visitProfilerTab(page, table1); const testCaseResponse = page.waitForResponse( - '/api/v1/dataQuality/testCases?fields=*' + '/api/v1/dataQuality/testCases/search/list?fields=*' ); await page .getByTestId('profiler-tab-left-panel') @@ -372,7 +372,7 @@ test.describe('Incident Manager', () => { await test.step("Verify incident's status on DQ page", async () => { await visitProfilerTab(page, table1); const testCaseResponse = page.waitForResponse( - '/api/v1/dataQuality/testCases?fields=*' + '/api/v1/dataQuality/testCases/search/list?fields=*' ); await page .getByTestId('profiler-tab-left-panel') diff --git a/openmetadata-ui/src/main/resources/ui/playwright/e2e/Pages/DataQualityAndProfiler.spec.ts b/openmetadata-ui/src/main/resources/ui/playwright/e2e/Pages/DataQualityAndProfiler.spec.ts index 693cb4653911..0fd1f4b75312 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/e2e/Pages/DataQualityAndProfiler.spec.ts +++ 
b/openmetadata-ui/src/main/resources/ui/playwright/e2e/Pages/DataQualityAndProfiler.spec.ts @@ -129,7 +129,7 @@ test('Table test case', async ({ page }) => { ).toBeVisible(); const testCaseResponse = page.waitForResponse( - '/api/v1/dataQuality/testCases?fields=*' + '/api/v1/dataQuality/testCases/search/list?fields=*' ); await page.click(`[data-testid="view-service-button"]`); await testCaseResponse; @@ -209,7 +209,7 @@ test('Column test case', async ({ page }) => { await page.waitForSelector('[data-testid="view-service-button"]'); const testCaseResponse = page.waitForResponse( - '/api/v1/dataQuality/testCases?fields=*' + '/api/v1/dataQuality/testCases/search/list?fields=*' ); await page.click(`[data-testid="view-service-button"]`); await testCaseResponse; @@ -929,6 +929,25 @@ test('TestCase filters', async ({ page }) => { await getTestCaseList; await verifyFilterTestCase(page); await verifyFilter2TestCase(page, true); + await visitDataQualityTab(page, filterTable1); + const searchTestCase = page.waitForResponse( + (url) => + url.url().includes('/api/v1/dataQuality/testCases/search/list') && + url.url().includes(testCases[0]) + ); + await page + .getByTestId('table-profiler-container') + .getByTestId('searchbar') + .fill(testCases[0]); + await searchTestCase; + + await expect(page.locator(`[data-testid="${testCases[0]}"]`)).toBeVisible(); + await expect( + page.locator(`[data-testid="${testCases[1]}"]`) + ).not.toBeVisible(); + await expect( + page.locator(`[data-testid="${testCases[2]}"]`) + ).not.toBeVisible(); } finally { await filterTable1.delete(apiContext); await domain.delete(apiContext); diff --git a/openmetadata-ui/src/main/resources/ui/playwright/e2e/Pages/MyData.spec.ts b/openmetadata-ui/src/main/resources/ui/playwright/e2e/Pages/MyData.spec.ts new file mode 100644 index 000000000000..0100e3e9cb5a --- /dev/null +++ b/openmetadata-ui/src/main/resources/ui/playwright/e2e/Pages/MyData.spec.ts @@ -0,0 +1,119 @@ +/* + * Copyright 2024 Collate. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { expect, Page, test as base } from '@playwright/test'; +import { TableClass } from '../../support/entity/TableClass'; +import { UserClass } from '../../support/user/UserClass'; +import { performAdminLogin } from '../../utils/admin'; +import { redirectToHomePage } from '../../utils/common'; +import { addMultiOwner, followEntity } from '../../utils/entity'; +import { verifyEntities } from '../../utils/myData'; + +const user = new UserClass(); +const TableEntities = Array(20) + .fill(undefined) + .map(() => new TableClass()); + +const test = base.extend<{ page: Page }>({ + page: async ({ browser }, use) => { + const Page = await browser.newPage(); + await user.login(Page); + await use(Page); + await Page.close(); + }, +}); + +test.describe('My Data page', () => { + test.beforeAll('Setup prerequisites', async ({ browser }) => { + const { apiContext, afterAction } = await performAdminLogin(browser); + await user.create(apiContext); + for (const table of TableEntities) { + await table.create(apiContext); + } + await afterAction(); + }); + + test.afterAll('Cleanup', async ({ browser }) => { + const { apiContext, afterAction } = await performAdminLogin(browser); + await user.delete(apiContext); + for (const table of TableEntities) { + await table.delete(apiContext); + } + await afterAction(); + }); + + test.beforeEach('Visit home page', async ({ page }) => { + await redirectToHomePage(page); + }); + + test('Verify MyData and Following widget', async ({ page }) => { + test.slow(true); + + await test.step( + 'Set user as the Owner of the Table and also Follow it', + async () => { + for (const table of TableEntities) { + await table.visitEntityPage(page); + await addMultiOwner({ + page, + ownerNames: [user.getUserName()], + activatorBtnDataTestId: 'edit-owner', + resultTestId: 'data-assets-header', + endpoint: table.endpoint, + type: 'Users', + }); + await followEntity(page, table.endpoint); + } + } + ); + + await test.step('Verify my data widget', async () => { + await redirectToHomePage(page); + // Verify total count + const totalCount = await page + .locator('[data-testid="my-data-total-count"]') + .innerText(); + + expect(totalCount).toBe('(20)'); + + await page + .locator('[data-testid="my-data-widget"] [data-testid="view-all-link"]') + .click(); + + // Verify entities + await verifyEntities( + page, + '/api/v1/search/query?q=*&index=all&from=0&size=25', + TableEntities + ); + }); + + await test.step('Verify following widget', async () => { + await redirectToHomePage(page); + // Verify total count + const totalCount = await page + .locator('[data-testid="following-data-total-count"]') + .innerText(); + + expect(totalCount).toBe('(20)'); + + await page.locator('[data-testid="following-data"]').click(); + + // Verify entities + await verifyEntities( + page, + '/api/v1/search/query?q=*followers:*&index=all&from=0&size=25', + TableEntities + ); + }); + }); +}); diff --git a/openmetadata-ui/src/main/resources/ui/playwright/e2e/Pages/TestCases.spec.ts b/openmetadata-ui/src/main/resources/ui/playwright/e2e/Pages/TestCases.spec.ts index 79030666d1ef..96313f5a013f 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/e2e/Pages/TestCases.spec.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/e2e/Pages/TestCases.spec.ts @@ -86,7 +86,7 @@ test('Table difference test case', async ({ page }) => { await page.getByTestId('submit-test').click(); await createTestCaseResponse; const tableTestResponse = page.waitForResponse( - `/api/v1/dataQuality/testCases?fields=*` + 
`/api/v1/dataQuality/testCases/search/list?fields=*` ); await page.getByTestId('view-service-button').click(); await tableTestResponse; @@ -124,7 +124,7 @@ test('Table difference test case', async ({ page }) => { 'Test case updated successfully.' ); - await page.getByTestId('content-wrapper').getByLabel('close').click(); + await page.getByLabel('close', { exact: true }).click(); }); await test.step('Delete', async () => { @@ -183,7 +183,7 @@ test('Custom SQL Query', async ({ page }) => { await page.getByTestId('submit-test').click(); await createTestCaseResponse; const tableTestResponse = page.waitForResponse( - `/api/v1/dataQuality/testCases?fields=*` + `/api/v1/dataQuality/testCases/search/list?fields=*` ); await page.getByTestId('view-service-button').click(); await tableTestResponse; @@ -224,7 +224,7 @@ test('Custom SQL Query', async ({ page }) => { 'Test case updated successfully.' ); - await page.getByTestId('content-wrapper').getByLabel('close').click(); + await page.getByLabel('close', { exact: true }).click(); }); await test.step('Delete', async () => { @@ -287,7 +287,7 @@ test('Column Values To Be Not Null', async ({ page }) => { await page.waitForSelector('[data-testid="success-line"]'); await page.waitForSelector('[data-testid="view-service-button"]'); const testCaseResponse = page.waitForResponse( - '/api/v1/dataQuality/testCases?fields=*' + '/api/v1/dataQuality/testCases/search/list?fields=*' ); await page.click(`[data-testid="view-service-button"]`); await testCaseResponse; @@ -325,7 +325,7 @@ test('Column Values To Be Not Null', async ({ page }) => { 'Test case updated successfully.' ); - await page.getByTestId('content-wrapper').getByLabel('close').click(); + await page.getByLabel('close', { exact: true }).click(); }); await test.step('Delete', async () => { diff --git a/openmetadata-ui/src/main/resources/ui/playwright/e2e/VersionPages/EntityVersionPages.spec.ts b/openmetadata-ui/src/main/resources/ui/playwright/e2e/VersionPages/EntityVersionPages.spec.ts index 6d69307c9281..6b8ae01172a2 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/e2e/VersionPages/EntityVersionPages.spec.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/e2e/VersionPages/EntityVersionPages.spec.ts @@ -224,9 +224,6 @@ entities.forEach((EntityClass) => { await page.waitForSelector('[role="dialog"].ant-modal'); await expect(page.locator('[role="dialog"].ant-modal')).toBeVisible(); - await expect(page.locator('.ant-modal-title')).toContainText( - entity.entity.name - ); await page.fill('[data-testid="confirmation-text-input"]', 'DELETE'); const deleteResponse = page.waitForResponse( diff --git a/openmetadata-ui/src/main/resources/ui/playwright/e2e/VersionPages/ServiceEntityVersionPage.spec.ts b/openmetadata-ui/src/main/resources/ui/playwright/e2e/VersionPages/ServiceEntityVersionPage.spec.ts index 8f1e4d63ba85..895c59e86627 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/e2e/VersionPages/ServiceEntityVersionPage.spec.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/e2e/VersionPages/ServiceEntityVersionPage.spec.ts @@ -189,9 +189,6 @@ entities.forEach((EntityClass) => { await page.waitForSelector('[role="dialog"].ant-modal'); await expect(page.locator('[role="dialog"].ant-modal')).toBeVisible(); - await expect(page.locator('.ant-modal-title')).toContainText( - entity.entity.name - ); await page.fill('[data-testid="confirmation-text-input"]', 'DELETE'); const deleteResponse = page.waitForResponse( diff --git 
a/openmetadata-ui/src/main/resources/ui/playwright/support/entity/ingestion/ServiceBaseClass.ts b/openmetadata-ui/src/main/resources/ui/playwright/support/entity/ingestion/ServiceBaseClass.ts index f1e9a4a42067..c5a408a4e7ca 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/support/entity/ingestion/ServiceBaseClass.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/support/entity/ingestion/ServiceBaseClass.ts @@ -176,10 +176,10 @@ class ServiceBaseClass { .getByLabel('Ingestions') .getByTestId('loader') .waitFor({ state: 'detached' }); - + // A manual wait is needed to let the deployed pipeline settle before triggering it await page.waitForTimeout(2000); - + await page.getByTestId('more-actions').first().click(); await page.getByTestId('run-button').click(); diff --git a/openmetadata-ui/src/main/resources/ui/playwright/utils/myData.ts b/openmetadata-ui/src/main/resources/ui/playwright/utils/myData.ts new file mode 100644 index 000000000000..42e5984528b3 --- /dev/null +++ b/openmetadata-ui/src/main/resources/ui/playwright/utils/myData.ts @@ -0,0 +1,35 @@ +/* + * Copyright 2024 Collate. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { expect, Page } from '@playwright/test'; +import { TableClass } from '../support/entity/TableClass'; + +export const verifyEntities = async ( + page: Page, + url: string, + tables: TableClass[] +) => { + // Change pagination size to 25 + const fetchResponse = page.waitForResponse(url); + await page.locator('[data-testid="pagination"] .ant-btn-default').click(); + await page.locator('[role="menu"] [value="25"]').click(); + await fetchResponse; + + // Verify all tables are present + for (const table of tables) { + await expect( + page.locator( + `[data-testid="table-data-card_${table.entityResponseData?.['fullyQualifiedName']}"]` + ) + ).toBeVisible(); + } +}; diff --git a/openmetadata-ui/src/main/resources/ui/playwright/utils/testCases.ts b/openmetadata-ui/src/main/resources/ui/playwright/utils/testCases.ts index 625171ac35b6..00781967ff77 100644 --- a/openmetadata-ui/src/main/resources/ui/playwright/utils/testCases.ts +++ b/openmetadata-ui/src/main/resources/ui/playwright/utils/testCases.ts @@ -32,7 +32,7 @@ export const visitDataQualityTab = async (page: Page, table: TableClass) => { await table.visitEntityPage(page); await page.getByTestId('profiler').click(); const testCaseResponse = page.waitForResponse( - '/api/v1/dataQuality/testCases?fields=*' + '/api/v1/dataQuality/testCases/search/list?fields=*' ); await page .getByTestId('profiler-tab-left-panel') diff --git a/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/QualityTab/QualityTab.component.tsx b/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/QualityTab/QualityTab.component.tsx index 1e87e6e04062..46f11cb9f431 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/QualityTab/QualityTab.component.tsx +++ 
b/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/QualityTab/QualityTab.component.tsx @@ -37,6 +37,7 @@ import { import { getAddDataQualityTableTestPath } from '../../../../../utils/RouterUtils'; import NextPrevious from '../../../../common/NextPrevious/NextPrevious'; import { NextPreviousProps } from '../../../../common/NextPrevious/NextPrevious.interface'; +import Searchbar from '../../../../common/SearchBarComponent/SearchBar.component'; import TabsLabel from '../../../../common/TabsLabel/TabsLabel.component'; import { SummaryPanel } from '../../../../DataQuality/SummaryPannel/SummaryPanel.component'; import TestSuitePipelineTab from '../../../../DataQuality/TestSuite/TestSuitePipelineTab/TestSuitePipelineTab.component'; @@ -76,24 +77,35 @@ export const QualityTab = () => { const [selectedTestCaseStatus, setSelectedTestCaseStatus] = useState('' as TestCaseStatus); const [selectedTestType, setSelectedTestType] = useState(TestCaseType.all); + const [searchValue, setSearchValue] = useState(); const testSuite = useMemo(() => table?.testSuite, [table]); const handleTestCasePageChange: NextPreviousProps['pagingHandler'] = ({ - cursorType, currentPage, }) => { - if (cursorType) { + if (currentPage) { fetchAllTests({ - [cursorType]: paging[cursorType], testCaseType: selectedTestType, testCaseStatus: isEmpty(selectedTestCaseStatus) ? undefined : selectedTestCaseStatus, + offset: (currentPage - 1) * pageSize, }); } handlePageChange(currentPage); }; + const handleSearchTestCase = (value?: string) => { + setSearchValue(value); + fetchAllTests({ + testCaseType: selectedTestType, + testCaseStatus: isEmpty(selectedTestCaseStatus) + ? undefined + : selectedTestCaseStatus, + q: value, + }); + }; + const tableBreadcrumb = useMemo(() => { return table ? 
[ @@ -118,6 +130,15 @@ export const QualityTab = () => { key: EntityTabs.TEST_CASES, children: ( + + + { @@ -136,6 +157,7 @@ export const QualityTab = () => { {showPagination && ( { )); }); +jest.mock('../../../../common/SearchBarComponent/SearchBar.component', () => { + return jest + .fn() + .mockImplementation(() => ( + + )); +}); jest.mock('../../DataQualityTab/DataQualityTab', () => { return jest .fn() @@ -128,6 +135,7 @@ describe('QualityTab', () => { expect(await screen.findByTestId('sub-heading')).toHaveTextContent( 'message.page-sub-header-for-data-quality' ); + expect(await screen.findByTestId('mock-searchbar')).toBeInTheDocument(); expect( await screen.findByText('label.test-case-plural') ).toBeInTheDocument(); @@ -154,7 +162,7 @@ describe('QualityTab', () => { mockUseTableProfiler.testCasePaging.handlePageChange ).toHaveBeenCalledWith(2); expect(mockUseTableProfiler.fetchAllTests).toHaveBeenCalledWith({ - after: 'after', + offset: 10, testCaseStatus: undefined, testCaseType: 'all', }); diff --git a/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/TableProfiler.interface.ts b/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/TableProfiler.interface.ts index 1fc8b74ada9d..f3d53112978d 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/TableProfiler.interface.ts +++ b/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/TableProfiler.interface.ts @@ -26,7 +26,7 @@ import { } from '../../../../generated/entity/data/table'; import { TestCase, TestSummary } from '../../../../generated/tests/testCase'; import { UsePagingInterface } from '../../../../hooks/paging/usePaging'; -import { ListTestCaseParams } from '../../../../rest/testAPI'; +import { ListTestCaseParamsBySearch } from '../../../../rest/testAPI'; export interface TableProfilerProps { permissions: OperationPermission; @@ -50,7 +50,7 @@ export interface TableProfilerContextInterface { overallSummary: OverallTableSummaryType[]; onTestCaseUpdate: (testCase?: TestCase) => void; onSettingButtonClick: () => void; - fetchAllTests: (params?: ListTestCaseParams) => Promise; + fetchAllTests: (params?: ListTestCaseParamsBySearch) => Promise; onCustomMetricUpdate: (table: Table) => void; isProfilingEnabled: boolean; dateRangeObject: DateRangeObject; diff --git a/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/TableProfilerProvider.test.tsx b/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/TableProfilerProvider.test.tsx index 68f3a8e22767..1961dfa5f24b 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/TableProfilerProvider.test.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/TableProfilerProvider.test.tsx @@ -15,7 +15,7 @@ import { render, screen } from '@testing-library/react'; import React from 'react'; import { OperationPermission } from '../../../../context/PermissionProvider/PermissionProvider.interface'; import { MOCK_TABLE } from '../../../../mocks/TableData.mock'; -import { getListTestCase } from '../../../../rest/testAPI'; +import { getListTestCaseBySearch } from '../../../../rest/testAPI'; import { TableProfilerProvider } from './TableProfilerProvider'; // Mock dependencies @@ -38,7 +38,9 @@ jest.mock('../../../../rest/tableAPI', () => ({ getTableDetailsByFQN: jest.fn().mockResolvedValue({}), })); 
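// Aside: a small sketch of the offset arithmetic QualityTab switches to in place of
// cursor-based paging (hypothetical standalone helper, not part of this patch):
//
//   const toOffset = (currentPage: number, pageSize: number): number =>
//     (currentPage - 1) * pageSize;
//
//   toOffset(2, 10); // => 10, which is the `offset: 10` the QualityTab test expects above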
jest.mock('../../../../rest/testAPI', () => ({ - getListTestCase: jest.fn().mockResolvedValue({ data: [], paging: {} }), + getListTestCaseBySearch: jest + .fn() + .mockResolvedValue({ data: [], paging: {} }), })); jest.mock('../../../../utils/ToastUtils', () => ({ showErrorToast: jest.fn(), @@ -84,7 +86,7 @@ describe('TableProfilerProvider', () => { }); it('test cases should be fetch on data quality tab', async () => { - const mockGetListTestCase = getListTestCase as jest.Mock; + const mockGetListTestCase = getListTestCaseBySearch as jest.Mock; expect(mockGetListTestCase).toHaveBeenCalledTimes(1); expect(mockGetListTestCase).toHaveBeenCalledWith({ diff --git a/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/TableProfilerProvider.tsx b/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/TableProfilerProvider.tsx index c295bb6c3db4..f0a8dd50459a 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/TableProfilerProvider.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/Database/Profiler/TableProfiler/TableProfilerProvider.tsx @@ -39,7 +39,10 @@ import { getLatestTableProfileByFqn, getTableDetailsByFQN, } from '../../../../rest/tableAPI'; -import { getListTestCase, ListTestCaseParams } from '../../../../rest/testAPI'; +import { + getListTestCaseBySearch, + ListTestCaseParamsBySearch, +} from '../../../../rest/testAPI'; import { bytesToSize } from '../../../../utils/StringsUtils'; import { generateEntityLink } from '../../../../utils/TableUtils'; import { showErrorToast } from '../../../../utils/ToastUtils'; @@ -202,10 +205,10 @@ export const TableProfilerProvider = ({ } }; - const fetchAllTests = async (params?: ListTestCaseParams) => { + const fetchAllTests = async (params?: ListTestCaseParamsBySearch) => { setIsTestsLoading(true); try { - const { data, paging } = await getListTestCase({ + const { data, paging } = await getListTestCaseBySearch({ ...params, fields: [ TabSpecificField.TEST_CASE_RESULT, diff --git a/openmetadata-ui/src/main/resources/ui/yarn.lock b/openmetadata-ui/src/main/resources/ui/yarn.lock index 5455d79961ca..8b63c628cbac 100644 --- a/openmetadata-ui/src/main/resources/ui/yarn.lock +++ b/openmetadata-ui/src/main/resources/ui/yarn.lock @@ -5628,10 +5628,10 @@ bluebird@^3.7.2: resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== -body-parser@1.20.2: - version "1.20.2" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd" - integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA== +body-parser@1.20.3: + version "1.20.3" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.3.tgz#1953431221c6fb5cd63c4b36d53fab0928e548c6" + integrity sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g== dependencies: bytes "3.1.2" content-type "~1.0.5" @@ -5641,7 +5641,7 @@ body-parser@1.20.2: http-errors "2.0.0" iconv-lite "0.4.24" on-finished "2.4.1" - qs "6.11.0" + qs "6.13.0" raw-body "2.5.2" type-is "~1.6.18" unpipe "1.0.0" @@ -5788,6 +5788,17 @@ call-bind@^1.0.0, call-bind@^1.0.2: function-bind "^1.1.1" get-intrinsic "^1.0.2" +call-bind@^1.0.7: + version "1.0.7" + resolved 
"https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.7.tgz#06016599c40c56498c18769d2730be242b6fa3b9" + integrity sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + set-function-length "^1.2.1" + call-me-maybe@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.1.tgz#26d208ea89e37b5cbde60250a15f031c16a4d66b" @@ -6820,6 +6831,15 @@ default-gateway@^6.0.3: dependencies: execa "^5.0.0" +define-data-property@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.4.tgz#894dc141bb7d3060ae4366f6a0107e68fbe48c5e" + integrity sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + gopd "^1.0.1" + define-lazy-prop@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" @@ -7189,6 +7209,11 @@ encodeurl@~1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= +encodeurl@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-2.0.0.tgz#7b8ea898077d7e409d3ac45474ea38eaf0857a58" + integrity sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg== + end-of-stream@^1.1.0, end-of-stream@^1.4.1: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" @@ -7360,6 +7385,18 @@ es-cookie@^1.3.2: resolved "https://registry.yarnpkg.com/es-cookie/-/es-cookie-1.3.2.tgz#80e831597f72a25721701bdcb21d990319acd831" integrity sha512-UTlYYhXGLOy05P/vKVT2Ui7WtC7NiRzGtJyAKKn32g5Gvcjn7KAClLPWlipCtxIus934dFg9o9jXiBL0nP+t9Q== +es-define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845" + integrity sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ== + dependencies: + get-intrinsic "^1.2.4" + +es-errors@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f" + integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== + es-module-lexer@^1.2.1: version "1.5.4" resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.5.4.tgz#a8efec3a3da991e60efa6b633a7cad6ab8d26b78" @@ -7762,36 +7799,36 @@ expect@^26.6.2: jest-regex-util "^26.0.0" express@^4.17.1: - version "4.19.2" - resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465" - integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q== + version "4.21.0" + resolved "https://registry.yarnpkg.com/express/-/express-4.21.0.tgz#d57cb706d49623d4ac27833f1cbc466b668eb915" + integrity sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng== dependencies: accepts "~1.3.8" array-flatten "1.1.1" - body-parser "1.20.2" + body-parser "1.20.3" content-disposition "0.5.4" content-type "~1.0.4" cookie "0.6.0" cookie-signature "1.0.6" 
     debug "2.6.9"
     depd "2.0.0"
-    encodeurl "~1.0.2"
+    encodeurl "~2.0.0"
     escape-html "~1.0.3"
     etag "~1.8.1"
-    finalhandler "1.2.0"
+    finalhandler "1.3.1"
     fresh "0.5.2"
     http-errors "2.0.0"
-    merge-descriptors "1.0.1"
+    merge-descriptors "1.0.3"
     methods "~1.1.2"
     on-finished "2.4.1"
     parseurl "~1.3.3"
-    path-to-regexp "0.1.7"
+    path-to-regexp "0.1.10"
     proxy-addr "~2.0.7"
-    qs "6.11.0"
+    qs "6.13.0"
     range-parser "~1.2.1"
     safe-buffer "5.2.1"
-    send "0.18.0"
-    serve-static "1.15.0"
+    send "0.19.0"
+    serve-static "1.16.2"
     setprototypeof "1.2.0"
     statuses "2.0.1"
     type-is "~1.6.18"
@@ -7979,13 +8016,13 @@ filter-console@^0.1.1:
   resolved "https://registry.yarnpkg.com/filter-console/-/filter-console-0.1.1.tgz#6242be28982bba7415bcc6db74a79f4a294fa67c"
   integrity sha512-zrXoV1Uaz52DqPs+qEwNJWJFAWZpYJ47UNmpN9q4j+/EYsz85uV0DC9k8tRND5kYmoVzL0W+Y75q4Rg8sRJCdg==

-finalhandler@1.2.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32"
-  integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==
+finalhandler@1.3.1:
+  version "1.3.1"
+  resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.3.1.tgz#0c575f1d1d324ddd1da35ad7ece3df7d19088019"
+  integrity sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==
   dependencies:
     debug "2.6.9"
-    encodeurl "~1.0.2"
+    encodeurl "~2.0.0"
     escape-html "~1.0.3"
     on-finished "2.4.1"
     parseurl "~1.3.3"
@@ -8173,6 +8210,11 @@ function-bind@^1.1.1:
   resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
   integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==

+function-bind@^1.1.2:
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c"
+  integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==
+
 function.prototype.name@^1.1.5:
   version "1.1.5"
   resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621"
@@ -8226,6 +8268,17 @@ get-intrinsic@^1.1.3:
   has "^1.0.3"
   has-symbols "^1.0.3"

+get-intrinsic@^1.2.4:
+  version "1.2.4"
+  resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd"
+  integrity sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==
+  dependencies:
+    es-errors "^1.3.0"
+    function-bind "^1.1.2"
+    has-proto "^1.0.1"
+    has-symbols "^1.0.3"
+    hasown "^2.0.0"
+
 get-node-dimensions@^1.2.1:
   version "1.2.1"
   resolved "https://registry.yarnpkg.com/get-node-dimensions/-/get-node-dimensions-1.2.1.tgz#fb7b4bb57060fb4247dd51c9d690dfbec56b0823"
@@ -8452,6 +8505,18 @@ has-property-descriptors@^1.0.0:
   dependencies:
     get-intrinsic "^1.1.1"

+has-property-descriptors@^1.0.2:
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz#963ed7d071dc7bf5f084c5bfbe0d1b6222586854"
+  integrity sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==
+  dependencies:
+    es-define-property "^1.0.0"
+
+has-proto@^1.0.1:
+  version "1.0.3"
+  resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.3.tgz#b31ddfe9b0e6e9914536a6ab286426d0214f77fd"
+  integrity sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==
+
 has-symbols@^1.0.1, has-symbols@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.2.tgz#165d3070c00309752a1236a479331e3ac56f1423"
@@ -8476,6 +8541,13 @@ has@^1.0.3:
   dependencies:
     function-bind "^1.1.1"

+hasown@^2.0.0:
+  version "2.0.2"
+  resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003"
+  integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==
+  dependencies:
+    function-bind "^1.1.2"
+
 he@^1.2.0:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f"
@@ -10313,10 +10385,10 @@ memory-fs@^0.5.0:
     errno "^0.1.3"
     readable-stream "^2.0.1"

-merge-descriptors@1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
-  integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=
+merge-descriptors@1.0.3:
+  version "1.0.3"
+  resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.3.tgz#d80319a65f3c7935351e5cfdac8f9318504dbed5"
+  integrity sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==

 merge-stream@^2.0.0:
   version "2.0.0"
@@ -10695,6 +10767,11 @@ object-inspect@^1.12.2:
   resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea"
   integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==

+object-inspect@^1.13.1:
+  version "1.13.2"
+  resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.2.tgz#dea0088467fb991e67af4058147a24824a3043ff"
+  integrity sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==
+
 object-is@^1.0.1:
   version "1.1.5"
   resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.5.tgz#b9deeaa5fc7f1846a0faecdceec138e5778f53ac"
@@ -11063,10 +11140,10 @@ path-parse@^1.0.6:
   resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735"
   integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==

-path-to-regexp@0.1.7:
-  version "0.1.7"
-  resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c"
-  integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=
+path-to-regexp@0.1.10:
+  version "0.1.10"
+  resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.10.tgz#67e9108c5c0551b9e5326064387de4763c4d5f8b"
+  integrity sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w==

 path-to-regexp@^1.7.0:
   version "1.8.0"
@@ -11720,12 +11797,12 @@ qs@6.10.3:
   dependencies:
     side-channel "^1.0.4"

-qs@6.11.0:
-  version "6.11.0"
-  resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a"
-  integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==
+qs@6.13.0:
+  version "6.13.0"
+  resolved "https://registry.yarnpkg.com/qs/-/qs-6.13.0.tgz#6ca3bd58439f7e245655798997787b0d88a51906"
+  integrity sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==
   dependencies:
-    side-channel "^1.0.4"
+    side-channel "^1.0.6"

 qs@^6.10.2:
   version "6.11.2"
@@ -13150,10 +13227,10 @@ semver-compare@^1.0.0:
   dependencies:
     lru-cache "^6.0.0"

-send@0.18.0:
-  version "0.18.0"
-  resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be"
-  integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==
+send@0.19.0:
+  version "0.19.0"
+  resolved "https://registry.yarnpkg.com/send/-/send-0.19.0.tgz#bbc5a388c8ea6c048967049dbeac0e4a3f09d7f8"
+  integrity sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==
   dependencies:
     debug "2.6.9"
     depd "2.0.0"
@@ -13203,21 +13280,33 @@ serve-index@^1.9.1:
     mime-types "~2.1.17"
     parseurl "~1.3.2"

-serve-static@1.15.0:
-  version "1.15.0"
-  resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540"
-  integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==
+serve-static@1.16.2:
+  version "1.16.2"
+  resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.16.2.tgz#b6a5343da47f6bdd2673848bf45754941e803296"
+  integrity sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==
   dependencies:
-    encodeurl "~1.0.2"
+    encodeurl "~2.0.0"
     escape-html "~1.0.3"
     parseurl "~1.3.3"
-    send "0.18.0"
+    send "0.19.0"

 set-blocking@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
   integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc=

+set-function-length@^1.2.1:
+  version "1.2.2"
+  resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.2.tgz#aac72314198eaed975cf77b2c3b6b880695e5449"
+  integrity sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==
+  dependencies:
+    define-data-property "^1.1.4"
+    es-errors "^1.3.0"
+    function-bind "^1.1.2"
+    get-intrinsic "^1.2.4"
+    gopd "^1.0.1"
+    has-property-descriptors "^1.0.2"
+
 setprototypeof@1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656"
@@ -13290,6 +13379,16 @@ side-channel@^1.0.4:
     get-intrinsic "^1.0.2"
     object-inspect "^1.9.0"

+side-channel@^1.0.6:
+  version "1.0.6"
+  resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.6.tgz#abd25fb7cd24baf45466406b1096b7831c9215f2"
+  integrity sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==
+  dependencies:
+    call-bind "^1.0.7"
+    es-errors "^1.3.0"
+    get-intrinsic "^1.2.4"
+    object-inspect "^1.13.1"
+
 sigmund@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/sigmund/-/sigmund-1.0.1.tgz#3ff21f198cad2175f9f3b781853fd94d0d19b590"