diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 735b76e5a..9d2d9e746 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -1,6 +1,6 @@
 ## Description
"n" FeatureName: contains
+    Project "1" --> "n" Anchor: contains
+    FeatureName "1" --> "n" Feature: contains
+    FeatureName --> "Optional" SemanticVersion: contains
+    FeatureName --> "Optional" FeatureType: contains
+    Anchor "1" --> "n" AnchorFeature: contains
+    Anchor --> DataSource: contains
+    Feature <|-- AnchorFeature: extends
+    Feature <|-- DerivedFeature: extends
+    Feature --> Transformation: contains
+    Feature --> Source: contains
+    Transformation --> Function: contains
+    Source <|-- DataSource: extends
+    DataSource --> "Optional" Clazz: contains
+    DataSource --> "Optional" Function: contains
+    Source <|-- MultiFeatureSource: extends
+    MultiFeatureSource "1" --> "1..n" FeatureSource: contains
+    AnchorFeature --> DataSource: contains
+    DerivedFeature --> MultiFeatureSource: contains
+    FeathrModel <|-- Project: extends
+    FeathrModel <|-- FeatureName: extends
+    FeathrModel <|-- Anchor: extends
+    FeathrModel <|-- Feature: extends
+    FeathrModel <|-- Source: extends
+    Dimension --> DimensionType: contains
+    TensorFeatureFormat --> TensorCategory: contains
+    TensorFeatureFormat --> ValueType: contains
+    TensorFeatureFormat "1" --> "1..n" Dimension: contains
+    FeatureType --> FeatureValueType: contains
+    FeatureType --> "Optional" TensorFeatureFormat: contains
+    Window --> WindowTimeUnit: contains
+    Function <|-- MvelExpression: extends
+    Function <|-- UserDefinedFunction: extends
+    Function <|-- SparkSqlExpression: extends
+    SlidingWindowAggregation --> SparkSqlExpression: contains
+    SlidingWindowAggregation --> SlidingWindowAggregationType: contains
+    SlidingWindowAggregation --> Window: contains
+    SlidingWindowEmbeddingAggregation --> SparkSqlExpression: contains
+    SlidingWindowEmbeddingAggregation --> SlidingWindowEmbeddingAggregationType: contains
+    SlidingWindowEmbeddingAggregation --> Window: contains
+    SlidingWindowLatestAvailable --> SparkSqlExpression: contains
+    SlidingWindowLatestAvailable --> Window: contains
+    Function <|-- SlidingWindowAggregation: extends
+    Function <|-- SlidingWindowEmbeddingAggregation: extends
+    Function <|-- SlidingWindowLatestAvailable: extends
+
+    class ValueType{
+        <<enumeration>>
+        INT
+        LONG
+        FLOAT
+        DOUBLE
+        STRING
+        BOOLEAN
+        BYTES
+    }
+    class DimensionType{
+        <<enumeration>>
+        INT
+        LONG
+        STRING
+        BOOLEAN
+        BYTES
+    }
+    class TensorCategory{
+        <<enumeration>>
+        DENSE
+        SPARSE
+        RAGGED
+    }
+    class FeatureValueType{
+        <<enumeration>>
+        BOOLEAN
+        NUMERIC
+        CATEGORICAL
+        CATEGORICAL_SET
+        DENSE_VECTOR
+        TERM_VECTOR
+        TENSOR
+        UNSPECIFIED
+    }
+    class Dimension{
+        +DimensionType type
+        +Optional[str] shape
+    }
+    class TensorFeatureFormat{
+        +TensorCategory tensorCategory
+        +ValueType valueType
+        +List[Dimension] dimensions
+    }
+    class FeatureType{
+        +FeatureValueType type
+        +Optional[TensorFeatureFormat] format
+        +Union[bool, int, float, str, types] defaultValue
+    }
+    class Clazz{
+        +str fullyQualifiedName
+    }
+    class Function{
+        +str expression
+    }
+    class MvelExpression{
+        +str mvel
+    }
+    class UserDefinedFunction{
+        +str sql
+    }
+    class SemanticVersion{
+        +int majorVersion
+        +int minorVersion
+        +int patchVersion
+        +Optional[str] metadata
+    }
+    class FeathrModel{
+        +str displayName
+        +str typeName
+    }
+    class SlidingWindowAggregationType{
+        <<enumeration>>
+        SUM
+        COUNT
+        MAX
+        MIN
+        AVG
+    }
+    class SlidingWindowEmbeddingAggregationType{
+        <<enumeration>>
+        MAX_POOLING
+        MIN_POOLING
+        AVG_POOLING
+    }
+    class WindowTimeUnit{
+        <<enumeration>>
+        DAY
+        HOUR
+        MINUTE
+        SECOND
+    }
+    class Window{
+        +int size
+        +WindowTimeUnit unit
+    }
+    class SlidingWindowAggregation{
+        +SlidingWindowAggregationType aggregationType
+        +Window window
+        +SparkSqlExpression targetColumn
+        +Optional[SparkSqlExpression] filter
+        +Optional[SparkSqlExpression] groupBy
+        +Optional[int] limit
+    }
+    class SlidingWindowEmbeddingAggregation{
+        +SlidingWindowEmbeddingAggregationType aggregationType
+        +Window window
+        +SparkSqlExpression targetColumn
+        +Optional[SparkSqlExpression] filter
+        +Optional[SparkSqlExpression] groupBy
+    }
+    class SlidingWindowLatestAvailable{
+        +Optional[Window] window
+        +SparkSqlExpression targetColumn
+        +Optional[SparkSqlExpression] filter
+        +Optional[SparkSqlExpression] groupBy
+        +Optional[int] limit
+    }
+    class Source{
+    }
+    class DataSource{
+        +Optional[Clazz] clazz
+        +Optional[Function] keyFunction
+    }
+    class FeatureSource{
+        +FeatureNameId input_feature_name_id
+        +Optional[str] alias
+    }
+    class MultiFeatureSource{
+        +List[FeatureSource] sources
+    }
+    class Transformation{
+        +Function transformationFunction
+    }
+    class Feature{
+        +FeatureId id
+        +FeatureNameId feature_name_id
+        +Source source
+        +Transformation transformation
+    }
+    class AnchorFeature{
+        +AnchorId anchor_id
+        +DataSource source
+    }
+    class DerivedFeature{
+        +MultiFeatureSource source
+    }
+    class FeatureName{
+        +FeatureNameId id
+        +ProjectId project_id
+        +List[FeatureId] feature_ids
+        +Optional[SemanticVersion] semanticVersion
+        +Optional[FeatureType] featureType
+    }
+    class Project{
+        +ProjectId id
+        +List[FeatureNameId] feature_name_ids
+        +List[AnchorId] anchor_ids
+    }
+    class Anchor{
+        +AnchorId id
+        +ProjectId project_id
+        +DataSource source
+        +List[FeatureId] anchor_feature_ids
+    }
+```
\ No newline at end of file
diff --git a/registry/data-models/models.py b/registry/data-models/models.py
new file mode 100644
index 000000000..c230240ab
--- /dev/null
+++ b/registry/data-models/models.py
@@ -0,0 +1,147 @@
+from registry.data-models.transformation.models import *
+from registry.data-models.common.models import SemanticVersion, FeathrModel, Function
+from typing import Optional
+from pydantic import BaseModel
+
+
+"""
+This file defines abstract backend data models for the feature registry.
+Backend data models will be used by the backend API server to talk to the feature registry backend.
+The purpose of this is to decouple backend data models from API-specific data models.
+Diagram of the data models: ./data-model-diagram.md
+"""
+
+
+class FeatureId(BaseModel):
+    """
+    Id for Feature; a unique ID that represents a Feature.
+    The id can be a simple string, an int, or a complex key.
+    """
+    id: str  # id of a feature
+
+
+class FeatureNameId(BaseModel):
+    """
+    Id for FeatureName; a unique ID that represents a FeatureName.
+    The id can be a simple string, an int, or a complex key.
+    """
+    id: str  # id of a FeatureName
+
+
+class AnchorId(BaseModel):
+    """
+    Id for Anchor; a unique ID that represents an Anchor.
+    The id can be a simple string, an int, or a complex key.
+    """
+    id: str  # id of an anchor
+
+
+class ProjectId(BaseModel):
+    """
+    Id for Project; a unique ID that represents a Project.
+    The id can be a simple string, an int, or a complex key.
+    """
+    id: str  # id of a project
+
+
+class Source(FeathrModel):
+    pass
+
+
+class DataSource(Source):
+    """
+    Data source of the feature.
+    It defines the raw data source the feature is extracted from.
+ """ + clazz: Optional[Clazz] # Fully qualified Java class name for data model + keyFunction: Optional[Function] + + +class FeatureSource(BaseModel): + """ + Represents a feature source for a derived feature. That is, it is a source 'FeatureName' which is used for + creating other derived features. + """ + input_feature_name_id: FeatureNameId # Input feature name Key + alias: Optional[str] # A feature's alias to be used in transformation function. + + +class MultiFeatureSource(Source): + """ + Feature sources of the feature. + It defines one to many features where the feature is derived from. + """ + sources: List[FeatureSource] # All source features which the feature is derived from + + +class Transformation(BaseModel): + """ + The transformation of a Feature. + A transformation function represents the transformation logic to produce feature value from the source of FeatureAnchor + """ + transformationFunction: Function + + +class Feature(FeathrModel): + """ + Actual implementation of FeatureName. + An implementation defines where a feature is extracted from (Source) and how it is computed (Transformation). + The Source of a feature can be raw data sources and/or other features. + """ + id: FeatureId # Unique ID for Feature + feature_name_id: FeatureNameId # Id of the feature name that the feature belongs to + source: Source # Source can be either data source or feature source + transformation: Transformation # transformation logic to produce feature value + + +class AnchorFeature(Feature): + """ + Feature implementation of FeatureName which anchored to a data source. + """ + anchor_id: AnchorId # ID of the anchor this feature belongs to + source: DataSource # Raw data source where the feature is extracted from + + +class DerivedFeature(Feature): + """ + Feature implementation that is derived from other FeatureNames. + """ + source: MultiFeatureSource # Source features where the feature is derived from + + +class FeatureName(FeathrModel): + """ + Named Feature Interface that can be backed by multiple Feature implementations across + different environments accessing different sources (data lake access for batch training, + KV store access for online serving). Each FeatureName is defined by feature producer. + Feature consumers reference a feature by that name to access that feature data, + agnostic of runtime environment. Each FeatureName also encloses attributes that does not + change across implementations. + """ + id: FeatureNameId # unique ID for FeatureName, used to extract data for current FeatureName + project_id: ProjectId # ID of the project the FeatureName belongs to + feature_ids: List[FeatureId] # List of ids of feature that the FeatureName has + semanticVersion: Optional[SemanticVersion] # Semantic version associated with this FeatureName + featureType: Optional[FeatureType] # Information about featureName, like feature type, format and value. + + +class Project(FeathrModel): + """ + Group of FeatureNames. It can be a project the team is working on, + or a namespace which related FeatureNames have. + """ + id: ProjectId # Unique ID of the project. + feature_name_ids: List[FeatureNameId] # List of feature name ids that the project has + anchor_ids: List[AnchorId] # List of Anchor ids that the project has + + +class Anchor(FeathrModel): + """ + Group of AnchorFeatures which anchored on same DataSource. + This is mainly used by feature producer gather information about DataSource + and FeatureImplementations associated with the DataSource. 
+ """ + id: AnchorId # Unique ID for Anchor + project_id: ProjectId # ID of Project that the anchor belongs to + source: DataSource # data source of the Anchor + anchor_feature_ids: List[FeatureId] # List of anchor features that the anchor has diff --git a/registry/data-models/transformation/__init__.py b/registry/data-models/transformation/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/registry/data-models/transformation/models.py b/registry/data-models/transformation/models.py new file mode 100644 index 000000000..b721d174e --- /dev/null +++ b/registry/data-models/transformation/models.py @@ -0,0 +1,84 @@ +from registry.data-models.common.models import * +from typing import Optional + + +class SlidingWindowAggregationType(Enum): + """ + Represents supported types of aggregation. + """ + SUM = "sum" + COUNT = "count" + MAX = "maximum" + MIN = "minium" + AVG = "average" + + +class SlidingWindowEmbeddingAggregationType(Enum): + """ + Represents supported types for embedding aggregation. + Pooling is a sample-based discretization process. The objective is to down-sample an input + representation and reduce its dimensionality. + """ + MAX_POOLING = "max_pooling" # Max pooling is done by applying a max filter to (usually) non-overlapping subregions of the initial representation + MIN_POOLING = "min_pooling" # Min pooling is done by applying a min filter to (usually) non-overlapping subregions of the initial representation. + AVG_POOLING = "avg_pooling" # Average pooling is done by applying a average filter to (usually) non-overlapping subregions of the initial representation + + +class WindowTimeUnit(Enum): + """ + Represents a unit of time. + """ + DAY = "day" + HOUR = "hour" + MINUTE = "minute" + SECOND = "second" + + +class Window(BaseModel): + """ + Represents a time window used in sliding window algorithms. + """ + size: int # Represents the duration of the window. + unit: WindowTimeUnit + + +class SlidingWindowAggregation(Function): + """ + Sliding window aggregation produces feature data by aggregating a collection of data within a given time + interval into an aggregate value. It ensures point-in-time correctness, when joining with label data, + it looks back the configurable time window from each entry's timestamp and compute the aggregate value. + This class can be extended to support LateralView in aggregation. + """ + aggregationType: SlidingWindowAggregationType # Represents supported types of aggregation. + window: Window # Represents the time window to look back from label data's timestamp. + targetColumn: SparkSqlExpression # The target column to perform aggregation against. + filter: Optional[SparkSqlExpression] # Represents the filter statement before the aggregation. + groupBy: Optional[SparkSqlExpression] # Represents the target to be grouped by before aggregation. + limit: Optional[int] # Represents the max number of groups (with aggregation results) to return. + + +class SlidingWindowEmbeddingAggregation(Function): + """ + Sliding window embedding aggregation produces a single embedding by performing element-wise operations or + discretion on a collection of embeddings within a given time interval. It ensures point-in-time correctness, + when joining with label data, Frame looks back the configurable time window from each entry's timestamp and produce + the aggregated embedding. + """ + aggregationType: SlidingWindowEmbeddingAggregationType # Represents supported types for embedding aggregation. 
diff --git a/registry/purview-registry/api-spec.md b/registry/purview-registry/api-spec.md
index 1b14cae8b..d2e82a878 100644
--- a/registry/purview-registry/api-spec.md
+++ b/registry/purview-registry/api-spec.md
@@ -277,6 +277,11 @@ List **names** of all projects.
 Response Type: `array`
 
+### `GET /projects-ids`
+Get a dictionary mapping project **ids** to **names**.
+
+Response Type: `dict`
+
 ### `GET /projects/{project}`
 Get everything defined in the project
diff --git a/registry/purview-registry/main.py b/registry/purview-registry/main.py
index 5818cd513..5d38adf74 100644
--- a/registry/purview-registry/main.py
+++ b/registry/purview-registry/main.py
@@ -48,6 +48,9 @@ def to_camel(s):
 def get_projects() -> list[str]:
     return registry.get_projects()
 
+@router.get("/projects-ids")
+def get_projects_ids() -> dict:
+    return registry.get_projects_ids()
 
 @router.get("/projects/{project}",tags=["Project"])
 def get_projects(project: str) -> dict:
@@ -62,6 +65,17 @@ def get_project_datasources(project: str) -> list:
     return list([to_camel(e.to_dict()) for e in sources])
 
 
+@router.get("/projects/{project}/datasources/{datasource}",tags=["Project"])
+def get_datasource(project: str, datasource: str) -> dict:
+    p = registry.get_entity(project,True)
+    for s in p.attributes.sources:
+        if str(s.id) == datasource:
+            return s
+    # If the datasource is not found, raise a 404 error
+    raise HTTPException(
+        status_code=404, detail=f"Data Source {datasource} not found")
+
+
 @router.get("/projects/{project}/features",tags=["Project"])
 def get_project_features(project: str, keyword: Optional[str] = None) -> list:
     atlasEntities = registry.get_project_features(project, keywords=keyword)
diff --git a/registry/purview-registry/registry/interface.py b/registry/purview-registry/registry/interface.py
index 78e79cb88..7559a3f27 100644
--- a/registry/purview-registry/registry/interface.py
+++ b/registry/purview-registry/registry/interface.py
@@ -12,6 +12,13 @@ def get_projects(self) -> list[str]:
         """
         pass
 
+    @abstractmethod
+    def get_projects_ids(self) -> dict:
+        """
+        Returns the id-to-name mapping of all projects
+        """
+        pass
+
     @abstractmethod
     def get_entity(self, id_or_name: Union[str, UUID],recursive = False) -> Entity:
         """
diff --git a/registry/purview-registry/registry/purview_registry.py b/registry/purview-registry/registry/purview_registry.py
index 0d43dd6e6..15a650167 100644
--- a/registry/purview-registry/registry/purview_registry.py
+++ b/registry/purview-registry/registry/purview_registry.py
@@ -5,6 +5,9 @@
 from urllib.error import HTTPError
 from uuid import UUID
 
+from registry.models import to_snake
+from pyapacheatlas.core.util import AtlasException
+
 from azure.identity import DefaultAzureCredential
 from loguru import logger
 from pyapacheatlas.auth.azcredential import AzCredentialWrapper
@@ -20,6 +23,9 @@
 Label_BelongsTo = "BELONGSTO"
 Label_Consumes = "CONSUMES"
 Label_Produces = "PRODUCES"
+TYPEDEF_DERIVED_FEATURE="feathr_derived_feature_v1"
+TYPEDEF_ANCHOR_FEATURE="feathr_anchor_feature_v1"
+
 TYPEDEF_ARRAY_ANCHOR=f"array<feathr_anchor_v1>"
 TYPEDEF_ARRAY_DERIVED_FEATURE=f"array<feathr_derived_feature_v1>"
 TYPEDEF_ARRAY_ANCHOR_FEATURE=f"array<feathr_anchor_feature_v1>"
@@ -47,7 +53,18 @@ def get_projects(self) -> list[str]:
         result = self.purview_client.discovery.query(filter=searchTerm)
         result_entities = result['value']
         return [x['qualifiedName'] for x in result_entities]
-
+
+    def get_projects_ids(self) -> dict:
+        """
+        Returns the names and ids of all projects"""
+        searchTerm = {"entityType": str(EntityType.Project)}
+        result = self.purview_client.discovery.query(filter=searchTerm)
+        result_entities = result['value']
+        projects = {}
+        for x in result_entities:
+            projects[x['id']] = x['qualifiedName']
+        return projects
+
     def get_entity(self, id_or_name: Union[str, UUID],recursive = False) -> Entity:
         id = self.get_entity_id(id_or_name)
         if not id:
@@ -557,17 +574,47 @@ def _register_feathr_feature_types(self):
     def _upload_entity_batch(self, entity_batch:list[AtlasEntity]):
         # we only support entity creation, update is not supported.
         # setting lastModifiedTS ==0 will ensure this, if another entity with ts>=1 exist
-        # upload funtion will fail with 412 Precondition fail.
+        # upload function will fail with 412 Precondition fail.
         for entity in entity_batch:
-            entity.lastModifiedTS="0"
-            results = self.purview_client.upload_entities(
-                batch=entity)
-            if results:
-                dict = {x.guid: x for x in entity_batch}
-                for k, v in results['guidAssignments'].items():
-                    dict[k].guid = v
+            self._upload_single_entity(entity)
+
+    def _upload_single_entity(self, entity:AtlasEntity):
+        try:
+            """
+            Try to find an existing entity/process first; if found, reuse the existing entity's GUID
+            """
+            id = self.get_entity_id(entity.qualifiedName)
+            response = self.purview_client.get_entity(id)['entities'][0]
+            j = entity.to_json()
+            if j["typeName"] == response["typeName"]:
+                if j["typeName"] == "Process":
+                    if response["attributes"]["qualifiedName"] != j["attributes"]["qualifiedName"]:
+                        raise RuntimeError("The requested entity %s conflicts with the existing entity in Purview" % j["attributes"]["qualifiedName"])
+                else:
+                    if "type" in response['attributes'] and response["typeName"] in (TYPEDEF_ANCHOR_FEATURE, TYPEDEF_DERIVED_FEATURE):
+                        conf = ConfigFactory.parse_string(response['attributes']['type'])
+                        response['attributes']['type'] = dict(conf)
+                    keys = set([to_snake(key) for key in j["attributes"].keys()]) - set(["qualified_name"])
+                    keys.add("qualifiedName")
+                    for k in keys:
+                        if response["attributes"][k] != j["attributes"][k]:
+                            raise RuntimeError("The requested entity %s conflicts with the existing entity in Purview" % j["attributes"]["qualifiedName"])
+                entity.guid = response["guid"]
+                return
             else:
-                raise RuntimeError("Feature registration failed.", results)
+                raise RuntimeError("The requested entity %s conflicts with the existing entity in Purview" % j["attributes"]["qualifiedName"])
+        except AtlasException as e:
+            pass
+
+        entity.lastModifiedTS="0"
+        results = self.purview_client.upload_entities(
+            batch=entity)
+        if results:
+            d = {x.guid: x for x in [entity]}
+            for k, v in results['guidAssignments'].items():
+                d[k].guid = v
+        else:
+            raise RuntimeError("Feature registration failed.", results)
 
     def _generate_fully_qualified_name(self, segments):
         return self.registry_delimiter.join(segments)
diff --git a/registry/sql-registry/api-spec.md b/registry/sql-registry/api-spec.md
index 1b14cae8b..d2e82a878 100644
--- a/registry/sql-registry/api-spec.md
+++ b/registry/sql-registry/api-spec.md
@@ -277,6 +277,11 @@ List **names** of all projects.
 Response Type: `array`
 
+### `GET /projects-ids`
+Get a dictionary mapping project **ids** to **names**.
+
+Response Type: `dict`
+
 ### `GET /projects/{project}`
 Get everything defined in the project
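Both registries now expose the same route; a hypothetical client call (host and response values invented for illustration):

```python
# Hypothetical call against a locally running registry API.
import requests

resp = requests.get("http://localhost:8000/projects-ids")
resp.raise_for_status()
# Expected shape per the spec: {"<project-id>": "<project-qualified-name>", ...}
print(resp.json())
```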
diff --git a/registry/sql-registry/main.py b/registry/sql-registry/main.py
index 00ac1d422..46cefbb34 100644
--- a/registry/sql-registry/main.py
+++ b/registry/sql-registry/main.py
@@ -1,10 +1,12 @@
 import os
+import traceback
 from typing import Optional
 from uuid import UUID
 from fastapi import APIRouter, FastAPI, HTTPException
+from fastapi.responses import JSONResponse
 from starlette.middleware.cors import CORSMiddleware
 from registry import *
-from registry.db_registry import DbRegistry
+from registry.db_registry import DbRegistry, ConflictError
 from registry.models import AnchorDef, AnchorFeatureDef, DerivedFeatureDef, EntityType, ProjectDef, SourceDef, to_snake
 
 rp = "/"
@@ -28,11 +30,57 @@
     allow_headers=["*"],
 )
 
+def exc_to_content(e: Exception) -> dict:
+    content={"message": str(e)}
+    if os.environ.get("REGISTRY_DEBUGGING"):
+        content["traceback"] = "".join(traceback.TracebackException.from_exception(e).format())
+    return content
+
+@app.exception_handler(ConflictError)
+async def conflict_error_handler(_, exc: ConflictError):
+    return JSONResponse(
+        status_code=409,
+        content=exc_to_content(exc),
+    )
+
+
+@app.exception_handler(ValueError)
+async def value_error_handler(_, exc: ValueError):
+    return JSONResponse(
+        status_code=400,
+        content=exc_to_content(exc),
+    )
+
+@app.exception_handler(TypeError)
+async def type_error_handler(_, exc: TypeError):
+    return JSONResponse(
+        status_code=400,
+        content=exc_to_content(exc),
+    )
+
+
+@app.exception_handler(KeyError)
+async def key_error_handler(_, exc: KeyError):
+    return JSONResponse(
+        status_code=404,
+        content=exc_to_content(exc),
+    )
+
+@app.exception_handler(IndexError)
+async def index_error_handler(_, exc: IndexError):
+    return JSONResponse(
+        status_code=404,
+        content=exc_to_content(exc),
+    )
+
 @router.get("/projects")
 def get_projects() -> list[str]:
     return registry.get_projects()
 
+@router.get("/projects-ids")
+def get_projects_ids() -> dict:
+    return registry.get_projects_ids()
 
 @router.get("/projects/{project}")
 def get_projects(project: str) -> dict:
@@ -47,6 +95,17 @@ def get_project_datasources(project: str) -> list:
     return list([e.to_dict() for e in sources])
 
 
+@router.get("/projects/{project}/datasources/{datasource}")
+def get_datasource(project: str, datasource: str) -> dict:
+    p = registry.get_entity(project)
+    for s in p.attributes.sources:
+        if str(s.id) == datasource:
+            return s
+    # If the datasource is not found, raise a 404 error
+    raise HTTPException(
+        status_code=404, detail=f"Data Source {datasource} not found")
+
+
 @router.get("/projects/{project}/features")
 def get_project_features(project: str, keyword: Optional[str] = None, page: Optional[int] = None, limit: Optional[int] = None) -> list:
     if keyword:
@@ -54,7 +113,7 @@ def get_project_features(project: str, keyword: Optional[str] = None, page: Opti
     size = None
     if page is not None and limit is not None:
         start = (page - 1) * limit
-        size = limit
+        size = limit
     efs = registry.search_entity(
         keyword, [EntityType.AnchorFeature, EntityType.DerivedFeature], project=project, start=start, size=size)
     feature_ids = [ef.id for ef in efs]
diff --git a/registry/sql-registry/registry/__init__.py b/registry/sql-registry/registry/__init__.py
index 5ce157408..afcc69eee 100644
--- a/registry/sql-registry/registry/__init__.py
+++ b/registry/sql-registry/registry/__init__.py
@@ -3,4 +3,4 @@
 from registry.models import *
 from registry.interface import Registry
 from registry.database import DbConnection, connect
-from registry.db_registry import DbRegistry
\ No newline at end of file
+from registry.db_registry import DbRegistry, ConflictError
\ No newline at end of file
diff --git a/registry/sql-registry/registry/database.py b/registry/sql-registry/registry/database.py
index 39bab8ec4..21b8a2aca 100644
--- a/registry/sql-registry/registry/database.py
+++ b/registry/sql-registry/registry/database.py
@@ -3,6 +3,13 @@
 import logging
 import threading
 import os
+
+# Check if the platform is Mac (Darwin).
+# If so, import _scproxy, which is necessary for pymssql to work on macOS.
+import platform
+if platform.system().lower().startswith('dar'):
+    import _scproxy
+
 import pymssql
 
 
@@ -53,7 +60,7 @@ def __init__(self, params):
         self.params = params
         self.make_connection()
         self.mutex = threading.Lock()
-
+
     def make_connection(self):
         self.conn = pymssql.connect(**self.params)
 
@@ -85,10 +92,10 @@ def transaction(self):
         """
         Start a transaction so we can run multiple SQL in one batch.
         User should use `with` with the returned value, look into db_registry.py for more real usage.
-
+
         NOTE: `self.query` and `self.execute` will use a different MSSQL connection so any change made
         in this transaction will *not* be visible in these calls.
-
+
         The minimal implementation could look like this if the underlying engine doesn't support transaction.
         ```
         @contextmanager
@@ -125,4 +132,4 @@ def connect(*args, **kargs):
         ret = p.connect(*args, **kargs)
         if ret is not None:
             return ret
-    raise RuntimeError("Cannot connect to database")
\ No newline at end of file
+    raise RuntimeError("Cannot connect to database")
diff --git a/registry/sql-registry/registry/db_registry.py b/registry/sql-registry/registry/db_registry.py
index 58f4b98db..1553508d8 100644
--- a/registry/sql-registry/registry/db_registry.py
+++ b/registry/sql-registry/registry/db_registry.py
@@ -7,6 +7,9 @@
 from registry.models import AnchorAttributes, AnchorDef, AnchorFeatureAttributes, AnchorFeatureDef, DerivedFeatureAttributes, DerivedFeatureDef, Edge, EntitiesAndRelations, Entity, EntityRef, EntityType, ProjectAttributes, ProjectDef, RelationshipType, SourceAttributes, SourceDef, _to_type, _to_uuid
 import json
 
+class ConflictError(Exception):
+    pass
+
 
 def quote(id):
     if isinstance(id, str):
@@ -16,7 +19,6 @@ def quote(id):
     else:
         return ",".join([quote(i) for i in id])
 
-
 class DbRegistry(Registry):
     def __init__(self):
         self.conn = connect()
@@ -25,6 +27,14 @@ def get_projects(self) -> list[str]:
         ret = self.conn.query(
             f"select qualified_name from entities where entity_type=%s", str(EntityType.Project))
         return list([r["qualified_name"] for r in ret])
+
+    def get_projects_ids(self) -> dict:
+        projects = {}
+        ret = self.conn.query(
+            f"select entity_id, qualified_name from entities where entity_type=%s", str(EntityType.Project))
+        for r in ret:
+            projects[r['entity_id']] = r['qualified_name']
+        return projects
 
     def get_entity(self, id_or_name: Union[str, UUID]) -> Entity:
         return self._fill_entity(self._get_entity(id_or_name))
@@ -41,6 +51,8 @@ def get_entity_id(self, id_or_name: Union[str, UUID]) -> UUID:
         # It is a name
         ret = self.conn.query(
             f"select entity_id from entities where qualified_name=%s", str(id_or_name))
+        if len(ret) == 0:
+            raise KeyError(f"Entity {id_or_name} not found")
         return ret[0]["entity_id"]
 
     def get_neighbors(self, id_or_name: Union[str, UUID], relationship: RelationshipType) -> list[Edge]:
@@ -138,7 +150,7 @@ def create_project(self, definition: ProjectDef) -> UUID:
                     len(r), definition.qualified_name)
             # The entity with same name already exists but with different type
             if _to_type(r[0]["entity_type"], EntityType) != EntityType.Project:
-                raise ValueError("Entity %s already exists" %
+                raise ConflictError("Entity %s already exists" %
                                  definition.qualified_name)
             # Just return the existing project id
             return _to_uuid(r[0]["entity_id"])
@@ -166,7 +178,7 @@ def create_project_datasource(self, project_id: UUID, definition: SourceDef) ->
                     len(r), definition.qualified_name)
             # The entity with same name already exists but with different type
             if _to_type(r[0]["entity_type"], EntityType) != EntityType.Source:
-                raise ValueError("Entity %s already exists" %
+                raise ConflictError("Entity %s already exists" %
                                  definition.qualified_name)
             attr: SourceAttributes = _to_type(
                 json.loads(r[0]["attributes"]), SourceAttributes)
@@ -179,7 +191,7 @@ def create_project_datasource(self, project_id: UUID, definition: SourceDef) ->
                 # Creating exactly same entity
                 # Just return the existing id
                 return _to_uuid(r[0]["entity_id"])
-            raise ValueError("Entity %s already exists" %
+            raise ConflictError("Entity %s already exists" %
                              definition.qualified_name)
         id = uuid4()
         c.execute(f"insert into entities (entity_id, entity_type, qualified_name, attributes) values (%s, %s, %s, %s)",
@@ -207,7 +219,7 @@ def create_project_anchor(self, project_id: UUID, definition: AnchorDef) -> UUID
                     len(r), definition.qualified_name)
             # The entity with same name already exists but with different type
             if _to_type(r[0]["entity_type"], EntityType) != EntityType.Anchor:
-                raise ValueError("Entity %s already exists" %
+                raise ConflictError("Entity %s already exists" %
                                  definition.qualified_name)
             attr: AnchorAttributes = _to_type(
                 json.loads(r[0]["attributes"]), AnchorAttributes)
@@ -215,7 +227,7 @@ def create_project_anchor(self, project_id: UUID, definition: AnchorDef) -> UUID
                 # Creating exactly same entity
                 # Just return the existing id
                 return _to_uuid(r[0]["entity_id"])
-            raise ValueError("Entity %s already exists" %
+            raise ConflictError("Entity %s already exists" %
                              definition.qualified_name)
         c.execute("select entity_id, qualified_name from entities where entity_id = %s and entity_type = %s", (str(
             definition.source_id), str(EntityType.Source)))
@@ -257,7 +269,7 @@ def create_project_anchor_feature(self, project_id: UUID, anchor_id: UUID, defin
                     len(r), definition.qualified_name)
             # The entity with same name already exists but with different type
             if _to_type(r[0]["entity_type"], EntityType) != EntityType.AnchorFeature:
-                raise ValueError("Entity %s already exists" %
+                raise ConflictError("Entity %s already exists" %
                                  definition.qualified_name)
             attr: AnchorFeatureAttributes = _to_type(
                 json.loads(r[0]["attributes"]), AnchorFeatureAttributes)
@@ -269,7 +281,7 @@ def create_project_anchor_feature(self, project_id: UUID, anchor_id: UUID, defin
                 # Just return the existing id
                 return _to_uuid(r[0]["entity_id"])
             # The existing entity has different definition, that's a conflict
-            raise ValueError("Entity %s already exists" %
+            raise ConflictError("Entity %s already exists" %
                              definition.qualified_name)
         source_id = anchor.attributes.source.id
         id = uuid4()
@@ -305,7 +317,7 @@ def create_project_derived_feature(self, project_id: UUID, definition: DerivedFe
                     len(r), definition.qualified_name)
             # The entity with same name already exists but with different type, that's conflict
             if _to_type(r[0]["entity_type"], EntityType) != EntityType.DerivedFeature:
-                raise ValueError("Entity %s already exists" %
+                raise ConflictError("Entity %s already exists" %
                                  definition.qualified_name)
             attr: DerivedFeatureAttributes = _to_type(
                 json.loads(r[0]["attributes"]), DerivedFeatureAttributes)
@@ -317,7 +329,7 @@ def create_project_derived_feature(self, project_id: UUID, definition: DerivedFe
                 # Just return the existing id
                 return _to_uuid(r[0]["entity_id"])
             # The existing entity has different definition, that's a conflict
-            raise ValueError("Entity %s already exists" %
+            raise ConflictError("Entity %s already exists" %
                              definition.qualified_name)
         r1 = []
         # Fill `input_anchor_features`, from `definition` we have ids only, we still need qualified names
@@ -429,7 +441,7 @@ def _get_entity(self, id_or_name: Union[str, UUID]) -> Entity:
         where entity_id = %s
         ''', self.get_entity_id(id_or_name))
         if not row:
-            raise ValueError(f"Entity {id_or_name} not found")
+            raise KeyError(f"Entity {id_or_name} not found")
         row=row[0]
         row["attributes"] = json.loads(row["attributes"])
         return _to_type(row, Entity)
diff --git a/registry/sql-registry/registry/interface.py b/registry/sql-registry/registry/interface.py
index dbaf2e8fd..7f1439079 100644
--- a/registry/sql-registry/registry/interface.py
+++ b/registry/sql-registry/registry/interface.py
@@ -14,6 +14,13 @@ def get_projects(self) -> list[str]:
         """
         pass
 
+    @abstractmethod
+    def get_projects_ids(self) -> dict:
+        """
+        Returns the id-to-name mapping of all projects
+        """
+        pass
+
     @abstractmethod
     def get_entity(self, id_or_name: Union[str, UUID]) -> Entity:
         """
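The handlers registered in `main.py` above map registry errors onto HTTP status codes: `ConflictError` → 409, `ValueError`/`TypeError` → 400, `KeyError`/`IndexError` → 404. A self-contained sketch of the same pattern (illustrative only, not this PR's code):

```python
# Standalone illustration of the exception-handler pattern used above.
from fastapi import FastAPI
from fastapi.responses import JSONResponse
from fastapi.testclient import TestClient

class ConflictError(Exception):
    pass

app = FastAPI()

@app.exception_handler(ConflictError)
async def conflict_error_handler(_, exc: ConflictError):
    # Domain-level conflicts surface to clients as HTTP 409.
    return JSONResponse(status_code=409, content={"message": str(exc)})

@app.get("/demo")
def demo():
    raise ConflictError("Entity demo_project already exists")

client = TestClient(app)
assert client.get("/demo").status_code == 409
```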
diff --git a/src/main/scala/com/linkedin/feathr/common/AnchorExtractor.scala b/src/main/scala/com/linkedin/feathr/common/AnchorExtractor.scala
index 2e38e4d04..185c9d2d6 100644
--- a/src/main/scala/com/linkedin/feathr/common/AnchorExtractor.scala
+++ b/src/main/scala/com/linkedin/feathr/common/AnchorExtractor.scala
@@ -1,7 +1,5 @@
 package com.linkedin.feathr.common
 
-import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
-
 /**
  * Provides feature values based on some "raw" data element
  *
@@ -39,12 +37,14 @@ trait AnchorExtractor[T] extends AnchorExtractorBase[T] with SparkRowExtractor {
    * @param datum input row
    * @return list of feature keys
    */
-  def getKeyFromRow(datum: GenericRowWithSchema): Seq[String] = getKey(datum.asInstanceOf[T])
+  def getKeyFromRow(datum: Any): Seq[String] = getKey(datum.asInstanceOf[T])
 
   /**
    * Get the feature value from the row
    * @param datum input row
    * @return A map of feature name to feature value
    */
-  def getFeaturesFromRow(datum: GenericRowWithSchema): Map[String, FeatureValue] = getFeatures(datum.asInstanceOf[T])
+  def getFeaturesFromRow(datum: Any): Map[String, FeatureValue] = getFeatures(datum.asInstanceOf[T])
+
+  override def toString: String = getClass.getSimpleName
 }
diff --git a/src/main/scala/com/linkedin/feathr/common/CanConvertToAvroRDD.scala b/src/main/scala/com/linkedin/feathr/common/CanConvertToAvroRDD.scala
new file mode 100644
index 000000000..7051a308c
--- /dev/null
+++ b/src/main/scala/com/linkedin/feathr/common/CanConvertToAvroRDD.scala
@@ -0,0 +1,20 @@
+package com.linkedin.feathr.common
+
+import org.apache.avro.generic.IndexedRecord
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.DataFrame
+
+/**
+ * If an AnchorExtractor only works on an Avro record, it should extend
+ * this trait and use convertToAvroRdd to do a one-time batch conversion of a DataFrame to an RDD of its choice.
+ * convertToAvroRdd will be called by the Feathr engine before calling getKeyFromRow() and getFeaturesFromRow() in AnchorExtractor.
+ */
+trait CanConvertToAvroRDD {
+
+  /**
+   * One-time batch conversion of the input data source into an RDD[IndexedRecord] for feature extraction later
+   * @param df input data source
+   * @return batch preprocessed dataframe, as RDD[IndexedRecord]
+   */
+  def convertToAvroRdd(df: DataFrame) : RDD[IndexedRecord]
+}
diff --git a/src/main/scala/com/linkedin/feathr/common/SparkRowExtractor.scala b/src/main/scala/com/linkedin/feathr/common/SparkRowExtractor.scala
index 04e715e8c..ad088ac0a 100644
--- a/src/main/scala/com/linkedin/feathr/common/SparkRowExtractor.scala
+++ b/src/main/scala/com/linkedin/feathr/common/SparkRowExtractor.scala
@@ -1,7 +1,5 @@
 package com.linkedin.feathr.common
 
-import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
-
 /**
  * An extractor trait that provides APIs to transform a Spark GenericRowWithSchema into feature values
  */
@@ -12,12 +10,12 @@ trait SparkRowExtractor {
    * @param datum input row
    * @return list of feature keys
    */
-  def getKeyFromRow(datum: GenericRowWithSchema): Seq[String]
+  def getKeyFromRow(datum: Any): Seq[String]
 
   /**
    * Get the feature value from the row
    * @param datum input row
    * @return A map of feature name to feature value
    */
-  def getFeaturesFromRow(datum: GenericRowWithSchema): Map[String, FeatureValue]
+  def getFeaturesFromRow(datum: Any): Map[String, FeatureValue]
 }
\ No newline at end of file
diff --git a/src/main/scala/com/linkedin/feathr/offline/PostTransformationUtil.scala b/src/main/scala/com/linkedin/feathr/offline/PostTransformationUtil.scala
index eb2f4f0ae..b1f75d662 100644
--- a/src/main/scala/com/linkedin/feathr/offline/PostTransformationUtil.scala
+++ b/src/main/scala/com/linkedin/feathr/offline/PostTransformationUtil.scala
@@ -1,10 +1,10 @@
 package com.linkedin.feathr.offline
 
 import java.io.Serializable
-
 import com.linkedin.feathr.common
 import com.linkedin.feathr.common.{FeatureTypes, FeatureValue}
 import com.linkedin.feathr.offline.exception.FeatureTransformationException
+import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext
 import com.linkedin.feathr.offline.mvel.{FeatureVariableResolverFactory, MvelContext}
 import com.linkedin.feathr.offline.transformation.MvelDefinition
 import com.linkedin.feathr.offline.util.{CoercionUtilsScala, FeaturizedDatasetUtils}
@@ -32,9 +32,9 @@ private[offline] object PostTransformationUtil {
    * @param input input feature value
    * @return transformed feature value
    */
-  def booleanTransformer(featureName: String, mvelExpression: MvelDefinition, compiledExpression: Serializable, input: Boolean): Boolean = {
+  def booleanTransformer(featureName: String, mvelExpression: MvelDefinition, compiledExpression: Serializable, input: Boolean, mvelContext: Option[FeathrExpressionExecutionContext]): Boolean = {
     val toFeatureValue = common.FeatureValue.createBoolean(input)
-    val transformedFeatureValue = transformFeatureValues(featureName, toFeatureValue, compiledExpression, FeatureTypes.TERM_VECTOR)
+    val transformedFeatureValue = transformFeatureValues(featureName, toFeatureValue, compiledExpression, FeatureTypes.TERM_VECTOR, mvelContext)
     transformedFeatureValue match {
       case Success(fVal) => fVal.getAsTermVector.containsKey("true")
       case Failure(ex) =>
@@ -57,12 +57,12 @@ private[offline] object PostTransformationUtil {
       featureName: String,
       mvelExpression: MvelDefinition,
       compiledExpression: Serializable,
-      input: GenericRowWithSchema): Map[String, Float] = {
+      input: GenericRowWithSchema, mvelContext: Option[FeathrExpressionExecutionContext]): Map[String, Float] = {
     if (input != null) {
       val inputMapKey = input.getAs[Seq[String]](FeaturizedDatasetUtils.FDS_1D_TENSOR_DIM)
       val inputMapVal = input.getAs[Seq[Float]](FeaturizedDatasetUtils.FDS_1D_TENSOR_VALUE)
       val inputMap = inputMapKey.zip(inputMapVal).toMap
-      mapTransformer(featureName, mvelExpression, compiledExpression, inputMap)
+      mapTransformer(featureName, mvelExpression, compiledExpression, inputMap, mvelContext)
     } else Map()
   }
 
@@ -79,7 +79,8 @@ private[offline] object PostTransformationUtil {
       featureNameColumnTuples: Seq[(String, String)],
       contextDF: DataFrame,
       transformationDef: Map[String, MvelDefinition],
-      defaultTransformation: (DataType, String) => Column): DataFrame = {
+      defaultTransformation: (DataType, String) => Column,
+      mvelContext: Option[FeathrExpressionExecutionContext]): DataFrame = {
     val featureColumnNames = featureNameColumnTuples.map(_._2)
 
     // Transform the features with the provided transformations
@@ -93,11 +94,11 @@ private[offline] object PostTransformationUtil {
           val parserContext = MvelContext.newParserContext()
           val compiledExpression = MVEL.compileExpression(mvelExpressionDef.mvelDef, parserContext)
           val featureType = mvelExpressionDef.featureType
-          val convertToString = udf(stringTransformer(featureName, mvelExpressionDef, compiledExpression, _: String))
-          val convertToBoolean = udf(booleanTransformer(featureName, mvelExpressionDef, compiledExpression, _: Boolean))
-          val convertToFloat = udf(floatTransformer(featureName, mvelExpressionDef, compiledExpression, _: Float))
-          val convertToMap = udf(mapTransformer(featureName, mvelExpressionDef, compiledExpression, _: Map[String, Float]))
-          val convertFDS1dTensorToMap = udf(fds1dTensorTransformer(featureName, mvelExpressionDef, compiledExpression, _: GenericRowWithSchema))
+          val convertToString = udf(stringTransformer(featureName, mvelExpressionDef, compiledExpression, _: String, mvelContext))
+          val convertToBoolean = udf(booleanTransformer(featureName, mvelExpressionDef, compiledExpression, _: Boolean, mvelContext))
+          val convertToFloat = udf(floatTransformer(featureName, mvelExpressionDef, compiledExpression, _: Float, mvelContext))
+          val convertToMap = udf(mapTransformer(featureName, mvelExpressionDef, compiledExpression, _: Map[String, Float], mvelContext))
+          val convertFDS1dTensorToMap = udf(fds1dTensorTransformer(featureName, mvelExpressionDef, compiledExpression, _: GenericRowWithSchema, mvelContext))
           fieldType.dataType match {
             case _: StringType => convertToString(contextDF(columnName))
             case _: NumericType => convertToFloat(contextDF(columnName))
@@ -126,16 +127,17 @@ private[offline] object PostTransformationUtil {
       featureName: String,
       featureValue: FeatureValue,
       compiledExpression: Serializable,
-      featureType: FeatureTypes): Try[FeatureValue] = Try {
+      featureType: FeatureTypes,
+      mvelContext: Option[FeathrExpressionExecutionContext]): Try[FeatureValue] = Try {
     val args = Map(featureName -> Some(featureValue))
     val variableResolverFactory = new FeatureVariableResolverFactory(args)
-    val transformedValue = MvelContext.executeExpressionWithPluginSupport(compiledExpression, featureValue, variableResolverFactory)
+    val transformedValue = MvelContext.executeExpressionWithPluginSupportWithFactory(compiledExpression, featureValue, variableResolverFactory, mvelContext.orNull)
     CoercionUtilsScala.coerceToFeatureValue(transformedValue, featureType)
   }
 
-  private def floatTransformer(featureName: String, mvelExpression: MvelDefinition, compiledExpression: Serializable, input: Float): Float = {
+  private def floatTransformer(featureName: String, mvelExpression: MvelDefinition, compiledExpression: Serializable, input: Float, mvelContext: Option[FeathrExpressionExecutionContext]): Float = {
     val toFeatureValue = common.FeatureValue.createNumeric(input)
-    val transformedFeatureValue = transformFeatureValues(featureName, toFeatureValue, compiledExpression, FeatureTypes.NUMERIC)
+    val transformedFeatureValue = transformFeatureValues(featureName, toFeatureValue, compiledExpression, FeatureTypes.NUMERIC, mvelContext)
     transformedFeatureValue match {
       case Success(fVal) => fVal.getAsNumeric
       case Failure(ex) =>
@@ -146,9 +148,9 @@ private[offline] object PostTransformationUtil {
     }
   }
 
-  private def stringTransformer(featureName: String, mvelExpression: MvelDefinition, compiledExpression: Serializable, input: String): String = {
+  private def stringTransformer(featureName: String, mvelExpression: MvelDefinition, compiledExpression: Serializable, input: String, mvelContext: Option[FeathrExpressionExecutionContext]): String = {
     val toFeatureValue = common.FeatureValue.createCategorical(input)
-    val transformedFeatureValue = transformFeatureValues(featureName, toFeatureValue, compiledExpression, FeatureTypes.CATEGORICAL)
+    val transformedFeatureValue = transformFeatureValues(featureName, toFeatureValue, compiledExpression, FeatureTypes.CATEGORICAL, mvelContext)
     transformedFeatureValue match {
       case Success(fVal) => fVal.getAsString
       case Failure(ex) =>
@@ -163,12 +165,13 @@ private[offline] object PostTransformationUtil {
       featureName: String,
       mvelExpression: MvelDefinition,
       compiledExpression: Serializable,
-      input: Map[String, Float]): Map[String, Float] = {
+      input: Map[String, Float],
+      mvelContext: Option[FeathrExpressionExecutionContext]): Map[String, Float] = {
     if (input == null) {
       return Map()
     }
     val toFeatureValue = new common.FeatureValue(input.asJava)
-    val transformedFeatureValue = transformFeatureValues(featureName, toFeatureValue, compiledExpression, FeatureTypes.TERM_VECTOR)
+    val transformedFeatureValue = transformFeatureValues(featureName, toFeatureValue, compiledExpression, FeatureTypes.TERM_VECTOR, mvelContext)
     transformedFeatureValue match {
       case Success(fVal) => fVal.getAsTermVector.asScala.map(kv => (kv._1.asInstanceOf[String], kv._2.asInstanceOf[Float])).toMap
       case Failure(ex) =>
diff --git a/src/main/scala/com/linkedin/feathr/offline/anchored/anchorExtractor/DebugMvelAnchorExtractor.scala b/src/main/scala/com/linkedin/feathr/offline/anchored/anchorExtractor/DebugMvelAnchorExtractor.scala
index 264b2ee9a..c4b574c8a 100644
--- a/src/main/scala/com/linkedin/feathr/offline/anchored/anchorExtractor/DebugMvelAnchorExtractor.scala
+++ b/src/main/scala/com/linkedin/feathr/offline/anchored/anchorExtractor/DebugMvelAnchorExtractor.scala
@@ -1,24 +1,22 @@
 package com.linkedin.feathr.offline.anchored.anchorExtractor
 
-import java.io.Serializable
-
 import com.linkedin.feathr.offline.config.MVELFeatureDefinition
 import com.linkedin.feathr.offline.mvel.{MvelContext, MvelUtils}
 import org.mvel2.MVEL
 
+import java.io.Serializable
 import scala.collection.convert.wrapAll._
 
 private[offline] class DebugMvelAnchorExtractor(keyExprs: Seq[String], features: Map[String, MVELFeatureDefinition])
     extends SimpleConfigurableAnchorExtractor(keyExprs, features) {
   private val debugExpressions = features.mapValues(value => findDebugExpressions(value.featureExpr)).map(identity)
-
   private val debugCompiledExpressions = debugExpressions.mapValues(_.map(x => (x, compile(x)))).map(identity)
 
   def evaluateDebugExpressions(input: Any): Map[String, Seq[(String, Any)]] = {
     debugCompiledExpressions
       .mapValues(_.map {
         case (expr, compiled) =>
-          (expr, MvelUtils.executeExpression(compiled, input, null).orNull)
+          (expr, MvelUtils.executeExpression(compiled, input, null, "", None).orNull)
       })
       .map(identity)
   }
diff --git a/src/main/scala/com/linkedin/feathr/offline/anchored/anchorExtractor/SimpleConfigurableAnchorExtractor.scala b/src/main/scala/com/linkedin/feathr/offline/anchored/anchorExtractor/SimpleConfigurableAnchorExtractor.scala
index 479167e36..edb2e2c06 100644
--- a/src/main/scala/com/linkedin/feathr/offline/anchored/anchorExtractor/SimpleConfigurableAnchorExtractor.scala
+++ b/src/main/scala/com/linkedin/feathr/offline/anchored/anchorExtractor/SimpleConfigurableAnchorExtractor.scala
@@ -6,10 +6,10 @@ import com.linkedin.feathr.common.util.CoercionUtils
 import com.linkedin.feathr.common.{AnchorExtractor, FeatureTypeConfig, FeatureTypes, FeatureValue, SparkRowExtractor}
 import com.linkedin.feathr.offline
 import com.linkedin.feathr.offline.config.MVELFeatureDefinition
+import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext
 import com.linkedin.feathr.offline.mvel.{MvelContext, MvelUtils}
 import com.linkedin.feathr.offline.util.FeatureValueTypeValidator
 import org.apache.log4j.Logger
-import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
 import org.apache.spark.sql.types._
 import org.mvel2.MVEL
 
@@ -28,6 +28,7 @@ private[offline] class SimpleConfigurableAnchorExtractor( @JsonProperty("key") k
     @JsonProperty("features") features: Map[String, MVELFeatureDefinition])
     extends AnchorExtractor[Any] with SparkRowExtractor {
 
+  var mvelContext: Option[FeathrExpressionExecutionContext] = None
   @transient private lazy val log = Logger.getLogger(getClass)
 
   def getKeyExpression(): Seq[String] = key
@@ -64,7 +65,7 @@ private[offline] class SimpleConfigurableAnchorExtractor( @JsonProperty("key") k
    * @param datum input row
    * @return list of feature keys
    */
-  override def getKeyFromRow(datum: GenericRowWithSchema): Seq[String] = {
+  override def getKeyFromRow(datum: Any): Seq[String] = {
     getKey(datum.asInstanceOf[Any])
   }
 
@@ -73,7 +74,7 @@ private[offline] class SimpleConfigurableAnchorExtractor( @JsonProperty("key") k
     // be more strict for resolving keys (don't swallow exceptions)
     keyExpression.map(k =>
       try {
-        Option(MvelContext.executeExpressionWithPluginSupport(k, datum)) match {
+        Option(MvelContext.executeExpressionWithPluginSupport(k, datum, mvelContext.orNull)) match {
           case None => null
           case Some(keys) => keys.toString
         }
@@ -92,7 +93,7 @@ private[offline] class SimpleConfigurableAnchorExtractor( @JsonProperty("key") k
     featureExpressions collect {
       case (featureRefStr, (expression, featureType)) if selectedFeatures.contains(featureRefStr) =>
-        (featureRefStr, (MvelUtils.executeExpression(expression, datum, null, featureRefStr), featureType))
+        (featureRefStr, (MvelUtils.executeExpression(expression, datum, null, featureRefStr, mvelContext), featureType))
     } collect {
       // Apply a partial function only for non-empty feature values, empty feature values will be set to default later
       case (featureRefStr, (Some(value), fType)) =>
@@ -105,7 +106,7 @@ private[offline] class SimpleConfigurableAnchorExtractor( @JsonProperty("key") k
    * @param row input row
    * @return A map of feature name to feature value
    */
-  override def getFeaturesFromRow(row: GenericRowWithSchema) = {
+  override def getFeaturesFromRow(row: Any) = {
     getFeatures(row.asInstanceOf[Any])
   }
 
@@ -145,7 +146,7 @@ private[offline] class SimpleConfigurableAnchorExtractor( @JsonProperty("key") k
           featureTypeConfigs(featureRefStr)
         }
         val featureValue = offline.FeatureValue.fromTypeConfig(value, featureTypeConfig)
-        FeatureValueTypeValidator.validate(featureValue, featureTypeConfigs(featureRefStr))
+        FeatureValueTypeValidator.validate(featureRefStr, featureValue, featureTypeConfigs(featureRefStr))
         (featureRefStr, featureValue)
       }
 
@@ -165,7 +166,7 @@ private[offline] class SimpleConfigurableAnchorExtractor( @JsonProperty("key") k
        * for building a tensor. Feature's value type and dimension type(s) are obtained via Feathr's Feature Metadata
        * Library during tensor construction.
        */
-      (featureRefStr, MvelUtils.executeExpression(expression, datum, null, featureRefStr))
+      (featureRefStr, MvelUtils.executeExpression(expression, datum, null, featureRefStr, mvelContext))
     }
   }
diff --git a/src/main/scala/com/linkedin/feathr/offline/anchored/keyExtractor/MVELSourceKeyExtractor.scala b/src/main/scala/com/linkedin/feathr/offline/anchored/keyExtractor/MVELSourceKeyExtractor.scala
index 209ac89e1..bf5108e8b 100644
--- a/src/main/scala/com/linkedin/feathr/offline/anchored/keyExtractor/MVELSourceKeyExtractor.scala
+++ b/src/main/scala/com/linkedin/feathr/offline/anchored/keyExtractor/MVELSourceKeyExtractor.scala
@@ -43,7 +43,7 @@ private[feathr] class MVELSourceKeyExtractor(val anchorExtractorV1: AnchorExtrac
       .toDF()
   }
 
-  def getKey(datum: GenericRowWithSchema): Seq[String] = {
+  def getKey(datum: Any): Seq[String] = {
     anchorExtractorV1.getKeyFromRow(datum)
   }
 
@@ -55,7 +55,7 @@ private[feathr] class MVELSourceKeyExtractor(val anchorExtractorV1: AnchorExtrac
    */
   override def getKeyColumnNames(datum: Option[Any]): Seq[String] = {
     if (datum.isDefined) {
-      val size = getKey(datum.get.asInstanceOf[GenericRowWithSchema]).size
+      val size = getKey(datum.get).size
       (1 to size).map(JOIN_KEY_PREFIX + _)
     } else {
       // return empty join key to signal empty dataset
@@ -86,5 +86,6 @@ private[feathr] class MVELSourceKeyExtractor(val anchorExtractorV1: AnchorExtrac
   // this helps to reduce the number of joins
   // to the observation data
   // The default toString does not work, because toString of each object have different values
-  override def toString: String = getClass.getSimpleName + " with keyExprs:" + keyExprs.mkString(" key:")
+  override def toString: String = getClass.getSimpleName + " with keyExprs:" + keyExprs.mkString(" key:") +
+    "anchorExtractor:" + anchorExtractorV1.toString
 }
diff --git a/src/main/scala/com/linkedin/feathr/offline/client/DataFrameColName.scala b/src/main/scala/com/linkedin/feathr/offline/client/DataFrameColName.scala
index 830bc34f1..e1ae67a88 100644
--- a/src/main/scala/com/linkedin/feathr/offline/client/DataFrameColName.scala
+++ b/src/main/scala/com/linkedin/feathr/offline/client/DataFrameColName.scala
@@ -1,5 +1,6 @@
 package com.linkedin.feathr.offline.client
 
+import com.google.common.annotations.VisibleForTesting
 import com.linkedin.feathr.common._
 import com.linkedin.feathr.common.exception.{ErrorLabel, FeathrFeatureTransformationException}
 import com.linkedin.feathr.offline.anchored.feature.FeatureAnchorWithSource
@@ -357,11 +358,13 @@ object DataFrameColName {
   /**
    * generate header info (e.g, feature type, feature column name map) for output dataframe of
    * feature join or feature generation
+   *
    * @param featureToColumnNameMap map of feature to its column name in the dataframe
    * @param inferredFeatureTypeConfigs feature name to inferred feature types
   * @return header info for a dataframe that contains the features in featureToColumnNameMap
    */
-  private def generateHeader(
+  @VisibleForTesting
+  def generateHeader(
       featureToColumnNameMap: Map[TaggedFeatureName, String],
       allAnchoredFeatures: Map[String, FeatureAnchorWithSource],
      allDerivedFeatures: Map[String, DerivedFeature],
@@ -370,13 +373,10 @@ object DataFrameColName {
     // if the feature type is unspecified in the anchor config, we will use FeatureTypes.UNSPECIFIED
     val anchoredFeatureTypes: Map[String, FeatureTypeConfig] = allAnchoredFeatures.map {
       case (featureName, anchorWithSource) =>
-        val featureTypeOpt = anchorWithSource.featureAnchor.getFeatureTypes.map(types => {
-          // Get the actual type in the output dataframe, the type is inferred and stored previously, if not specified by users
-          val inferredType = inferredFeatureTypeConfigs.getOrElse(featureName, FeatureTypeConfig.UNDEFINED_TYPE_CONFIG)
-          val fType = new FeatureTypeConfig(types.getOrElse(featureName, FeatureTypes.UNSPECIFIED))
-          if (fType == FeatureTypeConfig.UNDEFINED_TYPE_CONFIG) inferredType else fType
-        })
-        val featureType = featureTypeOpt.getOrElse(FeatureTypeConfig.UNDEFINED_TYPE_CONFIG)
+        val featureTypeOpt = anchorWithSource.featureAnchor.featureTypeConfigs.get(featureName)
+        // Get the actual type in the output dataframe, the type is inferred and stored previously, if not specified by users
+        val inferredType = inferredFeatureTypeConfigs.getOrElse(featureName, FeatureTypeConfig.UNDEFINED_TYPE_CONFIG)
+        val featureType = featureTypeOpt.getOrElse(inferredType)
         featureName -> featureType
     }
diff --git a/src/main/scala/com/linkedin/feathr/offline/client/FeathrClient.scala b/src/main/scala/com/linkedin/feathr/offline/client/FeathrClient.scala
index 45f8b2b02..b289ba3c5 100644
--- a/src/main/scala/com/linkedin/feathr/offline/client/FeathrClient.scala
+++ b/src/main/scala/com/linkedin/feathr/offline/client/FeathrClient.scala
@@ -8,9 +8,10 @@ import com.linkedin.feathr.offline.generation.{DataFrameFeatureGenerator, Featur
 import com.linkedin.feathr.offline.job._
 import com.linkedin.feathr.offline.join.DataFrameFeatureJoiner
 import com.linkedin.feathr.offline.logical.{FeatureGroups, MultiStageJoinPlanner}
+import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext
 import com.linkedin.feathr.offline.source.DataSource
 import com.linkedin.feathr.offline.source.accessor.DataPathHandler
-import com.linkedin.feathr.offline.util.{FeathrUtils, _}
+import com.linkedin.feathr.offline.util._
 import org.apache.log4j.Logger
 import org.apache.spark.sql.{DataFrame, SparkSession}
 import org.apache.spark.sql.internal.SQLConf
@@ -27,7 +28,7 @@ import scala.util.{Failure, Success}
  *
 */
 class FeathrClient private[offline] (sparkSession: SparkSession, featureGroups: FeatureGroups, logicalPlanner: MultiStageJoinPlanner,
-    featureGroupsUpdater: FeatureGroupsUpdater, dataPathHandlers: List[DataPathHandler]) {
+    featureGroupsUpdater: FeatureGroupsUpdater, dataPathHandlers: List[DataPathHandler], mvelContext: Option[FeathrExpressionExecutionContext]) {
   private val log = Logger.getLogger(getClass)
 
   type KeyTagStringTuple = Seq[String]
@@ -91,7 +92,7 @@ class FeathrClient private[offline] (sparkSession: SparkSession, featureGroups:
     // Get logical plan
     val logicalPlan = logicalPlanner.getLogicalPlan(featureGroups, keyTaggedRequiredFeatures)
     // This pattern is consistent with the join use case which uses DataFrameFeatureJoiner.
- val dataFrameFeatureGenerator = new DataFrameFeatureGenerator(logicalPlan=logicalPlan,dataPathHandlers=dataPathHandlers) + val dataFrameFeatureGenerator = new DataFrameFeatureGenerator(logicalPlan=logicalPlan,dataPathHandlers=dataPathHandlers, mvelContext) val featureMap: Map[TaggedFeatureName, (DataFrame, Header)] = dataFrameFeatureGenerator.generateFeaturesAsDF(sparkSession, featureGenSpec, featureGroups, keyTaggedRequiredFeatures) @@ -263,7 +264,7 @@ class FeathrClient private[offline] (sparkSession: SparkSession, featureGroups: s"Please rename feature ${conflictFeatureNames} or rename the same field names in the observation data.") } - val joiner = new DataFrameFeatureJoiner(logicalPlan=logicalPlan,dataPathHandlers=dataPathHandlers) + val joiner = new DataFrameFeatureJoiner(logicalPlan=logicalPlan,dataPathHandlers=dataPathHandlers, mvelContext) joiner.joinFeaturesAsDF(sparkSession, joinConfig, updatedFeatureGroups, keyTaggedFeatures, left, rowBloomFilterThreshold) } @@ -337,6 +338,7 @@ object FeathrClient { private var localOverrideDefPath: List[String] = List() private var featureDefConfs: List[FeathrConfig] = List() private var dataPathHandlers: List[DataPathHandler] = List() + private var mvelContext: Option[FeathrExpressionExecutionContext] = None; /** @@ -495,6 +497,10 @@ object FeathrClient { this.featureDefConfs = featureDefConfs this } + def addFeathrExpressionContext(_mvelContext: Option[FeathrExpressionExecutionContext]): Builder = { + this.mvelContext = _mvelContext + this + } /** * Build a new instance of the FeathrClient from the added feathr definition configs and any local overrides. @@ -529,7 +535,7 @@ object FeathrClient { featureDefConfigs = featureDefConfigs ++ featureDefConfs val featureGroups = FeatureGroupsGenerator(featureDefConfigs, Some(localDefConfigs)).getFeatureGroups() - val feathrClient = new FeathrClient(sparkSession, featureGroups, MultiStageJoinPlanner(), FeatureGroupsUpdater(), dataPathHandlers) + val feathrClient = new FeathrClient(sparkSession, featureGroups, MultiStageJoinPlanner(), FeatureGroupsUpdater(), dataPathHandlers, mvelContext) feathrClient } diff --git a/src/main/scala/com/linkedin/feathr/offline/client/plugins/FeathrUdfPluginContext.scala b/src/main/scala/com/linkedin/feathr/offline/client/plugins/FeathrUdfPluginContext.scala index 852c2a2e6..d67e5b6d5 100644 --- a/src/main/scala/com/linkedin/feathr/offline/client/plugins/FeathrUdfPluginContext.scala +++ b/src/main/scala/com/linkedin/feathr/offline/client/plugins/FeathrUdfPluginContext.scala @@ -1,4 +1,6 @@ package com.linkedin.feathr.offline.client.plugins +import org.apache.spark.SparkContext +import org.apache.spark.broadcast.Broadcast import scala.collection.mutable @@ -9,15 +11,21 @@ import scala.collection.mutable * All "external" UDF classes are required to have a public default zero-arg constructor. 
*/ object FeathrUdfPluginContext { - val registeredUdfAdaptors = mutable.Buffer[UdfAdaptor[_]]() - - def registerUdfAdaptor(adaptor: UdfAdaptor[_]): Unit = { + private val localRegisteredUdfAdaptors = mutable.Buffer[UdfAdaptor[_]]() + private var registeredUdfAdaptors: Broadcast[mutable.Buffer[UdfAdaptor[_]]] = null + def registerUdfAdaptor(adaptor: UdfAdaptor[_], sc: SparkContext): Unit = { this.synchronized { - registeredUdfAdaptors += adaptor + localRegisteredUdfAdaptors += adaptor + if (registeredUdfAdaptors != null) { + registeredUdfAdaptors.destroy() + } + registeredUdfAdaptors = sc.broadcast(localRegisteredUdfAdaptors) } } def getRegisteredUdfAdaptor(clazz: Class[_]): Option[UdfAdaptor[_]] = { - registeredUdfAdaptors.find(_.canAdapt(clazz)) + if (registeredUdfAdaptors != null) { + registeredUdfAdaptors.value.find(_.canAdapt(clazz)) + } else None } } \ No newline at end of file diff --git a/src/main/scala/com/linkedin/feathr/offline/config/FeathrConfigLoader.scala b/src/main/scala/com/linkedin/feathr/offline/config/FeathrConfigLoader.scala index 1faf0d814..e2ec6e588 100644 --- a/src/main/scala/com/linkedin/feathr/offline/config/FeathrConfigLoader.scala +++ b/src/main/scala/com/linkedin/feathr/offline/config/FeathrConfigLoader.scala @@ -327,7 +327,7 @@ private[offline] class AnchorLoader extends JsonDeserializer[FeatureAnchor] { case Some(tType) => offline.FeatureValue.fromTypeConfig(rawValue, tType) case None => offline.FeatureValue(rawValue, featureType, key) } - FeatureValueTypeValidator.validate(featureValue, featureTypeConfig) + FeatureValueTypeValidator.validate(featureValue, featureTypeConfig, key) (key, featureValue) } .toMap diff --git a/src/main/scala/com/linkedin/feathr/offline/config/location/GenericLocation.scala b/src/main/scala/com/linkedin/feathr/offline/config/location/GenericLocation.scala index 80fa47b22..9a4bbb33a 100644 --- a/src/main/scala/com/linkedin/feathr/offline/config/location/GenericLocation.scala +++ b/src/main/scala/com/linkedin/feathr/offline/config/location/GenericLocation.scala @@ -186,6 +186,18 @@ object GenericLocationAdHocPatches { .mode(location.mode.getOrElse("overwrite")) // I don't see if ElasticSearch uses it in any doc .save() } + case "aerospike" => + val keyDf = if (!df.columns.contains("__key")) { + df.withColumn("__key", (monotonically_increasing_id().cast("string"))) + } + else { + df + } + keyDf.write.format(location.format) + .option("aerospike.updatebykey", "__key") + .options(location.options) + .mode(location.mode.getOrElse("append")) + .save() case _ => // Normal writing procedure, just set format and options then write df.write.format(location.format) diff --git a/src/main/scala/com/linkedin/feathr/offline/derived/DerivedFeatureEvaluator.scala b/src/main/scala/com/linkedin/feathr/offline/derived/DerivedFeatureEvaluator.scala index 36de11fae..59dd8ea8e 100644 --- a/src/main/scala/com/linkedin/feathr/offline/derived/DerivedFeatureEvaluator.scala +++ b/src/main/scala/com/linkedin/feathr/offline/derived/DerivedFeatureEvaluator.scala @@ -1,18 +1,19 @@ package com.linkedin.feathr.offline.derived -import com.linkedin.feathr.{common, offline} -import com.linkedin.feathr.common.{FeatureDerivationFunction, FeatureTypeConfig} import com.linkedin.feathr.common.exception.{ErrorLabel, FeathrException} -import com.linkedin.feathr.offline.{ErasedEntityTaggedFeature, FeatureDataFrame} +import com.linkedin.feathr.common.{FeatureDerivationFunction, FeatureTypeConfig} import com.linkedin.feathr.offline.client.DataFrameColName import 
com.linkedin.feathr.offline.client.plugins.{FeathrUdfPluginContext, FeatureDerivationFunctionAdaptor} -import com.linkedin.feathr.offline.derived.functions.SeqJoinDerivationFunction -import com.linkedin.feathr.offline.derived.strategies.{DerivationStrategies, RowBasedDerivation, SequentialJoinAsDerivation, SparkUdfDerivation} +import com.linkedin.feathr.offline.derived.functions.{MvelFeatureDerivationFunction, SQLFeatureDerivationFunction, SeqJoinDerivationFunction} +import com.linkedin.feathr.offline.derived.strategies._ import com.linkedin.feathr.offline.join.algorithms.{SequentialJoinConditionBuilder, SparkJoinWithJoinCondition} import com.linkedin.feathr.offline.logical.FeatureGroups -import com.linkedin.feathr.offline.util.FeaturizedDatasetUtils +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.source.accessor.DataPathHandler +import com.linkedin.feathr.offline.util.FeaturizedDatasetUtils +import com.linkedin.feathr.offline.{ErasedEntityTaggedFeature, FeatureDataFrame} import com.linkedin.feathr.sparkcommon.FeatureDerivationFunctionSpark +import com.linkedin.feathr.{common, offline} import org.apache.log4j.Logger import org.apache.spark.sql.{DataFrame, SparkSession} @@ -20,7 +21,7 @@ import org.apache.spark.sql.{DataFrame, SparkSession} * This class is responsible for applying feature derivations. * @param derivationStrategies strategies for executing various derivation functions. */ -private[offline] class DerivedFeatureEvaluator(derivationStrategies: DerivationStrategies) { +private[offline] class DerivedFeatureEvaluator(derivationStrategies: DerivationStrategies, mvelContext: Option[FeathrExpressionExecutionContext]) { /** * Calculate a derived feature, this function support all kinds of derived features @@ -39,23 +40,26 @@ private[offline] class DerivedFeatureEvaluator(derivationStrategies: DerivationS derivedFeature.derivation match { case g: SeqJoinDerivationFunction => - val resultDF = derivationStrategies.sequentialJoinDerivationStrategy(keyTag, keyTagList, contextDF, derivedFeature, g) + val resultDF = derivationStrategies.sequentialJoinDerivationStrategy(keyTag, keyTagList, contextDF, derivedFeature, g, mvelContext) convertFeatureColumnToQuinceFds(producedFeatureColName, derivedFeature, resultDF) case h: FeatureDerivationFunctionSpark => - val resultDF = derivationStrategies.customDerivationSparkStrategy(keyTag, keyTagList, contextDF, derivedFeature, h) + val resultDF = derivationStrategies.customDerivationSparkStrategy(keyTag, keyTagList, contextDF, derivedFeature, h, mvelContext) + convertFeatureColumnToQuinceFds(producedFeatureColName, derivedFeature, resultDF) + case s: SQLFeatureDerivationFunction => + val resultDF = derivationStrategies.sqlDerivationSparkStrategy(keyTag, keyTagList, contextDF, derivedFeature, s, mvelContext) convertFeatureColumnToQuinceFds(producedFeatureColName, derivedFeature, resultDF) case x: FeatureDerivationFunction => // We should do the FDS conversion inside the rowBasedDerivationStrategy here. The result of rowBasedDerivationStrategy // can be NTV FeatureValue or TensorData-based Feature. NTV FeatureValue has fixed FDS schema. However, TensorData // doesn't have fixed DataFrame schema so that we can't return TensorData but has to return FDS. 
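// --- Editorial sketch (not part of this patch): the match above dispatches each derivation
// function to its strategy, and every strategy now receives the optional MVEL context as an
// explicit argument instead of shared state. A reduced, self-contained model of that shape
// (all names below are hypothetical):
object StrategyDispatchSketch {
  sealed trait DerivationFn
  final case class SqlFn(expr: String) extends DerivationFn
  final case class MvelFn(expr: String) extends DerivationFn

  trait Strategy[T <: DerivationFn] { def apply(fn: T, ctx: Option[String]): String }

  private val sqlStrategy: Strategy[SqlFn] = (fn, _) => s"sql(${fn.expr})" // SQL path ignores the MVEL context
  private val mvelStrategy: Strategy[MvelFn] = (fn, ctx) => s"mvel(${fn.expr}, ctx=$ctx)"

  def evaluate(fn: DerivationFn, ctx: Option[String]): String = fn match {
    case s: SqlFn  => sqlStrategy(s, ctx)  // mirrors the SQLFeatureDerivationFunction case
    case m: MvelFn => mvelStrategy(m, ctx) // mirrors the row-based/MVEL case
  }
}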
- val resultDF = derivationStrategies.rowBasedDerivationStrategy(keyTag, keyTagList, contextDF, derivedFeature, x) + val resultDF = derivationStrategies.rowBasedDerivationStrategy(keyTag, keyTagList, contextDF, derivedFeature, x, mvelContext) offline.FeatureDataFrame(resultDF, getTypeConfigs(producedFeatureColName, derivedFeature, resultDF)) case derivation => FeathrUdfPluginContext.getRegisteredUdfAdaptor(derivation.getClass) match { case Some(adaptor: FeatureDerivationFunctionAdaptor) => // replicating the FeatureDerivationFunction case above val featureDerivationFunction = adaptor.adaptUdf(derivation) - val resultDF = derivationStrategies.rowBasedDerivationStrategy(keyTag, keyTagList, contextDF, derivedFeature, featureDerivationFunction) + val resultDF = derivationStrategies.rowBasedDerivationStrategy(keyTag, keyTagList, contextDF, derivedFeature, featureDerivationFunction, mvelContext) offline.FeatureDataFrame(resultDF, getTypeConfigs(producedFeatureColName, derivedFeature, resultDF)) case _ => throw new FeathrException(ErrorLabel.FEATHR_ERROR, s"Unsupported feature derivation function for feature ${derivedFeature.producedFeatureNames.head}.") @@ -108,17 +112,18 @@ private[offline] class DerivedFeatureEvaluator(derivationStrategies: DerivationS private[offline] object DerivedFeatureEvaluator { private val log = Logger.getLogger(getClass) - def apply(derivationStrategies: DerivationStrategies): DerivedFeatureEvaluator = new DerivedFeatureEvaluator(derivationStrategies) + def apply(derivationStrategies: DerivationStrategies, mvelContext: Option[FeathrExpressionExecutionContext]): DerivedFeatureEvaluator = new DerivedFeatureEvaluator(derivationStrategies, mvelContext) def apply(ss: SparkSession, featureGroups: FeatureGroups, - dataPathHandlers: List[DataPathHandler]): DerivedFeatureEvaluator = { + dataPathHandlers: List[DataPathHandler], + mvelContext: Option[FeathrExpressionExecutionContext]): DerivedFeatureEvaluator = { val defaultStrategies = strategies.DerivationStrategies( new SparkUdfDerivation(), - new RowBasedDerivation(featureGroups.allTypeConfigs), - new SequentialJoinAsDerivation(ss, featureGroups, SparkJoinWithJoinCondition(SequentialJoinConditionBuilder), dataPathHandlers) - ) - new DerivedFeatureEvaluator(defaultStrategies) + new RowBasedDerivation(featureGroups.allTypeConfigs, mvelContext), + new SequentialJoinAsDerivation(ss, featureGroups, SparkJoinWithJoinCondition(SequentialJoinConditionBuilder), dataPathHandlers), + new SqlDerivationSpark()) + new DerivedFeatureEvaluator(defaultStrategies, mvelContext) } /** @@ -132,7 +137,9 @@ private[offline] object DerivedFeatureEvaluator { def evaluateFromFeatureValues( keyTag: Seq[Int], derivedFeature: DerivedFeature, - contextFeatureValues: Map[common.ErasedEntityTaggedFeature, common.FeatureValue]): Map[common.ErasedEntityTaggedFeature, common.FeatureValue] = { + contextFeatureValues: Map[common.ErasedEntityTaggedFeature, common.FeatureValue], + mvelContext: Option[FeathrExpressionExecutionContext] + ): Map[common.ErasedEntityTaggedFeature, common.FeatureValue] = { try { val linkedInputParams = derivedFeature.consumedFeatureNames.map { case ErasedEntityTaggedFeature(calleeTag, featureName) => @@ -141,7 +148,13 @@ private[offline] object DerivedFeatureEvaluator { // for features with value `null`, convert Some(null) to None, to avoid null pointer exception in downstream analysis val keyedContextFeatureValues = contextFeatureValues.map(kv => (kv._1.getErasedTagFeatureName, kv._2)) val resolvedInputArgs = 
linkedInputParams.map(taggedFeature => keyedContextFeatureValues.get(taggedFeature.getErasedTagFeatureName).flatMap(Option(_))) - val unlinkedOutput = derivedFeature.getAsFeatureDerivationFunction.getFeatures(resolvedInputArgs) + val derivedFunc = derivedFeature.getAsFeatureDerivationFunction match { + case derivedFunc: MvelFeatureDerivationFunction => + derivedFunc.mvelContext = mvelContext + derivedFunc + case func => func + } + val unlinkedOutput = derivedFunc.getFeatures(resolvedInputArgs) val callerKeyTags = derivedFeature.producedFeatureNames.map(ErasedEntityTaggedFeature(keyTag, _)) // This would indicate a problem with the DerivedFeature, where there are a different number of features included in diff --git a/src/main/scala/com/linkedin/feathr/offline/derived/functions/MvelFeatureDerivationFunction.scala b/src/main/scala/com/linkedin/feathr/offline/derived/functions/MvelFeatureDerivationFunction.scala index 58902e669..42f09ad21 100644 --- a/src/main/scala/com/linkedin/feathr/offline/derived/functions/MvelFeatureDerivationFunction.scala +++ b/src/main/scala/com/linkedin/feathr/offline/derived/functions/MvelFeatureDerivationFunction.scala @@ -4,6 +4,7 @@ import com.linkedin.feathr.common import com.linkedin.feathr.common.{FeatureDerivationFunction, FeatureTypeConfig, TaggedFeatureName} import com.linkedin.feathr.offline.FeatureValue import com.linkedin.feathr.offline.config.TaggedDependency +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.mvel.{FeatureVariableResolverFactory, MvelContext, MvelUtils} import org.mvel2.MVEL @@ -31,6 +32,7 @@ private[offline] class MvelFeatureDerivationFunction( featureTypeConfigOpt: Option[FeatureTypeConfig] = None) extends FeatureDerivationFunction { + var mvelContext: Option[FeathrExpressionExecutionContext] = None val parameterNames: Seq[String] = inputFeatures.keys.toIndexedSeq private val compiledExpression = { @@ -42,7 +44,7 @@ private[offline] class MvelFeatureDerivationFunction( val argMap = (parameterNames zip inputs).toMap val variableResolverFactory = new FeatureVariableResolverFactory(argMap) - MvelUtils.executeExpression(compiledExpression, null, variableResolverFactory) match { + MvelUtils.executeExpression(compiledExpression, null, variableResolverFactory, featureName, mvelContext) match { case Some(value) => val featureTypeConfig = featureTypeConfigOpt.getOrElse(FeatureTypeConfig.UNDEFINED_TYPE_CONFIG) if (value.isInstanceOf[common.FeatureValue]) { diff --git a/src/main/scala/com/linkedin/feathr/offline/derived/functions/SimpleMvelDerivationFunction.scala b/src/main/scala/com/linkedin/feathr/offline/derived/functions/SimpleMvelDerivationFunction.scala index 9e1f6b0bb..203d1886f 100644 --- a/src/main/scala/com/linkedin/feathr/offline/derived/functions/SimpleMvelDerivationFunction.scala +++ b/src/main/scala/com/linkedin/feathr/offline/derived/functions/SimpleMvelDerivationFunction.scala @@ -3,6 +3,7 @@ package com.linkedin.feathr.offline.derived.functions import com.linkedin.feathr.common import com.linkedin.feathr.common.{FeatureDerivationFunction, FeatureTypeConfig} import com.linkedin.feathr.offline.FeatureValue +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.mvel.{FeatureVariableResolverFactory, MvelContext, MvelUtils} import com.linkedin.feathr.offline.testfwk.TestFwkUtils import org.apache.log4j.Logger @@ -19,6 +20,7 @@ private[offline] class SimpleMvelDerivationFunction(expression: String, featureN 
extends FeatureDerivationFunction { @transient private lazy val log = Logger.getLogger(getClass) + var mvelContext: Option[FeathrExpressionExecutionContext] = None // strictMode should only be modified by FeathrConfigLoader when loading config, default value to be false var strictMode = false @@ -51,7 +53,7 @@ private[offline] class SimpleMvelDerivationFunction(expression: String, featureN } } - MvelUtils.executeExpression(compiledExpression, null, variableResolverFactory) match { + MvelUtils.executeExpression(compiledExpression, null, variableResolverFactory, featureName, mvelContext) match { case Some(value) => val featureTypeConfig = featureTypeConfigOpt.getOrElse(FeatureTypeConfig.UNDEFINED_TYPE_CONFIG) val featureValue = FeatureValue.fromTypeConfig(value, featureTypeConfig) diff --git a/src/main/scala/com/linkedin/feathr/offline/derived/strategies/DerivationStrategies.scala b/src/main/scala/com/linkedin/feathr/offline/derived/strategies/DerivationStrategies.scala index 6f7ea1eab..13fbec9c7 100644 --- a/src/main/scala/com/linkedin/feathr/offline/derived/strategies/DerivationStrategies.scala +++ b/src/main/scala/com/linkedin/feathr/offline/derived/strategies/DerivationStrategies.scala @@ -1,8 +1,9 @@ package com.linkedin.feathr.offline.derived.strategies import com.linkedin.feathr.common.{FeatureDerivationFunction, FeatureDerivationFunctionBase} -import com.linkedin.feathr.offline.derived.functions.SeqJoinDerivationFunction import com.linkedin.feathr.offline.derived.DerivedFeature +import com.linkedin.feathr.offline.derived.functions.{SQLFeatureDerivationFunction, SeqJoinDerivationFunction} +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.sparkcommon.FeatureDerivationFunctionSpark import org.apache.spark.sql.DataFrame @@ -12,7 +13,6 @@ import org.apache.spark.sql.DataFrame * A derivation strategy encapsulates the execution of derivations. */ private[offline] trait DerivationStrategy[T <: FeatureDerivationFunctionBase] { - /** * Apply the derivation strategy. * @param keyTags keyTags for the derived feature. @@ -22,7 +22,7 @@ private[offline] trait DerivationStrategy[T <: FeatureDerivationFunctionBase] { * @param derivationFunction Derivation function to evaluate the derived feature * @return output DataFrame with derived feature. */ - def apply(keyTags: Seq[Int], keyTagList: Seq[String], df: DataFrame, derivedFeature: DerivedFeature, derivationFunction: T): DataFrame + def apply(keyTags: Seq[Int], keyTagList: Seq[String], df: DataFrame, derivedFeature: DerivedFeature, derivationFunction: T, mvelContext: Option[FeathrExpressionExecutionContext]): DataFrame } /** @@ -41,10 +41,17 @@ private[offline] trait RowBasedDerivationStrategy extends DerivationStrategy[Fea */ private[offline] trait SequentialJoinDerivationStrategy extends DerivationStrategy[SeqJoinDerivationFunction] +/** + * Implementation should define how a SQL-expression based derivation is evaluated. + */ +private[offline] trait SqlDerivationSparkStrategy extends DerivationStrategy[SQLFeatureDerivationFunction] + /** * This case class holds the implementations of supported strategies. 
*/ private[offline] case class DerivationStrategies( customDerivationSparkStrategy: SparkUdfDerivationStrategy, rowBasedDerivationStrategy: RowBasedDerivationStrategy, - sequentialJoinDerivationStrategy: SequentialJoinDerivationStrategy) + sequentialJoinDerivationStrategy: SequentialJoinDerivationStrategy, + sqlDerivationSparkStrategy: SqlDerivationSparkStrategy) { +} diff --git a/src/main/scala/com/linkedin/feathr/offline/derived/strategies/RowBasedDerivation.scala b/src/main/scala/com/linkedin/feathr/offline/derived/strategies/RowBasedDerivation.scala index 389c530ee..ca78ff464 100644 --- a/src/main/scala/com/linkedin/feathr/offline/derived/strategies/RowBasedDerivation.scala +++ b/src/main/scala/com/linkedin/feathr/offline/derived/strategies/RowBasedDerivation.scala @@ -6,6 +6,7 @@ import com.linkedin.feathr.common.{FeatureDerivationFunction, FeatureTypeConfig, import com.linkedin.feathr.offline.ErasedEntityTaggedFeature import com.linkedin.feathr.offline.client.DataFrameColName import com.linkedin.feathr.offline.derived.{DerivedFeature, DerivedFeatureEvaluator} +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.testfwk.TestFwkUtils import com.linkedin.feathr.offline.transformation.FDSConversionUtils import com.linkedin.feathr.offline.util.FeaturizedDatasetUtils.tensorTypeToDataFrameSchema @@ -21,7 +22,9 @@ import scala.collection.mutable /** * This class executes custom derivation logic defined in an implementation of FeatureDerivationFunction. */ -class RowBasedDerivation(dependentFeatureTypeConfigs: Map[String, FeatureTypeConfig]) extends RowBasedDerivationStrategy with Serializable { +class RowBasedDerivation(dependentFeatureTypeConfigs: Map[String, FeatureTypeConfig], + val mvelContext: Option[FeathrExpressionExecutionContext], + ) extends RowBasedDerivationStrategy with Serializable { /** * Calculate a Row-based derived features such as Mvel based derivations or UDFs. 
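// --- Editorial sketch (not part of this patch): with the new sqlDerivationSparkStrategy
// slot, a complete DerivationStrategies is assembled exactly as DerivedFeatureEvaluator.apply
// does earlier in this patch; `ss`, `featureGroups`, `dataPathHandlers` and `mvelContext`
// are assumed to be in scope.
val allStrategies = DerivationStrategies(
  new SparkUdfDerivation(),
  new RowBasedDerivation(featureGroups.allTypeConfigs, mvelContext),
  new SequentialJoinAsDerivation(ss, featureGroups, SparkJoinWithJoinCondition(SequentialJoinConditionBuilder), dataPathHandlers),
  new SqlDerivationSpark())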
@@ -44,7 +47,8 @@ class RowBasedDerivation(dependentFeatureTypeConfigs: Map[String, FeatureTypeCon keyTagList: Seq[String], df: DataFrame, derivedFeature: DerivedFeature, - derivationFunction: FeatureDerivationFunction): DataFrame = { + derivationFunction: FeatureDerivationFunction, + mvelContext: Option[FeathrExpressionExecutionContext]): DataFrame = { if (derivationFunction.isInstanceOf[FeatureDerivationFunctionSpark]) { throw new FeathrException(ErrorLabel.FEATHR_USER_ERROR, s"Unsupported user customized derived feature ${derivedFeature.producedFeatureNames}") } @@ -96,7 +100,7 @@ class RowBasedDerivation(dependentFeatureTypeConfigs: Map[String, FeatureTypeCon contextFeatureValues.put(ErasedEntityTaggedFeature(dependFeature.getBinding, dependFeature.getFeatureName), featureValue) }) // calculate using original function - val features = DerivedFeatureEvaluator.evaluateFromFeatureValues(keyTags, derivedFeature, contextFeatureValues.toMap) + val features = DerivedFeatureEvaluator.evaluateFromFeatureValues(keyTags, derivedFeature, contextFeatureValues.toMap, mvelContext) val taggFeatures = features.map(kv => (kv._1.getErasedTagFeatureName, kv._2)) val featureValues = featureNames.map(featureName => { taggFeatures.get(ErasedEntityTaggedFeature(keyTags, featureName).getErasedTagFeatureName).map { featureValue => diff --git a/src/main/scala/com/linkedin/feathr/offline/derived/strategies/SequentialJoinAsDerivation.scala b/src/main/scala/com/linkedin/feathr/offline/derived/strategies/SequentialJoinAsDerivation.scala index 9cc3080d9..2cee39d95 100644 --- a/src/main/scala/com/linkedin/feathr/offline/derived/strategies/SequentialJoinAsDerivation.scala +++ b/src/main/scala/com/linkedin/feathr/offline/derived/strategies/SequentialJoinAsDerivation.scala @@ -14,13 +14,14 @@ import com.linkedin.feathr.offline.job.FeatureTransformation._ import com.linkedin.feathr.offline.job.{AnchorFeatureGroups, FeatureTransformation, KeyedTransformedResult} import com.linkedin.feathr.offline.join.algorithms.{JoinType, SeqJoinExplodedJoinKeyColumnAppender, SparkJoinWithJoinCondition} import com.linkedin.feathr.offline.logical.FeatureGroups +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.source.accessor.DataPathHandler import com.linkedin.feathr.offline.transformation.DataFrameDefaultValueSubstituter.substituteDefaults import com.linkedin.feathr.offline.transformation.{AnchorToDataSourceMapper, MvelDefinition} -import com.linkedin.feathr.offline.util.{CoercionUtilsScala, DataFrameSplitterMerger, FeaturizedDatasetUtils, FeathrUtils} +import com.linkedin.feathr.offline.util.{CoercionUtilsScala, DataFrameSplitterMerger, FeathrUtils, FeaturizedDatasetUtils} import com.linkedin.feathr.sparkcommon.{ComplexAggregation, SeqJoinCustomAggregation} import org.apache.log4j.Logger -import org.apache.spark.sql.functions.{expr, udf, _} +import org.apache.spark.sql.functions._ import org.apache.spark.sql.types._ import org.apache.spark.sql.{Column, DataFrame, Row, SparkSession} @@ -44,7 +45,8 @@ private[offline] class SequentialJoinAsDerivation(ss: SparkSession, keyTagList: Seq[String], df: DataFrame, derivedFeature: DerivedFeature, - derivationFunction: SeqJoinDerivationFunction): DataFrame = { + derivationFunction: SeqJoinDerivationFunction, + mvelContext: Option[FeathrExpressionExecutionContext]): DataFrame = { val allAnchoredFeatures = featureGroups.allAnchoredFeatures // gather sequential join feature info val seqJoinDerivationFunction = derivationFunction @@ 
-70,7 +72,7 @@ private[offline] class SequentialJoinAsDerivation(ss: SparkSession, */ val (expansion, expansionJoinKey): (DataFrame, Seq[String]) = if (allAnchoredFeatures.contains(expansionFeatureName)) { // prepare and get right table - loadExpansionAnchor(expansionFeatureName, derivedFeature, allAnchoredFeatures, seqJoinColumnName) + loadExpansionAnchor(expansionFeatureName, derivedFeature, allAnchoredFeatures, seqJoinColumnName, mvelContext) } else { throw new FeathrException( ErrorLabel.FEATHR_ERROR, @@ -93,7 +95,7 @@ private[offline] class SequentialJoinAsDerivation(ss: SparkSession, Map(baseTaggedDependency.feature -> MvelDefinition(transformation)) } getOrElse Map.empty[String, MvelDefinition] - val left: DataFrame = PostTransformationUtil.transformFeatures(featureNameColumnTuples, obsWithLeftJoined, transformationDef, getDefaultTransformation) + val left: DataFrame = PostTransformationUtil.transformFeatures(featureNameColumnTuples, obsWithLeftJoined, transformationDef, getDefaultTransformation, mvelContext) // Partition build side of the join based on null values val (dfWithNoNull, dfWithNull) = DataFrameSplitterMerger.splitOnNull(left, baseFeatureJoinKey.head) @@ -207,7 +209,8 @@ private[offline] class SequentialJoinAsDerivation(ss: SparkSession, def getAnchorFeatureDF( allAnchoredFeatures: Map[String, FeatureAnchorWithSource], anchorFeatureName: String, - anchorToDataSourceMapper: AnchorToDataSourceMapper): KeyedTransformedResult = { + anchorToDataSourceMapper: AnchorToDataSourceMapper, + mvelContext: Option[FeathrExpressionExecutionContext]): KeyedTransformedResult = { val featureAnchor = allAnchoredFeatures(anchorFeatureName) val requestedFeatures = featureAnchor.featureAnchor.getProvidedFeatureNames val anchorGroup = AnchorFeatureGroups(Seq(featureAnchor), requestedFeatures) @@ -219,7 +222,9 @@ private[offline] class SequentialJoinAsDerivation(ss: SparkSession, anchorDFMap1(featureAnchor), featureAnchor.featureAnchor.sourceKeyExtractor, None, - None) + None, + None, + mvelContext) (featureInfo) } @@ -590,10 +595,11 @@ private[offline] class SequentialJoinAsDerivation(ss: SparkSession, expansionFeatureName: String, derivedFeature: DerivedFeature, allAnchoredFeatures: Map[String, FeatureAnchorWithSource], - seqJoinproducedFeatureName: String): (DataFrame, Seq[String]) = { + seqJoinproducedFeatureName: String, + mvelContext: Option[FeathrExpressionExecutionContext]): (DataFrame, Seq[String]) = { val expansionFeatureKeys = (derivedFeature.derivation.asInstanceOf[SeqJoinDerivationFunction].right.key) val expansionAnchor = allAnchoredFeatures(expansionFeatureName) - val expandFeatureInfo = getAnchorFeatureDF(allAnchoredFeatures, expansionFeatureName, new AnchorToDataSourceMapper(dataPathHandlers)) + val expandFeatureInfo = getAnchorFeatureDF(allAnchoredFeatures, expansionFeatureName, new AnchorToDataSourceMapper(dataPathHandlers), mvelContext) val transformedFeatureDF = expandFeatureInfo.transformedResult.df val expansionAnchorKeyColumnNames = expandFeatureInfo.joinKey if (expansionFeatureKeys.size != expansionAnchorKeyColumnNames.size) { diff --git a/src/main/scala/com/linkedin/feathr/offline/derived/strategies/SparkUdfDerivation.scala b/src/main/scala/com/linkedin/feathr/offline/derived/strategies/SparkUdfDerivation.scala index ba65e3f23..1d4a9212e 100644 --- a/src/main/scala/com/linkedin/feathr/offline/derived/strategies/SparkUdfDerivation.scala +++ b/src/main/scala/com/linkedin/feathr/offline/derived/strategies/SparkUdfDerivation.scala @@ -6,6 +6,7 @@ import 
com.linkedin.feathr.offline.ErasedEntityTaggedFeature import com.linkedin.feathr.offline.client.DataFrameColName import com.linkedin.feathr.offline.derived.DerivedFeature import com.linkedin.feathr.offline.exception.FeatureTransformationException +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.sparkcommon.FeatureDerivationFunctionSpark import org.apache.spark.sql.DataFrame @@ -30,7 +31,8 @@ class SparkUdfDerivation extends SparkUdfDerivationStrategy { keyTagList: Seq[String], df: DataFrame, derivedFeature: DerivedFeature, - derivationFunction: FeatureDerivationFunctionSpark): DataFrame = { + derivationFunction: FeatureDerivationFunctionSpark, + mvelContext: Option[FeathrExpressionExecutionContext]): DataFrame = { if (derivedFeature.parameterNames.isEmpty) { throw new FeathrException( ErrorLabel.FEATHR_USER_ERROR, diff --git a/src/main/scala/com/linkedin/feathr/offline/derived/strategies/SqlDerivationSpark.scala b/src/main/scala/com/linkedin/feathr/offline/derived/strategies/SqlDerivationSpark.scala new file mode 100644 index 000000000..c7b44c1cf --- /dev/null +++ b/src/main/scala/com/linkedin/feathr/offline/derived/strategies/SqlDerivationSpark.scala @@ -0,0 +1,118 @@ +package com.linkedin.feathr.offline.derived.strategies + +import com.linkedin.feathr.common.exception.{ErrorLabel, FeathrFeatureTransformationException} +import com.linkedin.feathr.offline.client.DataFrameColName +import com.linkedin.feathr.offline.derived.DerivedFeature +import com.linkedin.feathr.offline.derived.functions.SQLFeatureDerivationFunction +import com.linkedin.feathr.offline.job.FeatureTransformation.FEATURE_NAME_PREFIX +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext +import org.apache.spark.sql.functions.expr +import org.apache.spark.sql.{DataFrame, SparkSession} + +import scala.collection.JavaConverters._ + +/** + * This class executes SQL-expression based derived features. + */ +class SqlDerivationSpark extends SqlDerivationSparkStrategy { + + + /** + * Rewrite the SQL expression of a derived feature, e.g., replace each feature name/argument name with the Feathr-internal dataframe column name + * @param deriveFeature derived feature definition + * @param keyTag list of tags represented by integers + * @param keyTagId2StringMap Map from the tag integer id to the string tag + * @param asIsFeatureNames feature names that do not need to be rewritten, i.e.
passthrough features, as they do not have key tags + * @return Rewritten SQL expression + */ + private[offline] def rewriteDerivedFeatureExpression( + deriveFeature: DerivedFeature, + keyTag: Seq[Int], + keyTagId2StringMap: Seq[String], + asIsFeatureNames: Set[String]): String = { + if (!deriveFeature.derivation.isInstanceOf[SQLFeatureDerivationFunction]) { + throw new FeathrFeatureTransformationException(ErrorLabel.FEATHR_ERROR, "Should not rewrite derived feature expression for non-SQLDerivedFeatures") + } + val sqlDerivation = deriveFeature.derivation.asInstanceOf[SQLFeatureDerivationFunction] + val deriveExpr = sqlDerivation.getExpression() + val parameterNames: Seq[String] = sqlDerivation.getParameterNames().getOrElse(Seq[String]()) + val consumedFeatureNames = deriveFeature.consumedFeatureNames.zipWithIndex.map { + case (consumeFeatureName, index) => + // beginning of string, or any char other than a digit or letter + // val featureStartPattern = """(^|[^a-zA-Z0-9])""" + // end of string, or any char other than a digit or letter + // val featureEndPattern = """($|[^a-zA-Z0-9])""" + val namePattern = if (parameterNames.isEmpty) consumeFeatureName.getFeatureName else parameterNames(index) + // getBinding.map(keyTag.get) resolves the call tags + val newName = + if (!asIsFeatureNames.contains(FEATURE_NAME_PREFIX + consumeFeatureName.getFeatureName) + // Feature generation code path does not create columns with tags. + // The check ensures we do not run into IndexOutOfBoundsException when keyTag & keyTagId2StringMap are empty. + && keyTag.nonEmpty + && keyTagId2StringMap.nonEmpty) { + DataFrameColName.genFeatureColumnName( + consumeFeatureName.getFeatureName, + Some(consumeFeatureName.getBinding.asScala.map(keyTag(_)).map(keyTagId2StringMap))) + } else { + DataFrameColName.genFeatureColumnName(consumeFeatureName.getFeatureName) + } + (namePattern, newName) + }.toMap + + // replace all feature names with column names + // feature names consist of alphanumeric characters + val ss: SparkSession = SparkSession.builder().getOrCreate() + val dependencyFeatures = ss.sessionState.sqlParser.parseExpression(deriveExpr).references.map(_.name).toSeq + // \w is [a-zA-Z0-9_]; note the inclusion of _ and the exclusion of -, as - is ambiguous, e.g., a-b could be a feature name or feature a minus feature b + val rewrittenExpr = dependencyFeatures.foldLeft(deriveExpr)((acc, ca) => { + // in Scala, \W does not work the same as [^\w], hence the four explicit boundary patterns below + // "a+B+1".replaceAll("([^\w])B([^\w])", "$1abc$2") == "a+abc+1" + // "a+B".replaceAll("([^\w])B$", "$1abc") == "a+abc" + // "B+1".replaceAll("^B([^\w])", "abc$1") == "abc+1" + // "B".replaceAll("^B$", "abc") == "abc" + val newVal = consumedFeatureNames.getOrElse(ca, ca) + val patterns = Seq("([^\\w])" + ca + "([^\\w])", "([^\\w])" + ca + "$", "^" + ca + "([^\\w])", "^" + ca + "$") + val replacements = Seq("$1" + newVal + "$2", "$1" + newVal, newVal + "$1", newVal) + val replacedExpr = patterns + .zip(replacements) + .toMap + .foldLeft(acc)((orig, pairs) => { + orig.replaceAll(pairs._1, pairs._2) + }) + replacedExpr + }) + rewrittenExpr + } + + /** + * Apply the derivation strategy. + * + * @param keyTags keyTags for the derived feature. + * @param keyTagList integer keyTag to string keyTag map. + * @param df input DataFrame. + * @param derivedFeature Derived feature metadata. + * @param derivationFunction Derivation function to evaluate the derived feature + * @return output DataFrame with derived feature.
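// --- Editorial sketch (not part of this patch): the boundary-aware renaming that
// rewriteDerivedFeatureExpression performs above, isolated as a standalone helper.
// Guarding each occurrence with non-word-character (or string-edge) boundaries keeps a
// feature name from being replaced inside a longer identifier. Assumes, like the code
// above, that names are alphanumeric (no regex metacharacters).
object FeatureNameRewriteSketch {
  def rename(expr: String, from: String, to: String): String = {
    // the same four patterns as above: name bounded on both sides, at end, at start, or alone
    val patterns = Seq("([^\\w])" + from + "([^\\w])", "([^\\w])" + from + "$", "^" + from + "([^\\w])", "^" + from + "$")
    val replacements = Seq("$1" + to + "$2", "$1" + to, to + "$1", to)
    patterns.zip(replacements).foldLeft(expr) { case (acc, (p, r)) => acc.replaceAll(p, r) }
  }
  def main(args: Array[String]): Unit = {
    println(rename("a + B + 1", "B", "f_B_key0")) // a + f_B_key0 + 1
    println(rename("B2 + aB", "B", "f_B_key0"))   // unchanged: B2 + aB
  }
}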
+ */ + override def apply(keyTags: Seq[Int], + keyTagList: Seq[String], + df: DataFrame, + derivedFeature: DerivedFeature, + derivationFunction: SQLFeatureDerivationFunction, + mvelContext: Option[FeathrExpressionExecutionContext]): DataFrame = { + // sql expression based derived feature needs rewrite, e.g, replace the feature names with feature column names in the dataframe + // Passthrough fields do not need rewrite as they do not have tags. + val passthroughFieldNames = df.schema.fields.map(f => + if (f.name.startsWith(FEATURE_NAME_PREFIX)) { + f.name + } else { + FEATURE_NAME_PREFIX + f.name + } + ).toSet + val rewrittenExpr = rewriteDerivedFeatureExpression(derivedFeature, keyTags, keyTagList, passthroughFieldNames) + val tags = Some(keyTags.map(keyTagList).toList) + val featureColumnName = DataFrameColName.genFeatureColumnName(derivedFeature.producedFeatureNames.head, tags) + df.withColumn(featureColumnName, expr(rewrittenExpr)) + } + +} diff --git a/src/main/scala/com/linkedin/feathr/offline/generation/DataFrameFeatureGenerator.scala b/src/main/scala/com/linkedin/feathr/offline/generation/DataFrameFeatureGenerator.scala index f52b0a4b5..57f4def55 100644 --- a/src/main/scala/com/linkedin/feathr/offline/generation/DataFrameFeatureGenerator.scala +++ b/src/main/scala/com/linkedin/feathr/offline/generation/DataFrameFeatureGenerator.scala @@ -5,11 +5,12 @@ import com.linkedin.feathr.common.{Header, JoiningFeatureParams, TaggedFeatureNa import com.linkedin.feathr.offline import com.linkedin.feathr.offline.anchored.feature.FeatureAnchorWithSource.{getDefaultValues, getFeatureTypes} import com.linkedin.feathr.offline.derived.functions.SeqJoinDerivationFunction -import com.linkedin.feathr.offline.derived.strategies.{DerivationStrategies, RowBasedDerivation, SequentialJoinDerivationStrategy, SparkUdfDerivation} +import com.linkedin.feathr.offline.derived.strategies.{DerivationStrategies, RowBasedDerivation, SequentialJoinDerivationStrategy, SparkUdfDerivation, SqlDerivationSpark} import com.linkedin.feathr.offline.derived.{DerivedFeature, DerivedFeatureEvaluator} import com.linkedin.feathr.offline.evaluator.DerivedFeatureGenStage import com.linkedin.feathr.offline.job.{FeatureGenSpec, FeatureTransformation} import com.linkedin.feathr.offline.logical.{FeatureGroups, MultiStageJoinPlan} +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.source.accessor.DataPathHandler import com.linkedin.feathr.offline.source.dataloader.DataLoaderHandler import com.linkedin.feathr.offline.transformation.AnchorToDataSourceMapper @@ -20,7 +21,9 @@ import org.apache.spark.sql.{DataFrame, SparkSession} * Feature generator that is responsible for generating anchored and derived features. * @param logicalPlan logical plan for feature generation job. 
*/ -private[offline] class DataFrameFeatureGenerator(logicalPlan: MultiStageJoinPlan, dataPathHandlers: List[DataPathHandler]) extends Serializable { +private[offline] class DataFrameFeatureGenerator(logicalPlan: MultiStageJoinPlan, + dataPathHandlers: List[DataPathHandler], + mvelContext: Option[FeathrExpressionExecutionContext]) extends Serializable { @transient val incrementalAggSnapshotLoader = IncrementalAggSnapshotLoader @transient val anchorToDataFrameMapper = new AnchorToDataSourceMapper(dataPathHandlers) @transient val featureGenFeatureGrouper = FeatureGenFeatureGrouper() @@ -72,7 +75,7 @@ private[offline] class DataFrameFeatureGenerator(logicalPlan: MultiStageJoinPlan val anchoredDFThisStage = anchorDFRDDMap.filterKeys(anchoredFeaturesThisStage.toSet) FeatureTransformation - .transformFeatures(anchoredDFThisStage, anchoredFeatureNamesThisStage, None, Some(incrementalAggContext)) + .transformFeatures(anchoredDFThisStage, anchoredFeatureNamesThisStage, None, Some(incrementalAggContext), mvelContext) .map(f => (f._1, (offline.FeatureDataFrame(f._2.transformedResult.df, f._2.transformedResult.inferredFeatureTypes), f._2.joinKey))) }.toMap @@ -117,18 +120,20 @@ private[offline] class DataFrameFeatureGenerator(logicalPlan: MultiStageJoinPlan DerivedFeatureEvaluator( DerivationStrategies( new SparkUdfDerivation(), - new RowBasedDerivation(featureGroups.allTypeConfigs), + new RowBasedDerivation(featureGroups.allTypeConfigs, mvelContext), new SequentialJoinDerivationStrategy { override def apply( keyTags: Seq[Int], keyTagList: Seq[String], df: DataFrame, derivedFeature: DerivedFeature, - derivationFunction: SeqJoinDerivationFunction): DataFrame = { + derivationFunction: SeqJoinDerivationFunction, mvelContext: Option[FeathrExpressionExecutionContext]): DataFrame = { // Feature generation does not support sequential join features throw new FeathrException( ErrorLabel.FEATHR_ERROR, s"Feature Generation does not support Sequential Join features : ${derivedFeature.producedFeatureNames.head}") } - })) + }, + new SqlDerivationSpark() + ), mvelContext) } diff --git a/src/main/scala/com/linkedin/feathr/offline/generation/StreamingFeatureGenerator.scala b/src/main/scala/com/linkedin/feathr/offline/generation/StreamingFeatureGenerator.scala index 99436b93c..126128323 100644 --- a/src/main/scala/com/linkedin/feathr/offline/generation/StreamingFeatureGenerator.scala +++ b/src/main/scala/com/linkedin/feathr/offline/generation/StreamingFeatureGenerator.scala @@ -6,7 +6,7 @@ import com.linkedin.feathr.common.JoiningFeatureParams import com.linkedin.feathr.offline.config.location.KafkaEndpoint import com.linkedin.feathr.offline.generation.outputProcessor.PushToRedisOutputProcessor.TABLE_PARAM_CONFIG_NAME import com.linkedin.feathr.offline.generation.outputProcessor.RedisOutputUtils -import com.linkedin.feathr.offline.job.FeatureTransformation.getFeatureJoinKey +import com.linkedin.feathr.offline.job.FeatureTransformation.getFeatureKeyColumnNames import com.linkedin.feathr.offline.job.{FeatureGenSpec, FeatureTransformation} import com.linkedin.feathr.offline.logical.FeatureGroups import com.linkedin.feathr.offline.source.accessor.DataPathHandler @@ -111,7 +111,7 @@ class StreamingFeatureGenerator(dataPathHandlers: List[DataPathHandler]) { // Apply feature transformation val transformedResult = DataFrameBasedSqlEvaluator.transform(anchor.featureAnchor.extractor.asInstanceOf[SimpleAnchorExtractorSpark], withKeyColumnDF, featureNamePrefixPairs, anchor.featureAnchor.featureTypeConfigs) - val 
outputJoinKeyColumnNames = getFeatureJoinKey(keyExtractor, withKeyColumnDF) + val outputJoinKeyColumnNames = getFeatureKeyColumnNames(keyExtractor, withKeyColumnDF) val selectedColumns = outputJoinKeyColumnNames ++ anchor.selectedFeatures.filter(keyTaggedFeatures.map(_.featureName).contains(_)) val cleanedDF = transformedResult.df.select(selectedColumns.head, selectedColumns.tail:_*) val keyColumnNames = FeatureTransformation.getStandardizedKeyNames(outputJoinKeyColumnNames.size) diff --git a/src/main/scala/com/linkedin/feathr/offline/job/FeatureTransformation.scala b/src/main/scala/com/linkedin/feathr/offline/job/FeatureTransformation.scala index 9b713a882..7b106572b 100644 --- a/src/main/scala/com/linkedin/feathr/offline/job/FeatureTransformation.scala +++ b/src/main/scala/com/linkedin/feathr/offline/job/FeatureTransformation.scala @@ -1,16 +1,18 @@ package com.linkedin.feathr.offline.job -import com.linkedin.feathr.common._ import com.linkedin.feathr.common.exception.{ErrorLabel, FeathrException, FeathrFeatureTransformationException} +import com.linkedin.feathr.common.tensor.TensorData +import com.linkedin.feathr.common.types.FeatureType +import com.linkedin.feathr.common.{AnchorExtractorBase, _} import com.linkedin.feathr.offline.anchored.anchorExtractor.{SQLConfigurableAnchorExtractor, SimpleConfigurableAnchorExtractor, TimeWindowConfigurableAnchorExtractor} import com.linkedin.feathr.offline.anchored.feature.{FeatureAnchor, FeatureAnchorWithSource} import com.linkedin.feathr.offline.anchored.keyExtractor.MVELSourceKeyExtractor import com.linkedin.feathr.offline.client.DataFrameColName -import com.linkedin.feathr.offline.client.plugins.{SimpleAnchorExtractorSparkAdaptor, FeathrUdfPluginContext, AnchorExtractorAdaptor} import com.linkedin.feathr.offline.config.{MVELFeatureDefinition, TimeWindowFeatureDefinition} import com.linkedin.feathr.offline.generation.IncrementalAggContext import com.linkedin.feathr.offline.job.FeatureJoinJob.FeatureName import com.linkedin.feathr.offline.join.DataFrameKeyCombiner +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.source.accessor.{DataSourceAccessor, NonTimeBasedDataSourceAccessor, TimeBasedDataSourceAccessor} import com.linkedin.feathr.offline.swa.SlidingWindowFeatureUtils import com.linkedin.feathr.offline.transformation.FeatureColumnFormat.FeatureColumnFormat @@ -22,6 +24,7 @@ import com.linkedin.feathr.offline.{FeatureDataFrame, JoinKeys} import com.linkedin.feathr.sparkcommon.{SimpleAnchorExtractorSpark, SourceKeyExtractor} import com.linkedin.feathr.swj.aggregate.AggregationType import com.linkedin.feathr.{common, offline} +import org.apache.avro.generic.IndexedRecord import org.apache.log4j.Logger import org.apache.spark.rdd.RDD import org.apache.spark.sql.functions._ @@ -41,6 +44,16 @@ import scala.concurrent.{Await, ExecutionContext, Future} */ private[offline] case class AnchorFeatureGroups(anchorsWithSameSource: Seq[FeatureAnchorWithSource], requestedFeatures: Seq[String]) +/** + * Context info needed in feature transformation + * @param featureAnchorWithSource feature anchor with its source + * @param featureNamePrefixPairs map of feature name to its prefix + * @param transformer transformer of the anchor + */ +private[offline] case class TransformInfo(featureAnchorWithSource: FeatureAnchorWithSource, + featureNamePrefixPairs: Seq[(FeatureName, FeatureName)], + transformer: AnchorExtractorBase[IndexedRecord]) + /** * Represent the transformed result of an anchor extractor
after evaluating its features * @param featureNameAndPrefixPairs pairs of feature name and feature name prefix @@ -75,7 +88,27 @@ private[offline] object FeatureTransformation { // feature name, column prefix type FeatureNameAndColumnPrefix = (String, String) - def getFeatureJoinKey(sourceKeyExtractor: SourceKeyExtractor, withKeyColumnDF: DataFrame, featureExtractor: Option[AnyRef] = None): Seq[String] = { + /** + * Extract feature key column names from the input feature RDD using the sourceKeyExtractor. + * @param sourceKeyExtractor key extractor that knows which columns in a feature RDD are the key columns. + * @param withKeyColumnRDD RDD that contains the key columns. + * @return feature key column names + */ + def getFeatureKeyColumnNamesRdd(sourceKeyExtractor: SourceKeyExtractor, withKeyColumnRDD: RDD[_]): Seq[String] = { + if (withKeyColumnRDD.isEmpty) { + sourceKeyExtractor.getKeyColumnNames(None) + } else { + sourceKeyExtractor.getKeyColumnNames(Some(withKeyColumnRDD.first())) + } + } + + /** + * Extract feature key column names from the input feature DataFrame using the sourceKeyExtractor. + * @param sourceKeyExtractor key extractor that knows which columns in a feature DataFrame are the key columns. + * @param withKeyColumnDF DataFrame that contains the key columns. + * @return feature key column names + */ + def getFeatureKeyColumnNames(sourceKeyExtractor: SourceKeyExtractor, withKeyColumnDF: DataFrame): Seq[String] = { if (withKeyColumnDF.head(1).isEmpty) { sourceKeyExtractor.getKeyColumnNames(None) } else { @@ -164,7 +197,8 @@ private[offline] object FeatureTransformation { featureAnchorWithSource: FeatureAnchorWithSource, df: DataFrame, requestedFeatureRefString: Seq[String], - inputDateInterval: Option[DateTimeInterval]): TransformedResult = { + inputDateInterval: Option[DateTimeInterval], + mvelContext: Option[FeathrExpressionExecutionContext]): TransformedResult = { val featureNamePrefix = getFeatureNamePrefix(featureAnchorWithSource.featureAnchor.extractor) val featureNamePrefixPairs = requestedFeatureRefString.map((_, featureNamePrefix)) @@ -178,7 +212,7 @@ private[offline] object FeatureTransformation { // so that transformation logic can be written only once DataFrameBasedSqlEvaluator.transform(transformer, df, featureNamePrefixPairs, featureTypeConfigs) case transformer: AnchorExtractor[_] => - DataFrameBasedRowEvaluator.transform(transformer, df, featureNamePrefixPairs, featureTypeConfigs) + DataFrameBasedRowEvaluator.transform(transformer, df, featureNamePrefixPairs, featureTypeConfigs, mvelContext) case _ => throw new FeathrFeatureTransformationException(ErrorLabel.FEATHR_USER_ERROR, s"cannot find valid Transformer for ${featureAnchorWithSource}") } @@ -286,7 +320,8 @@ private[offline] object FeatureTransformation { keyExtractor: SourceKeyExtractor, bloomFilter: Option[BloomFilter], inputDateInterval: Option[DateTimeInterval], - preprocessedDf: Option[DataFrame] = None): KeyedTransformedResult = { + preprocessedDf: Option[DataFrame] = None, + mvelContext: Option[FeathrExpressionExecutionContext]): KeyedTransformedResult = { // Can two diff anchors have different keyExtractor?
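// --- Editorial sketch (not part of this patch): the two helpers above replace the old
// getFeatureJoinKey; the DataFrame variant probes head(1) and the RDD variant first(),
// and both pass None for an empty dataset so the extractor can signal an empty join key.
// `keyExtractor`, `someDf` and `someRdd` are assumed to be in scope.
val dfKeyColumns = FeatureTransformation.getFeatureKeyColumnNames(keyExtractor, someDf)
val rddKeyColumns = FeatureTransformation.getFeatureKeyColumnNamesRdd(keyExtractor, someRdd)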
assert(anchorFeatureGroup.anchorsWithSameSource.map(_.dateParam).distinct.size == 1) val defaultInterval = anchorFeatureGroup.anchorsWithSameSource.head.dateParam.map(OfflineDateTimeUtils.createIntervalFromFeatureGenDateParam) @@ -304,7 +339,8 @@ private[offline] object FeatureTransformation { } val withKeyColumnDF = keyExtractor.appendKeyColumns(sourceDF) - val outputJoinKeyColumnNames = getFeatureJoinKey(keyExtractor, withKeyColumnDF, Some(anchorFeatureGroup.anchorsWithSameSource.head.featureAnchor.extractor)) + + val outputJoinKeyColumnNames = getFeatureKeyColumnNames(keyExtractor, withKeyColumnDF) val filteredFactData = applyBloomFilter((keyExtractor, withKeyColumnDF), bloomFilter) // 1. apply all transformations on the dataframe in sequential order @@ -314,7 +350,7 @@ private[offline] object FeatureTransformation { (prevTransformedResult, featureAnchorWithSource) => { val requestedFeatures = featureAnchorWithSource.selectedFeatures val transformedResultWithoutKey = - transformSingleAnchorDF(featureAnchorWithSource, prevTransformedResult.df, requestedFeatures, inputDateInterval) + transformSingleAnchorDF(featureAnchorWithSource, prevTransformedResult.df, requestedFeatures, inputDateInterval, mvelContext) val namePrefixPairs = prevTransformedResult.featureNameAndPrefixPairs ++ transformedResultWithoutKey.featureNameAndPrefixPairs val columnNameToFeatureNameAndType = prevTransformedResult.inferredFeatureTypes ++ transformedResultWithoutKey.inferredFeatureTypes val featureColumnFormats = prevTransformedResult.featureColumnFormats ++ transformedResultWithoutKey.featureColumnFormats @@ -437,7 +473,8 @@ private[offline] object FeatureTransformation { anchorToSourceDFThisStage: Map[FeatureAnchorWithSource, DataSourceAccessor], requestedFeatureNames: Seq[FeatureName], bloomFilter: Option[BloomFilter], - incrementalAggContext: Option[IncrementalAggContext] = None): Map[FeatureName, KeyedTransformedResult] = { + incrementalAggContext: Option[IncrementalAggContext] = None, + mvelContext: Option[FeathrExpressionExecutionContext]): Map[FeatureName, KeyedTransformedResult] = { val executionService = Executors.newFixedThreadPool(MAX_PARALLEL_FEATURE_GROUP) implicit val executionContext = ExecutionContext.fromExecutorService(executionService) val groupedAnchorToFeatureGroups: Map[FeatureGroupingCriteria, Map[FeatureAnchorWithSource, FeatureGroupWithSameTimeWindow]] = @@ -454,10 +491,21 @@ private[offline] object FeatureTransformation { val keyExtractor = anchorsWithSameSource.head._1.featureAnchor.sourceKeyExtractor val featureAnchorWithSource = anchorsWithSameSource.keys.toSeq val selectedFeatures = anchorsWithSameSource.flatMap(_._2.featureNames).toSeq - - val sourceDF = featureGroupingFactors.source - val transformedResults: Seq[KeyedTransformedResult] = transformMultiAnchorsOnSingleDataFrame(sourceDF, - keyExtractor, featureAnchorWithSource, bloomFilter, selectedFeatures, incrementalAggContext) + val isAvroRddBasedExtractor = featureAnchorWithSource + .map(_.featureAnchor.extractor) + .filter(extractor => extractor.isInstanceOf[CanConvertToAvroRDD] + ).nonEmpty + val transformedResults: Seq[KeyedTransformedResult] = if (isAvroRddBasedExtractor) { + // If any features are defined using an Avro-record-based extractor, run RDD-based feature transformation + val sourceAccessor = featureGroupingFactors.source + val sourceRdd = sourceAccessor.asInstanceOf[NonTimeBasedDataSourceAccessor].get() + val featureTypeConfigs = featureAnchorWithSource.flatMap(featureAnchor =>
featureAnchor.featureAnchor.featureTypeConfigs).toMap + Seq(transformFeaturesOnAvroRecord(sourceRdd, keyExtractor, featureAnchorWithSource, bloomFilter, selectedFeatures, featureTypeConfigs)) + } else { + val sourceDF = featureGroupingFactors.source + transformFeaturesOnDataFrameRow(sourceDF, + keyExtractor, featureAnchorWithSource, bloomFilter, selectedFeatures, incrementalAggContext, mvelContext) + } val res = transformedResults .map { transformedResultWithKey => @@ -670,6 +718,204 @@ private[offline] object FeatureTransformation { } } + + /** + * Apply a bloom filter to an RDD + * + * @param keyExtractor key extractor to extract the key values from the RDD + * @param rdd RDD to filter + * @param bloomFilter bloom filter used to filter out unwanted rows in the RDD based on key columns + * @return filtered RDD + */ + + private def applyBloomFilterRdd(keyExtractor: SourceKeyExtractor, rdd: RDD[IndexedRecord], bloomFilter: Option[BloomFilter]): RDD[IndexedRecord] = { + bloomFilter match { + case None => + // no bloom filter, use data as is + rdd + case Some(filter) => + // get the list of join key columns or expression + keyExtractor match { + case extractor: MVELSourceKeyExtractor => + // get the list of join key columns or expression + val keyColumnsList = if (rdd.isEmpty) { + extractor.getKeyColumnNames(None) + } else { + extractor.getKeyColumnNames(Some(rdd.first)) + } + if (!keyColumnsList.isEmpty) { + val filtered = rdd.filter { record: Any => + val keyVals = extractor.getKey(record) + // if key is not in observation, skip it + if (keyVals != null && keyVals.count(_ == null) == 0) { + filter.mightContainString(SourceUtils.generateFilterKeyString(keyVals)) + } else { + false + } + } + filtered + } else { + // the expansion feature for seq join does not have a right key, so we allow empty here + rdd + } + case _ => throw new FeathrFeatureTransformationException(ErrorLabel.FEATHR_USER_ERROR, "No source key extractor found") + } + } + } + + /** + * Transform features defined in a group of anchors based on the same source + * This is for the Avro-record-based extractors + * + * @param df source DataFrame that the requested features are defined on + * @param keyExtractor key extractor to apply on the source + * @param featureAnchorWithSources feature anchors defined on the source to be evaluated + * @param bloomFilter bloom filter to apply on the source + * @param requestedFeatureNames requested features + * @param featureTypeConfigs user specified feature types + * @return KeyedTransformedResult whose output feature DataFrame conforms to FDS format + */ + private def transformFeaturesOnAvroRecord(df: DataFrame, + keyExtractor: SourceKeyExtractor, + featureAnchorWithSources: Seq[FeatureAnchorWithSource], + bloomFilter: Option[BloomFilter], + requestedFeatureNames: Seq[FeatureName], + featureTypeConfigs: Map[String, FeatureTypeConfig] = Map()): KeyedTransformedResult = { + if (!keyExtractor.isInstanceOf[MVELSourceKeyExtractor]) { + throw new FeathrException(ErrorLabel.FEATHR_ERROR, s"Error processing requested Feature :${requestedFeatureNames}. " + + s"Key extractor ${keyExtractor} must extend MVELSourceKeyExtractor.") + } + val extractor = keyExtractor.asInstanceOf[MVELSourceKeyExtractor] + if (!extractor.anchorExtractorV1.isInstanceOf[CanConvertToAvroRDD]) { + throw new FeathrException(ErrorLabel.FEATHR_ERROR, s"Error processing requested Feature :${requestedFeatureNames}. 
" + + s"isLowLevelRddExtractor() should return true and convertToAvroRdd should be implemented.") + } + val rdd = extractor.anchorExtractorV1.asInstanceOf[CanConvertToAvroRDD].convertToAvroRdd(df) + val filteredFactData = applyBloomFilterRdd(keyExtractor, rdd, bloomFilter) + + // Build a sequence of 3-tuple of (FeatureAnchorWithSource, featureNamePrefixPairs, AnchorExtractorBase) + val transformInfo = featureAnchorWithSources map { featureAnchorWithSource => + val extractor = featureAnchorWithSource.featureAnchor.extractor + extractor match { + case transformer: AnchorExtractorBase[IndexedRecord] => + // We no longer need prefix for the simplicity of the implementation, instead if there's a feature name + // and source data field clash, we will throw exception and ask user to rename the feature. + val featureNamePrefix = "" + val featureNames = featureAnchorWithSource.selectedFeatures.filter(requestedFeatureNames.contains) + val featureNamePrefixPairs = featureNames.map((_, featureNamePrefix)) + TransformInfo(featureAnchorWithSource, featureNamePrefixPairs, transformer) + + case _ => + throw new FeathrFeatureTransformationException(ErrorLabel.FEATHR_USER_ERROR, s"Unsupported transformer $extractor for features: $requestedFeatureNames") + } + } + + // to avoid name conflict between feature names and the raw data field names + val sourceKeyExtractors = transformInfo.map(_.featureAnchorWithSource.featureAnchor.sourceKeyExtractor) + assert(sourceKeyExtractors.map(_.toString).distinct.size == 1) + + val transformers = transformInfo map (_.transformer) + + /* + * Transform the given RDD by applying extractors to each row to create an RDD[Row] where each Row + * represents keys and feature values + */ + val spark = SparkSession.builder().getOrCreate() + val userProvidedFeatureTypes = transformInfo.flatMap(_.featureAnchorWithSource.featureAnchor.getFeatureTypes.getOrElse(Map.empty[String, FeatureTypes])).toMap + val FeatureTypeInferenceContext(featureTypeAccumulators) = + FeatureTransformation.getTypeInferenceContext(spark, userProvidedFeatureTypes, requestedFeatureNames) + val transformedRdd = filteredFactData map { record => + val (keys, featureValuesWithType) = transformAvroRecord(requestedFeatureNames, sourceKeyExtractors, transformers, record, featureTypeConfigs) + requestedFeatureNames.zip(featureValuesWithType).foreach { + case (featureRef, (_, featureType)) => + if (featureTypeAccumulators(featureRef).isZero && featureType != null) { + // This is lazy evaluated + featureTypeAccumulators(featureRef).add(FeatureTypes.valueOf(featureType.getBasicType.toString)) + } + } + // Create a row by merging a row created from keys and a row created from term-vectors/tensors + Row.merge(Row.fromSeq(keys), Row.fromSeq(featureValuesWithType.map(_._1))) + } + + // Create a DataFrame from the above obtained RDD + val keyNames = getFeatureKeyColumnNamesRdd(sourceKeyExtractors.head, filteredFactData) + val (outputSchema, inferredFeatureTypeConfigs) = { + val allFeatureTypeConfigs = featureAnchorWithSources.flatMap(featureAnchorWithSource => featureAnchorWithSource.featureAnchor.featureTypeConfigs).toMap + val inferredFeatureTypes = inferFeatureTypes(featureTypeAccumulators, transformedRdd, requestedFeatureNames) + val inferredFeatureTypeConfigs = inferredFeatureTypes.map(x => x._1 -> new FeatureTypeConfig(x._2)) + val mergedFeatureTypeConfig = inferredFeatureTypeConfigs ++ allFeatureTypeConfigs + val colPrefix = "" + val featureTensorTypeInfo = getFDSSchemaFields(requestedFeatureNames, 
mergedFeatureTypeConfig, colPrefix) + val structFields = keyNames.foldRight(List.empty[StructField]) { + case (colName, acc) => + StructField(colName, StringType) :: acc + } + val outputSchema = StructType(StructType(structFields ++ featureTensorTypeInfo)) + (outputSchema, mergedFeatureTypeConfig) + } + val transformedDF = spark.createDataFrame(transformedRdd, outputSchema) + + val featureFormat = FeatureColumnFormat.FDS_TENSOR + val featureColumnFormats = requestedFeatureNames.map(name => name -> featureFormat).toMap + val transformedInfo = TransformedResult(transformInfo.flatMap(_.featureNamePrefixPairs), transformedDF, featureColumnFormats, inferredFeatureTypeConfigs) + KeyedTransformedResult(keyNames, transformedInfo) + } + + /** + * Apply a keyExtractor and feature transformers on a Record to extract feature values. + * @param requestedFeatureNames requested feature names in the output. Extractors may produce more features than requested. + * @param sourceKeyExtractors extractors used to extract the keys from the record + * @param transformers transformers used to produce the feature values from the record + * @param record avro record to work on + * @param featureTypeConfigs user defined feature types + * @return tuple of (feature join key, sequence of (feature value, feature type) in the order of requestedFeatureNames) + */ + private def transformAvroRecord( + requestedFeatureNames: Seq[FeatureName], + sourceKeyExtractors: Seq[SourceKeyExtractor], + transformers: Seq[AnchorExtractorBase[IndexedRecord]], + record: IndexedRecord, + featureTypeConfigs: Map[String, FeatureTypeConfig] = Map()): (Seq[String], Seq[(Any, FeatureType)]) = { + val keys = sourceKeyExtractors.head match { + case mvelSourceKeyExtractor: MVELSourceKeyExtractor => mvelSourceKeyExtractor.getKey(record) + case _ => throw new FeathrFeatureTransformationException(ErrorLabel.FEATHR_USER_ERROR, s"${sourceKeyExtractors.head} is not a valid extractor on RDD") + } + + /* + * For the given row, apply all extractors to extract feature values. If requested as tensors, each feature value + * contains a tensor, else a term-vector. + */ + val features = transformers map { + case extractor: AnchorExtractor[IndexedRecord] => + val features = extractor.getFeatures(record) + FeatureValueTypeValidator.validate(features, featureTypeConfigs) + features + case extractor => + throw new FeathrFeatureTransformationException( + ErrorLabel.FEATHR_USER_ERROR, + s"Invalid extractor $extractor for features: " + + s"$requestedFeatureNames requested as tensors") + } reduce (_ ++ _) + if (logger.isTraceEnabled) { + logger.trace(s"Extracted features: $features") + } + + /* + * Retain feature values for only the requested features, and represent each feature value as + * a tensor, as specified. + */ + val featureValuesWithType = requestedFeatureNames map { name => + features.get(name) map { + case featureValue => + val tensorData: TensorData = featureValue.getAsTensorData() + val featureType: FeatureType = featureValue.getFeatureType() + val row = FeaturizedDatasetUtils.tensorToFDSDataFrameRow(tensorData) + (row, featureType) + } getOrElse ((null, null)) // return null if no feature value present + } + (keys, featureValuesWithType) + } + /** * Helper function to be used by groupFeatures. 
Given a collection of feature anchors which also contains information about grouping * criteria and extractor type per feature anchor, returns a map of FeatureGroupingCriteria to @@ -848,13 +1094,14 @@ private[offline] object FeatureTransformation { * others use direct aggregation * */ - private def transformMultiAnchorsOnSingleDataFrame( + private def transformFeaturesOnDataFrameRow( source: DataSourceAccessor, keyExtractor: SourceKeyExtractor, anchorsWithSameSource: Seq[FeatureAnchorWithSource], bloomFilter: Option[BloomFilter], allRequestedFeatures: Seq[String], - incrementalAggContext: Option[IncrementalAggContext]): Seq[KeyedTransformedResult] = { + incrementalAggContext: Option[IncrementalAggContext], + mvelContext: Option[FeathrExpressionExecutionContext]): Seq[KeyedTransformedResult] = { // based on source and feature definition, divide features into direct transform and incremental // transform groups @@ -864,7 +1111,7 @@ private[offline] object FeatureTransformation { val preprocessedDf = PreprocessedDataFrameManager.getPreprocessedDataframe(anchorsWithSameSource) val directTransformedResult = - directTransformAnchorGroup.map(anchorGroup => Seq(directCalculate(anchorGroup, source, keyExtractor, bloomFilter, None, preprocessedDf))) + directTransformAnchorGroup.map(anchorGroup => Seq(directCalculate(anchorGroup, source, keyExtractor, bloomFilter, None, preprocessedDf, mvelContext))) val incrementalTransformedResult = incrementalTransformAnchorGroup.map { anchorGroup => { @@ -874,7 +1121,7 @@ private[offline] object FeatureTransformation { val incrAggCtx = incrementalAggContext.get val preAggDFs = incrAggCtx.previousSnapshotMap.collect { case (featureName, df) if requestedFeatures.exists(df.columns.contains) => df }.toSeq.distinct // join each previous aggregation dataframe sequentially - val groupKeys = getFeatureJoinKey(keyExtractor, preAggDFs.head) + val groupKeys = getFeatureKeyColumnNames(keyExtractor, preAggDFs.head) val keyColumnNames = getStandardizedKeyNames(groupKeys.size) val firstPreAgg = preAggDFs.head val joinedPreAggDFs = preAggDFs @@ -883,7 +1130,7 @@ private[offline] object FeatureTransformation { baseDF.join(curDF, keyColumnNames) }) val preAggRootDir = incrAggCtx.previousSnapshotRootDirMap(anchorGroup.anchorsWithSameSource.head.selectedFeatures.head) - Seq(incrementalCalculate(anchorGroup, joinedPreAggDFs, source, keyExtractor, bloomFilter, preAggRootDir)) + Seq(incrementalCalculate(anchorGroup, joinedPreAggDFs, source, keyExtractor, bloomFilter, preAggRootDir, mvelContext)) } } @@ -1000,7 +1247,8 @@ private[offline] object FeatureTransformation { source: DataSourceAccessor, keyExtractor: SourceKeyExtractor, bloomFilter: Option[BloomFilter], - preAggRootDir: String): KeyedTransformedResult = { + preAggRootDir: String, + mvelContext: Option[FeathrExpressionExecutionContext]): KeyedTransformedResult = { // get the aggregation window of the feature val aggWindow = getFeatureAggWindow(featureAnchorWithSource) @@ -1013,7 +1261,7 @@ private[offline] object FeatureTransformation { // If so, even though the incremental aggregation succeeds, the result is incorrect. // And the incorrect result will be propagated to all subsequent incremental aggregation because the incorrect result will be used as the snapshot. 
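// Editor's illustrative sketch (not part of this diff): conceptually, the incremental path below
// maintains newAgg = preAgg + delta(new data) - delta(expired data). Assuming hypothetical per-key
// sums in columns "sum", "newSum" and "oldSum", the merge reduces to outer joins plus coalesced arithmetic:
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.{coalesce, col, lit}
def mergeDeltaSketch(preAgg: DataFrame, newDelta: DataFrame, oldDelta: DataFrame): DataFrame =
  preAgg.join(newDelta, Seq("key"), "full_outer")
    .join(oldDelta, Seq("key"), "full_outer")
    .select(col("key"),
      (coalesce(col("sum"), lit(0.0)) + coalesce(col("newSum"), lit(0.0)) - coalesce(col("oldSum"), lit(0.0))).as("sum"))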
- val newDeltaSourceAgg = directCalculate(featureAnchorWithSource, source, keyExtractor, bloomFilter, Some(dateParam)) + val newDeltaSourceAgg = directCalculate(featureAnchorWithSource, source, keyExtractor, bloomFilter, Some(dateParam), None, mvelContext) // if the new delta window size is smaller than the request feature window, need to use the pre-aggregated results, if (newDeltaWindowSize < aggWindow) { // add prefixes to feature columns and keys for the previous aggregation snapshot @@ -1034,7 +1282,7 @@ private[offline] object FeatureTransformation { renamedPreAgg } else { // preAgg - oldDeltaAgg - val oldDeltaSourceAgg = directCalculate(featureAnchorWithSource, source, keyExtractor, bloomFilter, Some(oldDeltaWindowInterval)) + val oldDeltaSourceAgg = directCalculate(featureAnchorWithSource, source, keyExtractor, bloomFilter, Some(oldDeltaWindowInterval), None, mvelContext) val oldDeltaAgg = oldDeltaSourceAgg.transformedResult.df mergeDeltaDF(renamedPreAgg, oldDeltaAgg, leftKeyColumnNames, joinKeys, newDeltaFeatureColumnNames, false) } diff --git a/src/main/scala/com/linkedin/feathr/offline/job/LocalFeatureJoinJob.scala b/src/main/scala/com/linkedin/feathr/offline/job/LocalFeatureJoinJob.scala index b3adfc03b..4a38d2304 100644 --- a/src/main/scala/com/linkedin/feathr/offline/job/LocalFeatureJoinJob.scala +++ b/src/main/scala/com/linkedin/feathr/offline/job/LocalFeatureJoinJob.scala @@ -2,6 +2,7 @@ package com.linkedin.feathr.offline.job import com.linkedin.feathr.offline.client.FeathrClient import com.linkedin.feathr.offline.config.FeatureJoinConfig +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.source.dataloader.DataLoaderHandler import com.linkedin.feathr.offline.source.accessor.DataPathHandler import com.linkedin.feathr.offline.source.dataloader.DataLoaderFactory @@ -34,11 +35,13 @@ object LocalFeatureJoinJob { observationData: SparkFeaturizedDataset, extraParams: Array[String] = Array(), ss: SparkSession = ss, - dataPathHandlers: List[DataPathHandler]): SparkFeaturizedDataset = { + dataPathHandlers: List[DataPathHandler], + mvelContext: Option[FeathrExpressionExecutionContext]): SparkFeaturizedDataset = { val joinConfig = FeatureJoinConfig.parseJoinConfig(joinConfigAsHoconString) val feathrClient = FeathrClient.builder(ss) .addFeatureDef(featureDefAsString) .addDataPathHandlers(dataPathHandlers) + .addFeathrExpressionContext(mvelContext) .build() val outputPath: String = FeatureJoinJob.SKIP_OUTPUT @@ -66,10 +69,11 @@ object LocalFeatureJoinJob { observationDataPath: String, extraParams: Array[String] = Array(), ss: SparkSession = ss, - dataPathHandlers: List[DataPathHandler]): SparkFeaturizedDataset = { + dataPathHandlers: List[DataPathHandler], + mvelContext: Option[FeathrExpressionExecutionContext]=None): SparkFeaturizedDataset = { val dataLoaderHandlers: List[DataLoaderHandler] = dataPathHandlers.map(_.dataLoaderHandler) val obsDf = loadObservationAsFDS(ss, observationDataPath,dataLoaderHandlers=dataLoaderHandlers) - joinWithObsDFAndHoconJoinConfig(joinConfigAsHoconString, featureDefAsString, obsDf, extraParams, ss, dataPathHandlers=dataPathHandlers) + joinWithObsDFAndHoconJoinConfig(joinConfigAsHoconString, featureDefAsString, obsDf, extraParams, ss, dataPathHandlers=dataPathHandlers, mvelContext) } /** diff --git a/src/main/scala/com/linkedin/feathr/offline/join/DataFrameFeatureJoiner.scala b/src/main/scala/com/linkedin/feathr/offline/join/DataFrameFeatureJoiner.scala index e7fccbd08..a03abc83c 100644 --- 
a/src/main/scala/com/linkedin/feathr/offline/join/DataFrameFeatureJoiner.scala +++ b/src/main/scala/com/linkedin/feathr/offline/join/DataFrameFeatureJoiner.scala @@ -12,6 +12,7 @@ import com.linkedin.feathr.offline.join.algorithms._ import com.linkedin.feathr.offline.join.util.{FrequentItemEstimatorFactory, FrequentItemEstimatorType} import com.linkedin.feathr.offline.join.workflow._ import com.linkedin.feathr.offline.logical.{FeatureGroups, MultiStageJoinPlan} +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.source.accessor.DataPathHandler import com.linkedin.feathr.offline.swa.SlidingWindowAggregationJoiner import com.linkedin.feathr.offline.transformation.AnchorToDataSourceMapper @@ -30,7 +31,7 @@ import scala.collection.JavaConverters._ * Joiner to join observation with feature data using Spark DataFrame API * @param logicalPlan analyzed feature info */ -private[offline] class DataFrameFeatureJoiner(logicalPlan: MultiStageJoinPlan, dataPathHandlers: List[DataPathHandler]) extends Serializable { +private[offline] class DataFrameFeatureJoiner(logicalPlan: MultiStageJoinPlan, dataPathHandlers: List[DataPathHandler], mvelContext: Option[FeathrExpressionExecutionContext]) extends Serializable { @transient lazy val log = Logger.getLogger(getClass.getName) @transient lazy val anchorToDataSourceMapper = new AnchorToDataSourceMapper(dataPathHandlers) private val windowAggFeatureStages = logicalPlan.windowAggFeatureStages @@ -69,7 +70,7 @@ private[offline] class DataFrameFeatureJoiner(logicalPlan: MultiStageJoinPlan, d (dfWithFeatureNames, featureAnchorWithSourcePair) => { val featureAnchorWithSource = featureAnchorWithSourcePair._1 val requestedFeatures = featureAnchorWithSourcePair._2.toSeq - val resultWithoutKey = transformSingleAnchorDF(featureAnchorWithSource, dfWithFeatureNames.df, requestedFeatures, None) + val resultWithoutKey = transformSingleAnchorDF(featureAnchorWithSource, dfWithFeatureNames.df, requestedFeatures, None, mvelContext) val namePrefixPairs = dfWithFeatureNames.featureNameAndPrefixPairs ++ resultWithoutKey.featureNameAndPrefixPairs val inferredFeatureTypeConfigs = dfWithFeatureNames.inferredFeatureTypes ++ resultWithoutKey.inferredFeatureTypes val featureColumnFormats = resultWithoutKey.featureColumnFormats ++ dfWithFeatureNames.featureColumnFormats @@ -201,12 +202,12 @@ private[offline] class DataFrameFeatureJoiner(logicalPlan: MultiStageJoinPlan, d AnchoredFeatureJoinStep( SlickJoinLeftJoinKeyColumnAppender, SlickJoinRightJoinKeyColumnAppender, - SparkJoinWithJoinCondition(EqualityJoinConditionBuilder)) + SparkJoinWithJoinCondition(EqualityJoinConditionBuilder), mvelContext) } else { AnchoredFeatureJoinStep( SqlTransformedLeftJoinKeyColumnAppender, IdentityJoinKeyColumnAppender, - SparkJoinWithJoinCondition(EqualityJoinConditionBuilder)) + SparkJoinWithJoinCondition(EqualityJoinConditionBuilder), mvelContext) } val FeatureDataFrameOutput(FeatureDataFrame(withAllBasicAnchoredFeatureDF, inferredBasicAnchoredFeatureTypes)) = anchoredFeatureJoinStep.joinFeatures(requiredRegularFeatureAnchors, AnchorJoinStepInput(withWindowAggFeatureDF, anchorSourceAccessorMap)) @@ -223,7 +224,7 @@ private[offline] class DataFrameFeatureJoiner(logicalPlan: MultiStageJoinPlan, d } else withAllBasicAnchoredFeatureDF // 6. 
Join Derived Features - val derivedFeatureEvaluator = DerivedFeatureEvaluator(ss=ss, featureGroups=featureGroups, dataPathHandlers=dataPathHandlers) + val derivedFeatureEvaluator = DerivedFeatureEvaluator(ss=ss, featureGroups=featureGroups, dataPathHandlers=dataPathHandlers, mvelContext) val derivedFeatureJoinStep = DerivedFeatureJoinStep(derivedFeatureEvaluator) val FeatureDataFrameOutput(FeatureDataFrame(withDerivedFeatureDF, inferredDerivedFeatureTypes)) = derivedFeatureJoinStep.joinFeatures(allRequiredFeatures.filter { diff --git a/src/main/scala/com/linkedin/feathr/offline/join/workflow/AnchoredFeatureJoinStep.scala b/src/main/scala/com/linkedin/feathr/offline/join/workflow/AnchoredFeatureJoinStep.scala index 5e69438f8..7abe3901b 100644 --- a/src/main/scala/com/linkedin/feathr/offline/join/workflow/AnchoredFeatureJoinStep.scala +++ b/src/main/scala/com/linkedin/feathr/offline/join/workflow/AnchoredFeatureJoinStep.scala @@ -12,6 +12,7 @@ import com.linkedin.feathr.offline.job.KeyedTransformedResult import com.linkedin.feathr.offline.join._ import com.linkedin.feathr.offline.join.algorithms._ import com.linkedin.feathr.offline.join.util.FrequentItemEstimatorFactory +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.source.accessor.DataSourceAccessor import com.linkedin.feathr.offline.transformation.DataFrameDefaultValueSubstituter.substituteDefaults import com.linkedin.feathr.offline.util.FeathrUtils @@ -31,7 +32,8 @@ import org.apache.spark.sql.functions.lit private[offline] class AnchoredFeatureJoinStep( leftJoinColumnExtractor: JoinKeyColumnsAppender, rightJoinColumnExtractor: JoinKeyColumnsAppender, - joiner: SparkJoinWithJoinCondition) + joiner: SparkJoinWithJoinCondition, + mvelContext: Option[FeathrExpressionExecutionContext]) extends FeatureJoinStep[AnchorJoinStepInput, DataFrameJoinStepOutput] { @transient lazy val log = Logger.getLogger(getClass.getName) @@ -126,7 +128,7 @@ private[offline] class AnchoredFeatureJoinStep( val anchoredFeaturesThisStage = featureNames.filter(allAnchoredFeatures.contains).map(allAnchoredFeatures).distinct val anchoredDFThisStage = anchorDFMap.filterKeys(anchoredFeaturesThisStage.toSet) // map feature name to its transformed dataframe and the join key of the dataframe - val featureToDFAndJoinKeys = transformFeatures(anchoredDFThisStage, anchoredFeatureNamesThisStage, bloomFilter) + val featureToDFAndJoinKeys = transformFeatures(anchoredDFThisStage, anchoredFeatureNamesThisStage, bloomFilter, None, mvelContext) featureToDFAndJoinKeys .groupBy(_._2.transformedResult.df) // group by dataframe, join one at a time .map(grouped => (grouped._2.keys.toSeq, grouped._2.values.toSeq)) // extract the feature names and their (dataframe,join keys) pairs @@ -226,6 +228,7 @@ private[offline] object AnchoredFeatureJoinStep { def apply( leftJoinColumnExtractor: JoinKeyColumnsAppender, rightJoinColumnExtractor: JoinKeyColumnsAppender, - joiner: SparkJoinWithJoinCondition): AnchoredFeatureJoinStep = - new AnchoredFeatureJoinStep(leftJoinColumnExtractor, rightJoinColumnExtractor, joiner) + joiner: SparkJoinWithJoinCondition, + mvelContext: Option[FeathrExpressionExecutionContext]): AnchoredFeatureJoinStep = + new AnchoredFeatureJoinStep(leftJoinColumnExtractor, rightJoinColumnExtractor, joiner, mvelContext) } diff --git a/src/main/scala/com/linkedin/feathr/offline/mvel/MvelContext.java b/src/main/scala/com/linkedin/feathr/offline/mvel/MvelContext.java index ce5926605..1ce8136c9 100644 --- 
a/src/main/scala/com/linkedin/feathr/offline/mvel/MvelContext.java +++ b/src/main/scala/com/linkedin/feathr/offline/mvel/MvelContext.java @@ -4,11 +4,11 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.feathr.common.FeatureValue; import com.linkedin.feathr.common.util.MvelContextUDFs; +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext; import org.apache.avro.generic.GenericEnumSymbol; import org.apache.avro.generic.GenericRecord; import org.apache.avro.util.Utf8; import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema; -import org.mvel2.DataConversion; import org.mvel2.MVEL; import org.mvel2.ParserConfiguration; import org.mvel2.ParserContext; @@ -114,9 +114,9 @@ public static ParserContext newParserContext() { * {@link com.linkedin.feathr.offline.mvel.plugins.FeathrMvelPluginContext}. (Output objects that can be converted * to {@link FeatureValue} via plugins, will be converted after MVEL returns.) */ - public static Object executeExpressionWithPluginSupport(Object compiledExpression, Object ctx) { + public static Object executeExpressionWithPluginSupport(Object compiledExpression, Object ctx, FeathrExpressionExecutionContext mvelContext) { Object output = MVEL.executeExpression(compiledExpression, ctx); - return coerceToFeatureValueViaMvelDataConversionPlugins(output); + return coerceToFeatureValueViaMvelDataConversionPlugins(output, mvelContext); } /** @@ -124,15 +124,18 @@ public static Object executeExpressionWithPluginSupport(Object compiledExpressio * {@link com.linkedin.feathr.offline.mvel.plugins.FeathrMvelPluginContext}. (Output objects that can be converted * to {@link FeatureValue} via plugins, will be converted after MVEL returns.) */ - public static Object executeExpressionWithPluginSupport(Object compiledExpression, Object ctx, - VariableResolverFactory variableResolverFactory) { + public static Object executeExpressionWithPluginSupportWithFactory(Object compiledExpression, + Object ctx, + VariableResolverFactory variableResolverFactory, + FeathrExpressionExecutionContext mvelContext) { Object output = MVEL.executeExpression(compiledExpression, ctx, variableResolverFactory); - return coerceToFeatureValueViaMvelDataConversionPlugins(output); + return coerceToFeatureValueViaMvelDataConversionPlugins(output, mvelContext); } - private static Object coerceToFeatureValueViaMvelDataConversionPlugins(Object input) { - if (input != null && DataConversion.canConvert(FeatureValue.class, input.getClass())) { - return DataConversion.convert(input, FeatureValue.class); + private static Object coerceToFeatureValueViaMvelDataConversionPlugins(Object input, FeathrExpressionExecutionContext mvelContext) { + // Convert the input to feature value using the given MvelContext if possible + if (input != null && mvelContext != null && mvelContext.canConvert(FeatureValue.class, input.getClass())) { + return mvelContext.convert(input, FeatureValue.class); } else { return input; } diff --git a/src/main/scala/com/linkedin/feathr/offline/mvel/MvelUtils.scala b/src/main/scala/com/linkedin/feathr/offline/mvel/MvelUtils.scala index 8da9e2272..db467b0cf 100644 --- a/src/main/scala/com/linkedin/feathr/offline/mvel/MvelUtils.scala +++ b/src/main/scala/com/linkedin/feathr/offline/mvel/MvelUtils.scala @@ -1,9 +1,10 @@ package com.linkedin.feathr.offline.mvel +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import org.apache.commons.lang.exception.ExceptionUtils import org.apache.log4j.Logger +import 
org.mvel2.PropertyAccessException import org.mvel2.integration.VariableResolverFactory -import org.mvel2.{MVEL, PropertyAccessException} private[offline] object MvelUtils { @transient private lazy val log = Logger.getLogger(getClass) @@ -15,9 +16,9 @@ private[offline] object MvelUtils { // This approach has pros and cons and will likely be controversial // But it should allow for much simpler expressions for extracting features from data sets whose values may often be null // (We might not want to check for null explicitly everywhere) - def executeExpression(compiledExpression: Any, input: Any, resolverFactory: VariableResolverFactory, featureName: String = ""): Option[AnyRef] = { + def executeExpression(compiledExpression: Any, input: Any, resolverFactory: VariableResolverFactory, featureName: String = "", mvelContext: Option[FeathrExpressionExecutionContext]): Option[AnyRef] = { try { - Option(MvelContext.executeExpressionWithPluginSupport(compiledExpression, input, resolverFactory)) + Option(MvelContext.executeExpressionWithPluginSupportWithFactory(compiledExpression, input, resolverFactory, mvelContext.orNull)) } catch { case e: RuntimeException => log.debug(s"Expression $compiledExpression on input record $input threw exception", e) diff --git a/src/main/scala/com/linkedin/feathr/offline/mvel/plugins/FeathrExpressionExecutionContext.scala b/src/main/scala/com/linkedin/feathr/offline/mvel/plugins/FeathrExpressionExecutionContext.scala new file mode 100644 index 000000000..67371464f --- /dev/null +++ b/src/main/scala/com/linkedin/feathr/offline/mvel/plugins/FeathrExpressionExecutionContext.scala @@ -0,0 +1,144 @@ +package com.linkedin.feathr.offline.mvel.plugins + +import com.linkedin.feathr.common.FeatureValue +import org.apache.spark.SparkContext +import org.apache.spark.broadcast.Broadcast +import org.mvel2.ConversionHandler +import org.mvel2.conversion.ArrayHandler +import org.mvel2.util.ReflectionUtil.{isAssignableFrom, toNonPrimitiveType} + +import java.io.Serializable +import scala.collection.mutable + +/** + * The context needed for the Feathr expression transformation language, in order to + * support the automatic conversion between the Feathr FeatureValue class and + * some customized external data, e.g. a 3rd-party feature value class. + * It is intended for advanced cases, to enable compatibility with old versions of the Feathr expression language; + * most users will not need to use it. + */ +class FeathrExpressionExecutionContext extends Serializable { + + // A map of converters that are registered to convert a class into a customized data format. + // This includes converting from and to feature value. + // The Map is broadcast from the driver to executors + private var converters: Broadcast[mutable.HashMap[String, ConversionHandler]] = null + // A map of adaptors that are registered to convert a Feathr FeatureValue to customized external data format + // The Map is broadcast from the driver to executors + private var featureValueTypeAdaptors: Broadcast[mutable.HashMap[String, FeatureValueTypeAdaptor[AnyRef]]] = null + + // Same as converters, used to build the map on the driver during the job initialization. + // Will be broadcast to all executors and available as converters + private val localConverters = new mutable.HashMap[String, ConversionHandler]
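// Editor's illustrative sketch (not part of this diff) of the build-on-driver, broadcast-to-executors
// pattern used by the maps in this class (SparkContext and mutable are already imported above):
private def broadcastPatternSketch(sc: SparkContext): Array[Int] = {
  val localMap = mutable.HashMap("f" -> 1)                     // built on the driver
  val bcast = sc.broadcast(localMap)                           // shipped once to each executor
  sc.parallelize(1 to 2).map(_ => bcast.value("f")).collect()  // read on executors: Array(1, 1)
}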
+ // Same as featureValueTypeAdaptors, used to build the map on the driver during the job initialization. + // Will be broadcast to all executors and available as featureValueTypeAdaptors + private val localFeatureValueTypeAdaptors = new mutable.HashMap[String, FeatureValueTypeAdaptor[AnyRef]] + + /** + * Set up the executor MVEL expression context by adding a type adaptor to Feathr's MVEL runtime. + * This enables Feathr's expressions to support some alternative + * class representation of {@link FeatureValue} via coercion. + * + * @param clazz the class of the "other" alternative representation of feature value + * @param typeAdaptor the type adaptor that can convert between the "other" representation and {@link FeatureValue} + * @tparam T type parameter for the "other" feature value class + */ + def setupExecutorMvelContext[T](clazz: Class[T], typeAdaptor: FeatureValueTypeAdaptor[T], sc: SparkContext): Unit = { + localFeatureValueTypeAdaptors.put(clazz.getCanonicalName, typeAdaptor.asInstanceOf[FeatureValueTypeAdaptor[AnyRef]]) + featureValueTypeAdaptors = sc.broadcast(localFeatureValueTypeAdaptors) + // Add a converter that can convert external data to feature value + addConversionHandler(classOf[FeatureValue], new ExternalDataToFeatureValueHandler(featureValueTypeAdaptors), sc) + // Add a converter that can convert a feature value to external data + addConversionHandler(clazz, new FeatureValueToExternalDataHandler(typeAdaptor), sc) + } + + /** + * Check if there are registered converters that can handle the conversion. + * @param toType type to convert to + * @param convertFrom type to convert from + * @return whether it can be converted or not + */ + def canConvert(toType: Class[_], convertFrom: Class[_]): Boolean = { + if (isAssignableFrom(toType, convertFrom)) return true + if (converters.value.contains(toType.getCanonicalName)) { + converters.value.get(toType.getCanonicalName).get.canConvertFrom(toNonPrimitiveType(convertFrom)) + } else if (toType.isArray && canConvert(toType.getComponentType, convertFrom)) { + true + } else { + false + } + } + + /** + * Convert the input to the output type using the registered converters + * @param in value to be converted + * @param toType output type + * @tparam T output type parameter + * @return the converted value + */ + def convert[T](in: Any, toType: Class[T]): T = { + if ((toType eq in.getClass) || toType.isAssignableFrom(in.getClass)) return in.asInstanceOf[T] + val converter = if (converters != null) { + // the converter may be absent; fall through to the array handler below in that case + converters.value.get(toType.getCanonicalName).orNull + } else { + throw new RuntimeException(s"Cannot convert ${in} to ${toType} due to no converters found.") + } + if (converter == null && toType.isArray) { + val handler = new ArrayHandler(toType) + converters.value.put(toType.getCanonicalName, handler) + handler.convertFrom(in).asInstanceOf[T] + } + else converter.convertFrom(in).asInstanceOf[T] + }
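// Editor's illustrative usage sketch (not part of this diff). AlienFeatureValue and its adaptor are
// the test-only classes appearing later in this change set; a no-arg adaptor constructor is assumed.
// import com.linkedin.feathr.offline.plugins.{AlienFeatureValue, AlienFeatureValueTypeAdaptor}
// val ctx = new FeathrExpressionExecutionContext()
// ctx.setupExecutorMvelContext(classOf[AlienFeatureValue], new AlienFeatureValueTypeAdaptor(), sc)
// ctx.canConvert(classOf[FeatureValue], classOf[AlienFeatureValue])  // true: both directions registered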
+ + /** + * Register a new {@link ConversionHandler} with the factory. + * + * @param type - Target type represented by the conversion handler. + * @param handler - An instance of the handler. + * @param sc - Spark context used to broadcast the updated converter map to executors. + */ + private[plugins] def addConversionHandler(`type`: Class[_], handler: ConversionHandler, sc: SparkContext): Unit = { + localConverters.put(`type`.getCanonicalName, handler) + converters = sc.broadcast(localConverters) + } + + /** + * Convert a Feathr FeatureValue to an external feature value + * @param adaptor An adaptor that knows how to convert the Feathr feature value to the requested external data + */ + class FeatureValueToExternalDataHandler(val adaptor: FeatureValueTypeAdaptor[_]) + extends ConversionHandler with Serializable { + /** + * Convert a FeatureValue into the requested external data + * @param fv the input feature value + * @return requested external data + */ + override def convertFrom(fv: Any): AnyRef = adaptor.fromFeathrFeatureValue(fv.asInstanceOf[FeatureValue]).asInstanceOf[AnyRef] + + override def canConvertFrom(cls: Class[_]): Boolean = classOf[FeatureValue] == cls + } + + + /** + * Convert external data types to Feathr FeatureValue automatically + * @param adaptors a map of adaptors that know how to convert external data to a feature value. + * It maps the supported input class name to its adaptor. + */ + class ExternalDataToFeatureValueHandler(val adaptors: Broadcast[mutable.HashMap[String, FeatureValueTypeAdaptor[AnyRef]]]) + extends ConversionHandler with Serializable { + + /** + * Convert external data to a FeatureValue + * + * @param externalData to convert + * @return result feature value + */ + def convertFrom(externalData: Any): AnyRef = { + val adaptor = adaptors.value.getOrElse(externalData.getClass.getCanonicalName, + throw new IllegalArgumentException("Can't convert to Feathr FeatureValue from " + externalData + ", current type adaptors: " + adaptors.value.keySet.mkString(","))) + adaptor.toFeathrFeatureValue(externalData.asInstanceOf[AnyRef]) + } + + override def canConvertFrom(cls: Class[_]): Boolean = adaptors.value.contains(cls.getCanonicalName) + } +} diff --git a/src/main/scala/com/linkedin/feathr/offline/mvel/plugins/FeathrMvelPluginContext.java b/src/main/scala/com/linkedin/feathr/offline/mvel/plugins/FeathrMvelPluginContext.java deleted file mode 100644 index b672007bc..000000000 --- a/src/main/scala/com/linkedin/feathr/offline/mvel/plugins/FeathrMvelPluginContext.java +++ /dev/null @@ -1,79 +0,0 @@ -package com.linkedin.feathr.offline.mvel.plugins; - -import com.linkedin.feathr.common.FeatureValue; -import com.linkedin.feathr.common.InternalApi; -import org.mvel2.ConversionHandler; -import org.mvel2.DataConversion; - -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - - -/** - * A plugin that allows an advanced user to add additional capabilities or behaviors to Feathr's MVEL runtime. - * - * NOTE: This class is intended for advanced users only, and specifically as a "migration aid" for migrating from - * some previous versions of Feathr whose FeatureValue representations had a different class name, while preserving - * compatibility with feature definitions written against those older versions of Feathr. - */ -public class FeathrMvelPluginContext { - // TODO: Does this need to be "translated" into a different pattern whereby we track the CLASSNAME of the type adaptors - // instead of the instance, such that the class mappings can be broadcasted via Spark and then reinitialized on - // executor hosts? 
- private static final ConcurrentMap<Class<?>, FeatureValueTypeAdaptor<?>> TYPE_ADAPTORS; - - static { - TYPE_ADAPTORS = new ConcurrentHashMap<>(); - DataConversion.addConversionHandler(FeatureValue.class, new FeathrFeatureValueConversionHandler()); - } - - /** - * Add a type adaptor to Feathr's MVEL runtime, that will enable Feathr's expressions to support some alternative - * class representation of {@link FeatureValue} via coercion. - * @param clazz the class of the "other" alternative representation of feature value - * @param typeAdaptor the type adaptor that can convert between the "other" representation and {@link FeatureValue} - * @param <T> type parameter for the "other" feature value class - */ - @SuppressWarnings("unchecked") - public static <T> void addFeatureTypeAdaptor(Class<T> clazz, FeatureValueTypeAdaptor<T> typeAdaptor) { - // TODO: MAKE SURE clazz IS NOT ONE OF THE CLASSES ALREADY COVERED IN org.mvel2.DataConversion.CONVERTERS! - // IF WE OVERRIDE ANY OF THOSE, IT MIGHT CAUSE MVEL TO BEHAVE IN STRANGE AND UNEXPECTED WAYS! - TYPE_ADAPTORS.put(clazz, typeAdaptor); - DataConversion.addConversionHandler(clazz, new ExternalFeatureValueConversionHandler(typeAdaptor)); - } - - static class FeathrFeatureValueConversionHandler implements ConversionHandler { - @Override - @SuppressWarnings("unchecked") - public Object convertFrom(Object in) { - FeatureValueTypeAdaptor<Object> adaptor = (FeatureValueTypeAdaptor<Object>) TYPE_ADAPTORS.get(in.getClass()); - if (adaptor == null) { - throw new IllegalArgumentException("Can't convert to Feathr FeatureValue from " + in); - } - return adaptor.toFeathrFeatureValue(in); - } - - @Override - public boolean canConvertFrom(Class cls) { - return TYPE_ADAPTORS.containsKey(cls); - } - } - - static class ExternalFeatureValueConversionHandler implements ConversionHandler { - private final FeatureValueTypeAdaptor _adaptor; - - public ExternalFeatureValueConversionHandler(FeatureValueTypeAdaptor adaptor) { - _adaptor = adaptor; - } - - @Override - public Object convertFrom(Object in) { - return _adaptor.fromFeathrFeatureValue((FeatureValue) in); - } - - @Override - public boolean canConvertFrom(Class cls) { - return FeatureValue.class.equals(cls); - } - } -} diff --git a/src/main/scala/com/linkedin/feathr/offline/source/DataSource.scala b/src/main/scala/com/linkedin/feathr/offline/source/DataSource.scala index 8c132cf4a..ba207b4fd 100644 --- a/src/main/scala/com/linkedin/feathr/offline/source/DataSource.scala +++ b/src/main/scala/com/linkedin/feathr/offline/source/DataSource.scala @@ -3,7 +3,6 @@ package com.linkedin.feathr.offline.source import com.linkedin.feathr.offline.config.location.{DataLocation, SimplePath} import com.linkedin.feathr.offline.source.SourceFormatType.SourceFormatType import com.linkedin.feathr.offline.util.{AclCheckUtils, HdfsUtils, LocalFeatureJoinUtils} -import org.apache.hadoop.fs.Path import org.apache.spark.sql.SparkSession import scala.util.{Failure, Success, Try} @@ -26,29 +25,33 @@ private[offline] case class DataSource( timePartitionPattern: Option[String]) extends Serializable { private lazy val ss: SparkSession = SparkSession.builder().getOrCreate() - val path: String = resolveLatest(location.getPath, None) match { - case Success(resolvedPath) => resolvedPath - case Failure(_) => location.getPath // resolved failed - } - + val path: String = resolveLatest(location.getPath, None) val pathList: Array[String] = - if (location.isInstanceOf[SimplePath] && sourceType == SourceFormatType.LIST_PATH) path.split(";") - else Array(path) + if 
(location.isInstanceOf[SimplePath] && sourceType == SourceFormatType.LIST_PATH) { + path.split(";").map(resolveLatest(_, None)) + } else { + Array(path) + } // resolve path with #LATEST - def resolveLatest(path: String, mockDataBaseDir: Option[String]): Try[String] = { - Try(if (path.contains(AclCheckUtils.LATEST_PATTERN)) { - val hadoopConf = ss.sparkContext.hadoopConfiguration - if (ss.sparkContext.isLocal && LocalFeatureJoinUtils.getMockPathIfExist(path, hadoopConf, mockDataBaseDir).isDefined) { - val mockPath = LocalFeatureJoinUtils.getMockPathIfExist(path, hadoopConf, mockDataBaseDir).get - val resolvedPath = HdfsUtils.getLatestPath(mockPath, hadoopConf) - LocalFeatureJoinUtils.getOriginalFromMockPath(resolvedPath, mockDataBaseDir) - } else if (new Path(path).getFileSystem(hadoopConf).exists(new Path(path))) { - HdfsUtils.getLatestPath(path, hadoopConf) + def resolveLatest(path: String, mockDataBaseDir: Option[String]): String = { + Try( + if (path.contains(AclCheckUtils.LATEST_PATTERN)) { + val hadoopConf = ss.sparkContext.hadoopConfiguration + if (ss.sparkContext.isLocal && LocalFeatureJoinUtils.getMockPathIfExist(path, hadoopConf, mockDataBaseDir).isDefined) { + val mockPath = LocalFeatureJoinUtils.getMockPathIfExist(path, hadoopConf, mockDataBaseDir).get + val resolvedPath = HdfsUtils.getLatestPath(mockPath, hadoopConf) + LocalFeatureJoinUtils.getOriginalFromMockPath(resolvedPath, mockDataBaseDir) + } else { + HdfsUtils.getLatestPath(path, hadoopConf) + } } else { path } - } else path) + ) match { + case Success(resolvedPath) => resolvedPath + case Failure(_) => path // resolution failed + } } override def toString(): String = "path: " + path + ", sourceType:" + sourceType diff --git a/src/main/scala/com/linkedin/feathr/offline/source/accessor/NonTimeBasedDataSourceAccessor.scala b/src/main/scala/com/linkedin/feathr/offline/source/accessor/NonTimeBasedDataSourceAccessor.scala index 385a0a833..2eaca9db0 100644 --- a/src/main/scala/com/linkedin/feathr/offline/source/accessor/NonTimeBasedDataSourceAccessor.scala +++ b/src/main/scala/com/linkedin/feathr/offline/source/accessor/NonTimeBasedDataSourceAccessor.scala @@ -1,6 +1,6 @@ package com.linkedin.feathr.offline.source.accessor -import com.linkedin.feathr.offline.config.location.{GenericLocation, Jdbc, KafkaEndpoint, PathList, SimplePath} +import com.linkedin.feathr.offline.config.location.{GenericLocation, Jdbc, PathList, SimplePath} import com.linkedin.feathr.offline.source.DataSource import com.linkedin.feathr.offline.source.dataloader.DataLoaderFactory import com.linkedin.feathr.offline.testfwk.TestFwkUtils @@ -28,7 +28,7 @@ private[offline] class NonTimeBasedDataSourceAccessor( override def get(): DataFrame = { println(s"NonTimeBasedDataSourceAccessor loading source ${source.location}") val df = source.location match { - case SimplePath(path) => List(path).map(fileLoaderFactory.create(_).loadDataFrame()).reduce((x, y) => x.fuzzyUnion(y)) + case SimplePath(_) => List(source.path).map(fileLoaderFactory.create(_).loadDataFrame()).reduce((x, y) => x.fuzzyUnion(y)) case PathList(paths) => paths.map(fileLoaderFactory.create(_).loadDataFrame()).reduce((x, y) => x.fuzzyUnion(y)) case Jdbc(_, _, _, _, _) => source.location.loadDf(SparkSession.builder().getOrCreate()) case GenericLocation(_, _) => source.location.loadDf(SparkSession.builder().getOrCreate()) diff --git a/src/main/scala/com/linkedin/feathr/offline/transformation/DataFrameBasedRowEvaluator.scala 
b/src/main/scala/com/linkedin/feathr/offline/transformation/DataFrameBasedRowEvaluator.scala index 0bdb013d1..cc6cba1c7 100644 --- a/src/main/scala/com/linkedin/feathr/offline/transformation/DataFrameBasedRowEvaluator.scala +++ b/src/main/scala/com/linkedin/feathr/offline/transformation/DataFrameBasedRowEvaluator.scala @@ -5,7 +5,9 @@ import com.linkedin.feathr.common.tensor.TensorData import com.linkedin.feathr.common.{AnchorExtractor, FeatureTypeConfig, FeatureTypes, SparkRowExtractor} import com.linkedin.feathr.offline import com.linkedin.feathr.offline.FeatureDataFrame +import com.linkedin.feathr.offline.anchored.anchorExtractor.SimpleConfigurableAnchorExtractor import com.linkedin.feathr.offline.job.{FeatureTransformation, FeatureTypeInferenceContext, TransformedResult} +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.util.FeaturizedDatasetUtils import org.apache.spark.rdd.RDD import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema @@ -32,7 +34,8 @@ private[offline] object DataFrameBasedRowEvaluator { def transform(transformer: AnchorExtractor[_], inputDf: DataFrame, requestedFeatureNameAndPrefix: Seq[(String, String)], - featureTypeConfigs: Map[String, FeatureTypeConfig]): TransformedResult = { + featureTypeConfigs: Map[String, FeatureTypeConfig], + mvelContext: Option[FeathrExpressionExecutionContext]): TransformedResult = { if (!transformer.isInstanceOf[SparkRowExtractor]) { throw new FeathrException(ErrorLabel.FEATHR_USER_ERROR, s"${transformer} must extend SparkRowExtractor.") } @@ -42,7 +45,7 @@ private[offline] object DataFrameBasedRowEvaluator { val featureFormat = FeatureColumnFormat.FDS_TENSOR // features to calculate, if empty, will calculate all features defined in the extractor val selectedFeatureNames = if (requestedFeatureRefString.nonEmpty) requestedFeatureRefString else transformer.getProvidedFeatureNames - val FeatureDataFrame(transformedDF, transformedFeatureTypes) = transformToFDSTensor(extractor, inputDf, selectedFeatureNames, featureTypeConfigs) + val FeatureDataFrame(transformedDF, transformedFeatureTypes) = transformToFDSTensor(extractor, inputDf, selectedFeatureNames, featureTypeConfigs, mvelContext) TransformedResult( // Re-compute the featureNamePrefixPairs because feature names can be coming from the extractor. 
selectedFeatureNames.map((_, featureNamePrefix)), @@ -64,22 +67,27 @@ private[offline] object DataFrameBasedRowEvaluator { private def transformToFDSTensor(rowExtractor: SparkRowExtractor, inputDF: DataFrame, featureRefStrs: Seq[String], - featureTypeConfigs: Map[String, FeatureTypeConfig]): FeatureDataFrame = { + featureTypeConfigs: Map[String, FeatureTypeConfig], + mvelContext: Option[FeathrExpressionExecutionContext]): FeatureDataFrame = { val inputSchema = inputDF.schema val spark = SparkSession.builder().getOrCreate() val featureTypes = featureTypeConfigs.mapValues(_.getFeatureType) val FeatureTypeInferenceContext(featureTypeAccumulators) = FeatureTransformation.getTypeInferenceContext(spark, featureTypes, featureRefStrs) + val transformedRdd = inputDF.rdd.map(row => { - // in some cases, the input dataframe row here only have Row and does not have schema attached, - // while MVEL only works with GenericRowWithSchema, create it manually - val rowWithSchema = if (row.isInstanceOf[GenericRowWithSchema]) { - row.asInstanceOf[GenericRowWithSchema] - } else { - new GenericRowWithSchema(row.toSeq.toArray, inputSchema) - } - val result = rowExtractor.getFeaturesFromRow(rowWithSchema) - val featureValues = featureRefStrs map { + // in some cases, the input dataframe row here is only a Row and does not have a schema attached, + // while MVEL only works with GenericRowWithSchema, so create one manually + val rowWithSchema = if (row.isInstanceOf[GenericRowWithSchema]) { + row.asInstanceOf[GenericRowWithSchema] + } else { + new GenericRowWithSchema(row.toSeq.toArray, inputSchema) + } + if (rowExtractor.isInstanceOf[SimpleConfigurableAnchorExtractor]) { + rowExtractor.asInstanceOf[SimpleConfigurableAnchorExtractor].mvelContext = mvelContext + } + val result = rowExtractor.getFeaturesFromRow(rowWithSchema) + val featureValues = featureRefStrs map { featureRef => if (result.contains(featureRef)) { val featureValue = result(featureRef) @@ -88,7 +96,7 @@ private[offline] object DataFrameBasedRowEvaluator { featureTypeAccumulators(featureRef).add(FeatureTypes.valueOf(rowFeatureType.toString)) } val tensorData: TensorData = featureValue.getAsTensorData() - FeaturizedDatasetUtils.tensorToDataFrameRow(tensorData) + FeaturizedDatasetUtils.tensorToFDSDataFrameRow(tensorData) } else null } Row.merge(row, Row.fromSeq(featureValues)) diff --git a/src/main/scala/com/linkedin/feathr/offline/transformation/DefaultValueSubstituter.scala b/src/main/scala/com/linkedin/feathr/offline/transformation/DefaultValueSubstituter.scala index 366967cc2..bf5d70c75 100644 --- a/src/main/scala/com/linkedin/feathr/offline/transformation/DefaultValueSubstituter.scala +++ b/src/main/scala/com/linkedin/feathr/offline/transformation/DefaultValueSubstituter.scala @@ -112,7 +112,7 @@ private[offline] object DataFrameDefaultValueSubstituter extends DataFrameDefaul // For a tensor default, since we don't have the type, we need to use expr to construct the default column val schema = field.dataType val tensorData = defaultFeatureValue.getAsTensorData - val ts = FeaturizedDatasetUtils.tensorToDataFrameRow(tensorData) + val ts = FeaturizedDatasetUtils.tensorToFDSDataFrameRow(tensorData, Some(schema)) val fdsTensorDefaultUDF = getFDSTensorDefaultUDF(schema, ts) ss.udf.register("tz_udf", fdsTensorDefaultUDF) expr(s"tz_udf($featureColumnName)")
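// Editor's illustrative sketch (not part of this diff): for a simple 1-d tensor default, the
// UDF-based substitution above is equivalent to replacing nulls with a literal array
// (hypothetical feature column "ee" with default [7, 8, 9]):
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.{col, typedLit, when}
def substituteDefaultSketch(df: DataFrame): DataFrame =
  df.withColumn("ee", when(col("ee").isNull, typedLit(Array(7.0f, 8.0f, 9.0f))).otherwise(col("ee")))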
diff --git a/src/main/scala/com/linkedin/feathr/offline/transformation/FDSConversionUtils.scala b/src/main/scala/com/linkedin/feathr/offline/transformation/FDSConversionUtils.scala index 824f48fe3..25d96af11 100644 --- a/src/main/scala/com/linkedin/feathr/offline/transformation/FDSConversionUtils.scala +++ b/src/main/scala/com/linkedin/feathr/offline/transformation/FDSConversionUtils.scala @@ -2,14 +2,13 @@ package com.linkedin.feathr.offline.transformation import com.linkedin.feathr.common.exception.{ErrorLabel, FeathrException} import com.linkedin.feathr.common.tensor.TensorData - -import java.util import com.linkedin.feathr.common.util.CoercionUtils import com.linkedin.feathr.offline.util.FeaturizedDatasetUtils import org.apache.spark.sql.Row import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema import org.apache.spark.sql.types._ +import java.util import scala.collection.JavaConverters._ import scala.collection.convert.Wrappers.JMapWrapper import scala.collection.mutable @@ -37,7 +36,7 @@ private[offline] object FDSConversionUtils { // convert the "raw" input data into an FDS column of a specific dataType rawFeatureValue match { case tensorData: TensorData => - FeaturizedDatasetUtils.tensorToDataFrameRow(tensorData, Some(targetDataType)) + FeaturizedDatasetUtils.tensorToFDSDataFrameRow(tensorData, Some(targetDataType)) case _ => targetDataType match { // Scalar tensor @@ -253,7 +252,13 @@ private[offline] object FDSConversionUtils { case values: util.ArrayList[Any] => values.asScala.toArray case values: mutable.WrappedArray[Any] => - values.asInstanceOf[mutable.WrappedArray[Any]].toArray + if (values.nonEmpty && values(0).isInstanceOf[GenericRowWithSchema]) { + // Assuming the result is returned by an SWA feature with groupBy, keep only the + // feature value as an array and drop the index info. + values.asInstanceOf[mutable.WrappedArray[GenericRowWithSchema]].map(v => v.get(v.size - 1)).toArray + } else { + values.toArray + } case values: List[Any] => values.toArray case mapValues: Map[Integer, Any] =>
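// Editor's illustrative sketch (not part of this diff) of the WrappedArray branch above, assuming
// an SWA groupBy result arrives as rows of (groupIndex, value) and only the trailing value is kept:
// import org.apache.spark.sql.Row
// import scala.collection.mutable
// val grouped: mutable.WrappedArray[Row] = mutable.WrappedArray.make(Array(Row("g1", 1.0f), Row("g2", 2.0f)))
// val valuesOnly = grouped.map(v => v.get(v.size - 1)).toArray  // Array(1.0f, 2.0f)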
diff --git a/src/main/scala/com/linkedin/feathr/offline/util/FeathrTestUtils.scala b/src/main/scala/com/linkedin/feathr/offline/util/FeathrTestUtils.scala index 23fca857f..47af7d5b1 100644 --- a/src/main/scala/com/linkedin/feathr/offline/util/FeathrTestUtils.scala +++ b/src/main/scala/com/linkedin/feathr/offline/util/FeathrTestUtils.scala @@ -1,11 +1,10 @@ package com.linkedin.feathr.offline.util -import org.apache.spark.sql.internal.SQLConf -import Transformations.sortColumns import com.linkedin.feathr.offline.config.datasource.{DataSourceConfigUtils, DataSourceConfigs} -import com.linkedin.feathr.offline.job.FeatureGenJob +import com.linkedin.feathr.offline.util.Transformations.sortColumns import org.apache.avro.generic.GenericRecord import org.apache.spark.SparkConf +import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.{DataFrame, Row, SparkSession} private[offline] object FeathrTestUtils { diff --git a/src/main/scala/com/linkedin/feathr/offline/util/FeatureValueTypeValidator.scala b/src/main/scala/com/linkedin/feathr/offline/util/FeatureValueTypeValidator.scala index ee06f3acd..aec0b1aea 100644 --- a/src/main/scala/com/linkedin/feathr/offline/util/FeatureValueTypeValidator.scala +++ b/src/main/scala/com/linkedin/feathr/offline/util/FeatureValueTypeValidator.scala @@ -16,7 +16,7 @@ private[offline] object FeatureValueTypeValidator { features.foreach { case (key, value) => featureTypeConfigs.get(key).foreach( - featureTypeConfig => FeatureValueTypeValidator.validate(value, featureTypeConfig)) + featureTypeConfig => FeatureValueTypeValidator.validate(key, value, featureTypeConfig)) } } @@ -27,9 +27,9 @@ private[offline] object FeatureValueTypeValidator { * @param featureValue value extracted from data * @param featureTypeConfig user-defined config, optional */ - def validate(featureValue: FeatureValue, featureTypeConfig: Option[FeatureTypeConfig]): Unit = { + def validate(featureValue: FeatureValue, featureTypeConfig: Option[FeatureTypeConfig], featureName: String): Unit = { featureTypeConfig match { - case Some(f) => validate(featureValue, f) + case Some(f) => validate(featureName, featureValue, f) case None => } } @@ -41,31 +41,31 @@ private[offline] object FeatureValueTypeValidator { * @param featureValue value extracted from data * @param featureTypeConfig user-defined config */ - def validate(featureValue: FeatureValue, featureTypeConfig: FeatureTypeConfig): Unit = { + def validate(featureName: String, featureValue: FeatureValue, featureTypeConfig: FeatureTypeConfig): Unit = { val configFeatureTypes = featureTypeConfig.getFeatureType val valueBasicType = featureValue.getFeatureType.getBasicType if (configFeatureTypes != FeatureTypes.UNSPECIFIED) { if (valueBasicType != FeatureType.BasicType.TENSOR || configFeatureTypes != FeatureTypes.TENSOR) { if (configFeatureTypes != FeatureTypes.valueOf(valueBasicType.name)) { - throw new FeathrException(ErrorLabel.FEATHR_USER_ERROR, "The FeatureValue type: " + valueBasicType - + " is not consistent with the type specified in the Feathr config: ." + configFeatureTypes); + throw new FeathrException(ErrorLabel.FEATHR_USER_ERROR, "The FeatureValue type of " + featureName + + " is " + valueBasicType + ", which is not consistent with the type specified in the Feathr config: " + configFeatureTypes); } } else if (featureTypeConfig.getTensorType != null) { val configTensorType = featureTypeConfig.getTensorType val valueTensorType = featureValue.getAsTypedTensor.getType if (configTensorType.getValueType != null && configTensorType.getValueType != valueTensorType.getValueType) { - throw new FeathrException(ErrorLabel.FEATHR_USER_ERROR, "The tensor value type: " + valueTensorType - + " is not consistent with the type specified in the Feathr config: ." + configTensorType); + throw new FeathrException(ErrorLabel.FEATHR_USER_ERROR, "The tensor value type of " + featureName + + " is " + valueTensorType + ", which is not consistent with the type specified in the Feathr config: " + configTensorType); } if (configTensorType.getTensorCategory != null && configTensorType.getTensorCategory != valueTensorType.getTensorCategory) { - throw new FeathrException(ErrorLabel.FEATHR_USER_ERROR, "The tensor category type: " + valueTensorType - + " is not consistent with the type specified in the Feathr config: ." + configTensorType); + throw new FeathrException(ErrorLabel.FEATHR_USER_ERROR, "The tensor category type of " + featureName + " is " + + valueTensorType + ", which is not consistent with the type specified in the Feathr config: " + configTensorType); } if (configTensorType.getDimensionTypes != null && configTensorType.getDimensionTypes != valueTensorType.getDimensionTypes) { - throw new FeathrException(ErrorLabel.FEATHR_USER_ERROR, "The tensor dimension type: " + valueTensorType - + " is not consistent with the type specified in the Feathr config: ." + configTensorType); + throw new FeathrException(ErrorLabel.FEATHR_USER_ERROR, "The tensor dimension type of " + featureName + " is " + + valueTensorType + ", which is not consistent with the type specified in the Feathr config: " 
+ configTensorType); } } } diff --git a/src/main/scala/com/linkedin/feathr/offline/util/FeaturizedDatasetUtils.scala b/src/main/scala/com/linkedin/feathr/offline/util/FeaturizedDatasetUtils.scala index d672cf5f5..534881f7a 100644 --- a/src/main/scala/com/linkedin/feathr/offline/util/FeaturizedDatasetUtils.scala +++ b/src/main/scala/com/linkedin/feathr/offline/util/FeaturizedDatasetUtils.scala @@ -157,7 +157,7 @@ private[offline] object FeaturizedDatasetUtils { * @return the Quince-FDS struct or primitive */ - def tensorToDataFrameRow(tensor: TensorData, targetDataType: Option[DataType] = None): Any = { + def tensorToFDSDataFrameRow(tensor: TensorData, targetDataType: Option[DataType] = None): Any = { tensor match { case null => null case _ => diff --git a/src/main/scala/com/linkedin/feathr/offline/util/SourceUtils.scala b/src/main/scala/com/linkedin/feathr/offline/util/SourceUtils.scala index 211ef7e46..a70c11fd0 100644 --- a/src/main/scala/com/linkedin/feathr/offline/util/SourceUtils.scala +++ b/src/main/scala/com/linkedin/feathr/offline/util/SourceUtils.scala @@ -9,15 +9,16 @@ import com.linkedin.feathr.common.{AnchorExtractor, DateParam} import com.linkedin.feathr.offline.client.InputData import com.linkedin.feathr.offline.config.location.{DataLocation, SimplePath} import com.linkedin.feathr.offline.generation.SparkIOUtils +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.mvel.{MvelContext, MvelUtils} import com.linkedin.feathr.offline.source.SourceFormatType import com.linkedin.feathr.offline.source.SourceFormatType.SourceFormatType +import com.linkedin.feathr.offline.source.dataloader.DataLoaderHandler import com.linkedin.feathr.offline.source.dataloader.hdfs.FileFormat import com.linkedin.feathr.offline.source.dataloader.jdbc.JdbcUtils import com.linkedin.feathr.offline.source.pathutil.{PathChecker, TimeBasedHdfsPathAnalyzer, TimeBasedHdfsPathGenerator} import com.linkedin.feathr.offline.util.AclCheckUtils.getLatestPath import com.linkedin.feathr.offline.util.datetime.OfflineDateTimeUtils -import com.linkedin.feathr.offline.source.dataloader.DataLoaderHandler import org.apache.avro.generic.GenericData.{Array, Record} import org.apache.avro.generic.{GenericDatumReader, GenericRecord, IndexedRecord} import org.apache.avro.io.DecoderFactory @@ -29,14 +30,13 @@ import org.apache.avro.{Schema, SchemaBuilder} import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{FileSystem, Path} import org.apache.hadoop.io.NullWritable -import org.apache.hadoop.mapreduce.Job import org.apache.hadoop.mapred.JobConf +import org.apache.hadoop.mapreduce.Job import org.apache.log4j.Logger import org.apache.spark.rdd.RDD import org.apache.spark.sql._ import org.apache.spark.sql.avro.SchemaConverters import org.apache.spark.sql.types.StructType -import org.codehaus.jackson.JsonNode import org.joda.time.{Days, Hours, Interval, DateTime => JodaDateTime, DateTimeZone => JodaTimeZone} import org.mvel2.MVEL @@ -234,51 +234,6 @@ private[offline] object SourceUtils { field.defaultVal() } - /* Defines a symmetric relationship for two keys regarding to that target fields, for example - * ( (viewerId, vieweeId), affinity ) <=> ( (vieweedId, viewerId), affinity ), so in the dataset, - * they are only stored once on HDFS, here this operation should generate the full data - */ - def getRDDViewSymmKeys(rawRDD: RDD[_], targetFields: Option[Seq[String]], otherFields: Option[Seq[String]] = None): RDD[_] = { - - val symmKeys = targetFields match 
{ - case Some(v: Seq[String]) => v - case None => - throw new FeathrConfigException( - ErrorLabel.FEATHR_USER_ERROR, - s"Trying to get symmetric RDD view. Symmetric keys are not defined. Please provide targetFields fields.") - } - - if (symmKeys.size != 2) { - throw new FeathrConfigException( - ErrorLabel.FEATHR_USER_ERROR, - s"Trying to get symmetric RDD view. Symmetric keys (targetFields) must have size of two, found ${symmKeys.size}." + - s" Please provide the targetFields.") - } - - val otherFeatures = otherFields match { - case Some(v: Seq[String]) => v - case None => - throw new FeathrConfigException( - ErrorLabel.FEATHR_USER_ERROR, - s"Trying to get symmetric RDD view. Oother feature fields are not defined. Please provide other feature fields.") - } - - val allFields = (otherFeatures ++ symmKeys).distinct - val extractorForFields = extractorForFieldNames(allFields) - - val rddView = rawRDD.flatMap(record => { - val extractedRecord: Map[String, Any] = extractorForFields(record) - val symmKeyVal0 = extractedRecord(symmKeys(0)) - val symmKeyVal1 = extractedRecord(symmKeys(1)) - // to create the symmetric version of the data (swapping the two keys) - // procedure: remove the original keys from the Map and then add the symmetric pairs - val extractedRecordDup = extractedRecord - symmKeys(0) - symmKeys(1) + (symmKeys(0) -> symmKeyVal1, symmKeys(1) -> symmKeyVal0) - Seq(extractedRecord.asJava, extractedRecordDup.asJava) - }) - - rddView - } - /** * Get the needed fact/feature dataset for a feature anchor as a DataFrame. * @param ss Spark Session @@ -435,7 +390,7 @@ private[offline] object SourceUtils { /* * Given a sequence of field names, return the corresponding field, must be the top level */ - private def extractorForFieldNames(allFields: Seq[String]): Any => Map[String, Any] = { + private def extractorForFieldNames(allFields: Seq[String], mvelContext: Option[FeathrExpressionExecutionContext]): Any => Map[String, Any] = { val compiledExpressionMap = allFields .map( fieldName => @@ -446,7 +401,7 @@ private[offline] object SourceUtils { compiledExpressionMap .mapValues(expression => { MvelContext.ensureInitialized() - MvelUtils.executeExpression(expression, record, null) + MvelUtils.executeExpression(expression, record, null, "", mvelContext) }) .collect { case (name, Some(value)) => (name, value) } .toMap @@ -697,11 +652,7 @@ private[offline] object SourceUtils { ss.read.format("csv").option("header", "true").option("delimiter", csvDelimiterOption).load(inputData.inputPath) } case _ => { - if (ss.sparkContext.isLocal){ - getLocalDF(ss, inputData.inputPath, dataLoaderHandlers) - } else { - loadAsDataFrame(ss, SimplePath(inputData.inputPath),dataLoaderHandlers) - } + loadAsDataFrame(ss, SimplePath(inputData.inputPath),dataLoaderHandlers) } } } diff --git a/src/test/java/com/linkedin/feathr/offline/plugins/AlienFeatureValueTypeAdaptor.java b/src/test/java/com/linkedin/feathr/offline/plugins/AlienFeatureValueTypeAdaptor.java index fd771fe73..bbe48f850 100644 --- a/src/test/java/com/linkedin/feathr/offline/plugins/AlienFeatureValueTypeAdaptor.java +++ b/src/test/java/com/linkedin/feathr/offline/plugins/AlienFeatureValueTypeAdaptor.java @@ -4,7 +4,9 @@ import com.linkedin.feathr.common.types.NumericFeatureType; import com.linkedin.feathr.offline.mvel.plugins.FeatureValueTypeAdaptor; -public class AlienFeatureValueTypeAdaptor implements FeatureValueTypeAdaptor<AlienFeatureValue> { +import java.io.Serializable; + +public class AlienFeatureValueTypeAdaptor implements FeatureValueTypeAdaptor<AlienFeatureValue>, Serializable { 
@Override public FeatureValue toFeathrFeatureValue(AlienFeatureValue other) { if (other.isFloat()) { diff --git a/src/test/resources/avro/2022/09/15/part-00000-a5fbb15b-11b1-4a96-9fb0-28f7b77de928-c000.avro b/src/test/resources/avro/2022/09/15/part-00000-a5fbb15b-11b1-4a96-9fb0-28f7b77de928-c000.avro new file mode 100644 index 000000000..2823d5087 Binary files /dev/null and b/src/test/resources/avro/2022/09/15/part-00000-a5fbb15b-11b1-4a96-9fb0-28f7b77de928-c000.avro differ diff --git a/src/test/resources/avro/2022/09/15/part-00001-a5fbb15b-11b1-4a96-9fb0-28f7b77de928-c000.avro b/src/test/resources/avro/2022/09/15/part-00001-a5fbb15b-11b1-4a96-9fb0-28f7b77de928-c000.avro new file mode 100644 index 000000000..4b0549fdd Binary files /dev/null and b/src/test/resources/avro/2022/09/15/part-00001-a5fbb15b-11b1-4a96-9fb0-28f7b77de928-c000.avro differ diff --git a/src/test/scala/com/linkedin/feathr/offline/AnchoredFeaturesIntegTest.scala b/src/test/scala/com/linkedin/feathr/offline/AnchoredFeaturesIntegTest.scala index 3ca387b55..3735c0f9f 100644 --- a/src/test/scala/com/linkedin/feathr/offline/AnchoredFeaturesIntegTest.scala +++ b/src/test/scala/com/linkedin/feathr/offline/AnchoredFeaturesIntegTest.scala @@ -5,8 +5,6 @@ import com.linkedin.feathr.common.exception.FeathrConfigException import com.linkedin.feathr.offline.config.location.SimplePath import com.linkedin.feathr.offline.generation.SparkIOUtils import com.linkedin.feathr.offline.job.PreprocessedDataFrameManager -import com.linkedin.feathr.offline.mvel.plugins.FeathrMvelPluginContext -import com.linkedin.feathr.offline.plugins.{AlienFeatureValue, AlienFeatureValueTypeAdaptor} import com.linkedin.feathr.offline.source.dataloader.{AvroJsonDataLoader, CsvDataLoader} import com.linkedin.feathr.offline.util.FeathrTestUtils import org.apache.spark.sql.Row @@ -60,6 +58,16 @@ class AnchoredFeaturesIntegTest extends FeathrIntegTest { | type: "DENSE_VECTOR" | default: [7,8,9] | } + | ee2: { + | def: "c" + | type: { + | type: TENSOR + | tensorCategory: DENSE + | dimensionType: [INT] + | valType: FLOAT + | } + | default: [] + | } | ff: { | def: "c" | default: [6,7] @@ -157,7 +165,7 @@ class AnchoredFeaturesIntegTest extends FeathrIntegTest { */ @Test def testSingleKeyJoinWithDifferentFeatureTypes(): Unit = { - val selectedColumns = Seq("x", "aa", "bb", "cc", "dd", "ee", "ff", "multiply_a_b", "categorical_b") // , "z") + val selectedColumns = Seq("x", "aa", "bb", "cc", "dd", "ee", "ee2", "ff", "multiply_a_b", "categorical_b") // , "z") val featureJoinConf = s""" | @@ -188,6 +196,8 @@ class AnchoredFeaturesIntegTest extends FeathrIntegTest { null, // ee mutable.WrappedArray.make(Array(7.0f, 8.0f, 9.0f)), + // ee2 + mutable.WrappedArray.empty, // ff mutable.WrappedArray.make(Array(6.0f, 7.0f)), // multiply_a_b @@ -209,6 +219,8 @@ class AnchoredFeaturesIntegTest extends FeathrIntegTest { mutable.WrappedArray.make(Array(1.0f, 2.0f, 3.0f)), // ee mutable.WrappedArray.make(Array(1.0f, 2.0f, 3.0f)), + // ee2 + mutable.WrappedArray.make(Array(1.0f, 2.0f, 3.0f)), // ff mutable.WrappedArray.make(Array(1.0f, 2.0f, 3.0f)), // multiply_a_b @@ -230,6 +242,8 @@ class AnchoredFeaturesIntegTest extends FeathrIntegTest { mutable.WrappedArray.make(Array(4.0f, 5.0f, 6.0f)), // ee mutable.WrappedArray.make(Array(4.0f, 5.0f, 6.0f)), + // ee2 + mutable.WrappedArray.make(Array(4.0f, 5.0f, 6.0f)), // ff mutable.WrappedArray.make(Array(4.0f, 5.0f, 6.0f)), // multiply_a_b @@ -248,6 +262,7 @@ class AnchoredFeaturesIntegTest extends FeathrIntegTest { StructField("cc", FloatType, 
true), StructField("dd", ArrayType(FloatType, true), true), StructField("ee", ArrayType(FloatType, false), true), + StructField("ee2", ArrayType(FloatType, false), true), StructField("ff", ArrayType(FloatType, false), true), StructField( "multiply_a_b", @@ -469,7 +484,16 @@ class AnchoredFeaturesIntegTest extends FeathrIntegTest { | |derivations: { | f_trip_time_distance: { - | definition: "f_trip_distance * f_trip_time_duration" + | definition: "f_trip_distance * f_trip_time_duration" + | type: NUMERIC + | } + | f_trip_time_distance_sql: { + | key: [trip] + | inputs: { + | trip_distance: { key: [trip], feature: f_trip_distance } + | trip_time_duration: { key: [trip], feature: f_trip_time_duration } + | } + | definition.sqlExpr: "trip_distance * trip_time_duration" | type: NUMERIC | } |} @@ -499,7 +523,8 @@ class AnchoredFeaturesIntegTest extends FeathrIntegTest { |featureList: [ | { | key: DOLocationID - | featureList: [f_location_avg_fare, f_trip_time_distance, f_trip_distance, f_trip_time_duration, f_is_long_trip_distance, f_day_of_week] + | featureList: [f_location_avg_fare, f_trip_time_distance, f_trip_distance, + | f_trip_time_duration, f_is_long_trip_distance, f_day_of_week, f_trip_time_distance_sql] | } |] """.stripMargin diff --git a/src/test/scala/com/linkedin/feathr/offline/DerivationsIntegTest.scala b/src/test/scala/com/linkedin/feathr/offline/DerivationsIntegTest.scala new file mode 100644 index 000000000..94e92e06d --- /dev/null +++ b/src/test/scala/com/linkedin/feathr/offline/DerivationsIntegTest.scala @@ -0,0 +1,146 @@ +package com.linkedin.feathr.offline + +import com.linkedin.feathr.offline.util.FeathrTestUtils.assertDataFrameApproximatelyEquals +import org.apache.spark.sql.Row +import org.apache.spark.sql.types._ +import org.testng.annotations.Test + +class DerivationsIntegTest extends FeathrIntegTest { + + /** + * Test multi-key derived feature and multi-tagged feature. 
+ * This test covers the following: + * -> SQL-based custom extractor + */ + @Test + def testMultiKeyDerivedFeatureDFWithSQL: Unit = { + val df = runLocalFeatureJoinForTest( + joinConfigAsString = """ + | features: [ { + | key: ["concat('',viewer)", viewee] + | featureList: [ "foo_square_distance_sql"] + | } , + | { + | key: [viewee, viewer] + | featureList: [ "foo_square_distance_sql"] + | }, + | { + | key: [viewee, viewer] + | featureList: [ "square_fooFeature_sql"] + | } + | ] + """.stripMargin, + featureDefAsString = """ + | anchors: { + | anchor1: { + | source: anchorAndDerivations/derivations/anchor6-source.csv + | key.sqlExpr: [sourceId, destId] + | features: { + | fooFeature: { + | def.sqlExpr: cast(source as int) + | type: NUMERIC + | } + | } + | } + | } + | derivations: { + | + | square_fooFeature_sql: { + | key: [m1, m2] + | inputs: { + | a: { key: [m1, m2], feature: fooFeature } + | } + | definition.sqlExpr: "a * a" + | } + | foo_square_distance_sql: { + | key: [m1, m2] + | inputs: { + | a1: { key: [m1, m2], feature: square_fooFeature_sql } + | a2: { key: [m2, m1], feature: square_fooFeature_sql } + | } + | definition.sqlExpr: "a1 - a2" + | } + | } + """.stripMargin, + observationDataPath = "anchorAndDerivations/derivations/test2-observations.csv") + + val expectedDf = ss.createDataFrame( + ss.sparkContext.parallelize( + Seq( + Row( + // viewer + "1", + // viewee + "3", + // label + "1.0", + // square_fooFeature_sql + 4.0f, + // viewee_viewer__foo_square_distance_sql + -21.0f, + // concat____viewer__viewee__foo_square_distance_sql + 21.0f), + Row( + // viewer + "2", + // viewee + "1", + // label + "-1.0", + // square_fooFeature_sql + 9.0f, + // viewee_viewer__foo_square_distance_sql + -27.0f, + // concat____viewer__viewee__foo_square_distance_sql + 27.0f), + Row( + // viewer + "3", + // viewee + "6", + // label + "1.0", + // square_fooFeature_sql + null, + // viewee_viewer__foo_square_distance_sql + null, + // concat____viewer__viewee__foo_square_distance_sql + null), + Row( + // viewer + "3", + // viewee + "5", + // label + "-1.0", + // square_fooFeature_sql + null, + // viewee_viewer__foo_square_distance_sql + null, + // concat____viewer__viewee__foo_square_distance_sql + null), + Row( + // viewer + "5", + // viewee + "10", + // label + "1.0", + // square_fooFeature_sql + null, + // viewee_viewer__foo_square_distance_sql + null, + // concat____viewer__viewee__foo_square_distance_sql + null))), + StructType( + List( + StructField("viewer", StringType, true), + StructField("viewee", StringType, true), + StructField("label", StringType, true), + StructField("square_fooFeature_sql", FloatType, true), + StructField("viewee_viewer__foo_square_distance_sql", FloatType, true), + StructField("concat____viewer__viewee__foo_square_distance_sql", FloatType, true)))) + def cmpFunc(row: Row): String = if (row.get(0) != null) row.get(0).toString else "null" + assertDataFrameApproximatelyEquals(df.data, expectedDf, cmpFunc) + } +} diff --git a/src/test/scala/com/linkedin/feathr/offline/FeathrIntegTest.scala b/src/test/scala/com/linkedin/feathr/offline/FeathrIntegTest.scala index dc6078d13..13bf5578e 100644 --- a/src/test/scala/com/linkedin/feathr/offline/FeathrIntegTest.scala +++ b/src/test/scala/com/linkedin/feathr/offline/FeathrIntegTest.scala @@ -2,6 +2,7 @@ package com.linkedin.feathr.offline import com.linkedin.feathr.common.TaggedFeatureName import com.linkedin.feathr.offline.job.{LocalFeatureGenJob, LocalFeatureJoinJob} +import 
com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.util.{FeathrTestUtils, SparkFeaturizedDataset} import org.apache.hadoop.conf.Configuration import org.apache.spark.sql.SparkSession @@ -36,8 +37,9 @@ abstract class FeathrIntegTest extends TestFeathr { joinConfigAsString: String, featureDefAsString: String, observationDataPath: String, - extraParams: Array[String] = Array()): SparkFeaturizedDataset = { - LocalFeatureJoinJob.joinWithHoconJoinConfig(joinConfigAsString, featureDefAsString, observationDataPath, extraParams, dataPathHandlers=List()) + extraParams: Array[String] = Array(), + mvelContext: Option[FeathrExpressionExecutionContext] = None): SparkFeaturizedDataset = { + LocalFeatureJoinJob.joinWithHoconJoinConfig(joinConfigAsString, featureDefAsString, observationDataPath, extraParams, dataPathHandlers=List(), mvelContext=mvelContext) } def getOrCreateSparkSession: SparkSession = { diff --git a/src/test/scala/com/linkedin/feathr/offline/TestFeathr.scala b/src/test/scala/com/linkedin/feathr/offline/TestFeathr.scala index bb451c6a5..f052663e3 100644 --- a/src/test/scala/com/linkedin/feathr/offline/TestFeathr.scala +++ b/src/test/scala/com/linkedin/feathr/offline/TestFeathr.scala @@ -4,7 +4,7 @@ import com.linkedin.feathr.common import com.linkedin.feathr.common.JoiningFeatureParams import com.linkedin.feathr.offline.client.FeathrClient import com.linkedin.feathr.offline.config.{FeathrConfig, FeathrConfigLoader} -import com.linkedin.feathr.offline.mvel.plugins.FeathrMvelPluginContext +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.plugins.{AlienFeatureValue, AlienFeatureValueTypeAdaptor} import com.linkedin.feathr.offline.util.FeathrTestUtils import org.apache.avro.generic.GenericRecord @@ -23,14 +23,16 @@ abstract class TestFeathr extends TestNGSuite { protected var feathr: FeathrClient = _ val FeathrFeatureNamePrefix = "__feathr_feature_" protected var feathrConfigLoader: FeathrConfig = FeathrConfigLoader() + + private val mvelContext = new FeathrExpressionExecutionContext() import org.apache.log4j.{Level, Logger} Logger.getLogger("org").setLevel(Level.OFF) Logger.getLogger("akka").setLevel(Level.OFF) @BeforeClass def setup(): Unit = { - FeathrMvelPluginContext.addFeatureTypeAdaptor(classOf[AlienFeatureValue], new AlienFeatureValueTypeAdaptor) setupSpark() + mvelContext.setupExecutorMvelContext(classOf[AlienFeatureValue], new AlienFeatureValueTypeAdaptor(), ss.sparkContext) } /** diff --git a/src/test/scala/com/linkedin/feathr/offline/TestFeathrUdfPlugins.scala b/src/test/scala/com/linkedin/feathr/offline/TestFeathrUdfPlugins.scala index 68ead2408..63637a989 100644 --- a/src/test/scala/com/linkedin/feathr/offline/TestFeathrUdfPlugins.scala +++ b/src/test/scala/com/linkedin/feathr/offline/TestFeathrUdfPlugins.scala @@ -4,7 +4,7 @@ import com.linkedin.feathr.common.FeatureTypes import com.linkedin.feathr.offline.anchored.keyExtractor.AlienSourceKeyExtractorAdaptor import com.linkedin.feathr.offline.client.plugins.FeathrUdfPluginContext import com.linkedin.feathr.offline.derived.AlienDerivationFunctionAdaptor -import com.linkedin.feathr.offline.mvel.plugins.FeathrMvelPluginContext +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.plugins.{AlienFeatureValue, AlienFeatureValueTypeAdaptor} import com.linkedin.feathr.offline.util.FeathrTestUtils import org.apache.spark.sql.Row @@ -16,11 +16,12 @@ 
class TestFeathrUdfPlugins extends FeathrIntegTest { val MULTILINE_QUOTE = "\"\"\"" + private val mvelContext = new FeathrExpressionExecutionContext() @Test def testMvelUdfPluginSupport: Unit = { - FeathrMvelPluginContext.addFeatureTypeAdaptor(classOf[AlienFeatureValue], new AlienFeatureValueTypeAdaptor()) - FeathrUdfPluginContext.registerUdfAdaptor(new AlienDerivationFunctionAdaptor()) - FeathrUdfPluginContext.registerUdfAdaptor(new AlienSourceKeyExtractorAdaptor()) + mvelContext.setupExecutorMvelContext(classOf[AlienFeatureValue], new AlienFeatureValueTypeAdaptor(), ss.sparkContext) + FeathrUdfPluginContext.registerUdfAdaptor(new AlienDerivationFunctionAdaptor(), ss.sparkContext) + FeathrUdfPluginContext.registerUdfAdaptor(new AlienSourceKeyExtractorAdaptor(), ss.sparkContext) val df = runLocalFeatureJoinForTest( joinConfigAsString = """ | features: { @@ -107,7 +108,8 @@ class TestFeathrUdfPlugins extends FeathrIntegTest { | } |} """.stripMargin, - observationDataPath = "anchorAndDerivations/testMVELLoopExpFeature-observations.csv") + observationDataPath = "anchorAndDerivations/testMVELLoopExpFeature-observations.csv", + mvelContext = Some(mvelContext)) val f8Type = df.fdsMetadata.header.get.featureInfoMap.filter(_._1.getFeatureName == "f8").head._2.featureType.getFeatureType assertEquals(f8Type, FeatureTypes.NUMERIC) diff --git a/src/test/scala/com/linkedin/feathr/offline/client/TestDataFrameColName.scala b/src/test/scala/com/linkedin/feathr/offline/client/TestDataFrameColName.scala index 1f7cf3d5c..540cae74a 100644 --- a/src/test/scala/com/linkedin/feathr/offline/client/TestDataFrameColName.scala +++ b/src/test/scala/com/linkedin/feathr/offline/client/TestDataFrameColName.scala @@ -1,9 +1,12 @@ package com.linkedin.feathr.offline.client -import com.linkedin.feathr.common.{DateParam, JoiningFeatureParams, TaggedFeatureName} +import com.linkedin.feathr.common.{DateParam, FeatureTypeConfig, JoiningFeatureParams, TaggedFeatureName} import com.linkedin.feathr.offline.TestFeathr +import com.linkedin.feathr.offline.anchored.feature.{FeatureAnchor, FeatureAnchorWithSource} import org.apache.spark.sql.Row import org.apache.spark.sql.types.{DoubleType, StringType, StructField, StructType} +import org.mockito.Mockito.when +import org.scalatest.mockito.MockitoSugar.mock import org.testng.Assert.assertEquals import org.testng.annotations.Test @@ -59,4 +62,26 @@ class TestDataFrameColName extends TestFeathr { val taggedFeature3 = new TaggedFeatureName("x", "seq_join_a_names") assertEquals(taggedFeatureToNewColumnNameMap(taggedFeature3)._2, "seq_join_a_names") } + + @Test(description = "Inferred feature type should be honored when user does not provide feature type") + def testGenerateHeader(): Unit = { + val mockFeatureAnchor = mock[FeatureAnchor] + // Mock the case where the user does not define a feature type + when(mockFeatureAnchor.featureTypeConfigs).thenReturn(Map.empty[String, FeatureTypeConfig]) + + val mockFeatureAnchorWithSource = mock[FeatureAnchorWithSource] + when(mockFeatureAnchorWithSource.featureAnchor).thenReturn(mockFeatureAnchor) + val taggedFeatureName = new TaggedFeatureName("id", "f") + val featureToColumnNameMap: Map[TaggedFeatureName, String] = Map(taggedFeatureName -> "f") + val allAnchoredFeatures: Map[String, FeatureAnchorWithSource] = Map("f" -> mockFeatureAnchorWithSource) + // Mock the case where the type is inferred to be numeric + val inferredFeatureTypeConfigs: Map[String, FeatureTypeConfig] = Map("f" -> FeatureTypeConfig.NUMERIC_TYPE_CONFIG) + val header = DataFrameColName.generateHeader( 
featureToColumnNameMap, + allAnchoredFeatures, + Map(), + inferredFeatureTypeConfigs) + // output should be using the inferred type, i.e. numeric + assertEquals(header.featureInfoMap.get(taggedFeatureName).get.featureType, FeatureTypeConfig.NUMERIC_TYPE_CONFIG) + } } diff --git a/src/test/scala/com/linkedin/feathr/offline/derived/TestSequentialJoinAsDerivation.scala b/src/test/scala/com/linkedin/feathr/offline/derived/TestSequentialJoinAsDerivation.scala index fd9f2d147..33e4ac822 100644 --- a/src/test/scala/com/linkedin/feathr/offline/derived/TestSequentialJoinAsDerivation.scala +++ b/src/test/scala/com/linkedin/feathr/offline/derived/TestSequentialJoinAsDerivation.scala @@ -10,6 +10,7 @@ import com.linkedin.feathr.offline.derived.strategies.SequentialJoinAsDerivation import com.linkedin.feathr.offline.job.FeatureTransformation.FEATURE_NAME_PREFIX import com.linkedin.feathr.offline.join.algorithms.{SeqJoinExplodedJoinKeyColumnAppender, SequentialJoinConditionBuilder, SparkJoinWithJoinCondition} import com.linkedin.feathr.offline.logical.FeatureGroups +import com.linkedin.feathr.offline.mvel.plugins.FeathrExpressionExecutionContext import com.linkedin.feathr.offline.{TestFeathr, TestUtils} import org.apache.log4j.{Level, Logger} import org.apache.spark.SparkException @@ -25,7 +26,7 @@ import org.apache.spark.sql.internal.SQLConf class TestSequentialJoinAsDerivation extends TestFeathr with MockitoSugar { Logger.getLogger("org").setLevel(Level.OFF) Logger.getLogger("akka").setLevel(Level.OFF) - + val mvelContext = new FeathrExpressionExecutionContext() private def getSampleEmployeeDF = { val schema = { StructType( @@ -997,7 +998,7 @@ class TestSequentialJoinAsDerivation extends TestFeathr with MockitoSugar { when(mockBaseTaggedDependency.outputKey).thenReturn(Some(Seq("outputKey1", "outputKey2"))) when(mockTaggedDependency.key).thenReturn(Seq("expansionKey1")) - seqJoinDerivations(Seq(0, 1, 2), Seq("keyTag1", "keyTag2", "keyTag3", "keyTag4"), ss.emptyDataFrame, mockDerivedFeature, mockDerivationFunction) + seqJoinDerivations(Seq(0, 1, 2), Seq("keyTag1", "keyTag2", "keyTag3", "keyTag4"), ss.emptyDataFrame, mockDerivedFeature, mockDerivationFunction, Some(mvelContext)) } /** @@ -1071,7 +1072,7 @@ class TestSequentialJoinAsDerivation extends TestFeathr with MockitoSugar { when(mockBaseTaggedDependency.outputKey).thenReturn(Some(Seq("outputKey1"))) when(mockTaggedDependency.key).thenReturn(Seq("expansionKey1")) - seqJoinDerivations(Seq(0, 1, 2), Seq("keyTag1", "keyTag2", "keyTag3", "keyTag4"), ss.emptyDataFrame, mockDerivedFeature, mockDerivationFunction) + seqJoinDerivations(Seq(0, 1, 2), Seq("keyTag1", "keyTag2", "keyTag3", "keyTag4"), ss.emptyDataFrame, mockDerivedFeature, mockDerivationFunction, Some(mvelContext)) } /** diff --git a/src/test/scala/com/linkedin/feathr/offline/join/workflow/TestAnchoredFeatureJoinStep.scala b/src/test/scala/com/linkedin/feathr/offline/join/workflow/TestAnchoredFeatureJoinStep.scala index dc699276d..f9ec6d50e 100644 --- a/src/test/scala/com/linkedin/feathr/offline/join/workflow/TestAnchoredFeatureJoinStep.scala +++ b/src/test/scala/com/linkedin/feathr/offline/join/workflow/TestAnchoredFeatureJoinStep.scala @@ -50,7 +50,7 @@ class TestAnchoredFeatureJoinStep extends TestFeathr with MockitoSugar { val mockAnchorStepInput = mock[AnchorJoinStepInput] when(mockAnchorStepInput.observation).thenReturn(ss.emptyDataFrame) val basicAnchoredFeatureJoinStep = - AnchoredFeatureJoinStep(SqlTransformedLeftJoinKeyColumnAppender, IdentityJoinKeyColumnAppender, 
SparkJoinWithJoinCondition(EqualityJoinConditionBuilder)) + AnchoredFeatureJoinStep(SqlTransformedLeftJoinKeyColumnAppender, IdentityJoinKeyColumnAppender, SparkJoinWithJoinCondition(EqualityJoinConditionBuilder), None) val FeatureDataFrameOutput(FeatureDataFrame(outputDF, inferredFeatureType)) = basicAnchoredFeatureJoinStep.joinFeatures(Seq(ErasedEntityTaggedFeature(Seq(0), "featureName1")), mockAnchorStepInput)(mockExecutionContext) @@ -77,7 +77,7 @@ class TestAnchoredFeatureJoinStep extends TestFeathr with MockitoSugar { KeyedTransformedResult(Seq("joinKey1", "joinKey2"), mockTransformedResult), KeyedTransformedResult(Seq("joinKey2", "joinKey3"), mockTransformedResult)) val basicAnchoredFeatureJoinStep = - AnchoredFeatureJoinStep(SqlTransformedLeftJoinKeyColumnAppender, IdentityJoinKeyColumnAppender, SparkJoinWithJoinCondition(EqualityJoinConditionBuilder)) + AnchoredFeatureJoinStep(SqlTransformedLeftJoinKeyColumnAppender, IdentityJoinKeyColumnAppender, SparkJoinWithJoinCondition(EqualityJoinConditionBuilder), None) basicAnchoredFeatureJoinStep.joinFeaturesOnSingleDF( Seq(0), Seq("leftJoinKeyColumn"), @@ -103,7 +103,7 @@ class TestAnchoredFeatureJoinStep extends TestFeathr with MockitoSugar { // observation DF val leftDF = getDefaultDataFrame() val basicAnchoredFeatureJoinStep = - AnchoredFeatureJoinStep(SqlTransformedLeftJoinKeyColumnAppender, IdentityJoinKeyColumnAppender, SparkJoinWithJoinCondition(EqualityJoinConditionBuilder)) + AnchoredFeatureJoinStep(SqlTransformedLeftJoinKeyColumnAppender, IdentityJoinKeyColumnAppender, SparkJoinWithJoinCondition(EqualityJoinConditionBuilder), None) val resultDF = basicAnchoredFeatureJoinStep.joinFeaturesOnSingleDF(Seq(0), Seq("x"), leftDF, (Seq("feature1", "feature2"), keyedTransformedResults))( mockExecutionContext) resultDF.show() @@ -141,7 +141,7 @@ class TestAnchoredFeatureJoinStep extends TestFeathr with MockitoSugar { // observation DF val leftDF = getDefaultDataFrame() - val basicAnchoredFeatureJoinStep = AnchoredFeatureJoinStep(SqlTransformedLeftJoinKeyColumnAppender, IdentityJoinKeyColumnAppender, mockJoiner) + val basicAnchoredFeatureJoinStep = AnchoredFeatureJoinStep(SqlTransformedLeftJoinKeyColumnAppender, IdentityJoinKeyColumnAppender, mockJoiner, None) val resultDF = basicAnchoredFeatureJoinStep.joinFeaturesOnSingleDF(Seq(0), Seq("x"), leftDF, (Seq("feature1", "feature2"), keyedTransformedResults))( mockExecutionContext) // Verify that the joiner was called by validating an empty DataFrame was indeed returned diff --git a/src/test/scala/com/linkedin/feathr/offline/source/accessor/TestDataSourceAccessor.scala b/src/test/scala/com/linkedin/feathr/offline/source/accessor/TestDataSourceAccessor.scala index b56053c56..ae0939a22 100644 --- a/src/test/scala/com/linkedin/feathr/offline/source/accessor/TestDataSourceAccessor.scala +++ b/src/test/scala/com/linkedin/feathr/offline/source/accessor/TestDataSourceAccessor.scala @@ -3,7 +3,7 @@ package com.linkedin.feathr.offline.source.accessor import com.linkedin.feathr.offline.TestFeathr import com.linkedin.feathr.offline.TestUtils.createDailyInterval import com.linkedin.feathr.offline.source.{DataSource, SourceFormatType} -import org.testng.Assert.assertTrue +import org.testng.Assert.{assertEquals, assertTrue} import org.testng.annotations.{BeforeClass, Test} class TestDataSourceAccessor extends TestFeathr { @@ -62,4 +62,14 @@ class TestDataSourceAccessor extends TestFeathr { val accessor = DataSourceAccessor(ss=ss, source=source, dateIntervalOpt=sourceInterval, expectDatumType=None, 
failOnMissingPartition = false, dataPathHandlers=List()) assertTrue(accessor.isInstanceOf[NonTimeBasedDataSourceAccessor]) } + + @Test(description = "test loading dataframe with BatchDataLoader having #LATEST in its path") + def testBatchDataLoaderWithLatestPath() : Unit = { + val path = "src/test/resources/avro/#LATEST/#LATEST/#LATEST" + val source = DataSource(path, SourceFormatType.FIXED_PATH) + val accessor = DataSourceAccessor(ss=ss, source=source, dateIntervalOpt=sourceInterval, + expectDatumType=None, failOnMissingPartition = false, dataPathHandlers=List()) + assertTrue(accessor.isInstanceOf[NonTimeBasedDataSourceAccessor]) + assertEquals(accessor.get().count(), 10L) + } } diff --git a/src/test/scala/com/linkedin/feathr/offline/util/TestDataSource.scala b/src/test/scala/com/linkedin/feathr/offline/util/TestDataSource.scala index cb7360d11..e3fb8d244 100644 --- a/src/test/scala/com/linkedin/feathr/offline/util/TestDataSource.scala +++ b/src/test/scala/com/linkedin/feathr/offline/util/TestDataSource.scala @@ -1,9 +1,9 @@ package com.linkedin.feathr.offline.util import java.time._ - import com.linkedin.feathr.offline.TestFeathr import com.linkedin.feathr.offline.TestUtils.createIntervalFromLocalTime +import com.linkedin.feathr.offline.config.location.SimplePath import com.linkedin.feathr.offline.source.accessor.DataSourceAccessor import com.linkedin.feathr.offline.source.{DataSource, SourceFormatType} import com.linkedin.feathr.offline.util.datetime.OfflineDateTimeUtils @@ -82,4 +82,11 @@ class TestDataSource extends TestFeathr { assertEquals(schema.getFields.get(0).name(), "obsCol1") assertEquals(schema.getFields.get(1).name(), "f1NumericType") } + + @Test(description = "Test resolve latest") + def testResolveLatest(): Unit = { + val path = SimplePath("src/test/resources/decayTest/daily/#LATEST/#LATEST/#LATEST") + assertEquals(new DataSource(path, SourceFormatType.FIXED_PATH, None, None).path, + "src/test/resources/decayTest/daily/2019/05/20") + } } diff --git a/src/test/scala/com/linkedin/feathr/offline/util/TestFDSConversionUtil.scala b/src/test/scala/com/linkedin/feathr/offline/util/TestFDSConversionUtil.scala index 3ab94e616..f3b75024e 100644 --- a/src/test/scala/com/linkedin/feathr/offline/util/TestFDSConversionUtil.scala +++ b/src/test/scala/com/linkedin/feathr/offline/util/TestFDSConversionUtil.scala @@ -3,18 +3,17 @@ package com.linkedin.feathr.offline.util import com.linkedin.feathr.common.TensorUtils import com.linkedin.feathr.common.tensor.{TensorType, Tensors} import com.linkedin.feathr.common.types.PrimitiveType - -import java.util -import java.util.Collections import com.linkedin.feathr.offline.AssertFeatureUtils import com.linkedin.feathr.offline.transformation.FDSConversionUtils import org.apache.spark.sql.Row -import org.apache.spark.sql.catalyst.expressions.GenericRow +import org.apache.spark.sql.catalyst.expressions.{GenericRow, GenericRowWithSchema} import org.apache.spark.sql.types._ import org.scalatest.testng.TestNGSuite import org.testng.Assert.{assertEquals, assertTrue} import org.testng.annotations.{DataProvider, Test} +import java.util +import java.util.Collections import scala.collection.mutable class TestFDSConversionUtil extends TestNGSuite { @@ -141,10 +140,18 @@ class TestFDSConversionUtil extends TestNGSuite { @DataProvider def dataForTestConvertRawValueTo1DFDSDenseTensorRowTz(): Array[Array[Any]] = { + val eleType = StructType( + StructField("group", IntegerType, false) :: + StructField("value", IntegerType, false) :: Nil + ) + val row1 = new 
GenericRowWithSchema(Array(1, 3), eleType) + val row2 = new GenericRowWithSchema(Array(2, 4), eleType) Array( Array(mutable.WrappedArray.make(Array(2.0f, 6.0f)), util.Arrays.asList(2.0f, 6.0f).toArray), Array(Array(1.1).toList, util.Arrays.asList(1.1).toArray), - Array(Map("a" -> 1.1), util.Arrays.asList(1.1).toArray) + Array(Map("a" -> 1.1), util.Arrays.asList(1.1).toArray), + // Simulate the raw value returned by an SWA feature with groupBy + Array(mutable.WrappedArray.make(Array(row1, row2)), util.Arrays.asList(3, 4).toArray) ) } @Test(dataProvider = "dataForTestConvertRawValueTo1DFDSDenseTensorRowTz") diff --git a/src/test/scala/com/linkedin/feathr/offline/util/TestFeatureValueTypeValidator.scala b/src/test/scala/com/linkedin/feathr/offline/util/TestFeatureValueTypeValidator.scala index 1e9bae9b7..bda25b1cc 100644 --- a/src/test/scala/com/linkedin/feathr/offline/util/TestFeatureValueTypeValidator.scala +++ b/src/test/scala/com/linkedin/feathr/offline/util/TestFeatureValueTypeValidator.scala @@ -45,7 +45,7 @@ class TestFeatureValueTypeValidator extends TestFeathr { new FeatureValue(value, valueFeatureType.asInstanceOf[FeatureTypes]); } val featureTypeConfig = new FeatureTypeConfig(configFeatureTypes.asInstanceOf[FeatureTypes], configTensorType.asInstanceOf[TensorType], null) - FeatureValueTypeValidator.validate(featureValue, featureTypeConfig) + FeatureValueTypeValidator.validate("", featureValue, featureTypeConfig) } @DataProvider(name = "failTestCases") @@ -75,7 +75,7 @@ class TestFeatureValueTypeValidator extends TestFeathr { new FeatureValue(value, valueFeatureType.asInstanceOf[FeatureTypes]); } val featureTypeConfig = new FeatureTypeConfig(configFeatureTypes.asInstanceOf[FeatureTypes], configTensorType.asInstanceOf[TensorType], null) - FeatureValueTypeValidator.validate(featureValue, featureTypeConfig) + FeatureValueTypeValidator.validate("", featureValue, featureTypeConfig) } diff --git a/ui/.editorconfig b/ui/.editorconfig new file mode 100644 index 000000000..b5e435a15 --- /dev/null +++ b/ui/.editorconfig @@ -0,0 +1,10 @@ +# http://editorconfig.org +root = true + +[*] +charset=utf-8 +end_of_line=lf +insert_final_newline=false +indent_style=space +indent_size=2 + diff --git a/ui/.env.development b/ui/.env.development new file mode 100644 index 000000000..0c6c0e061 --- /dev/null +++ b/ui/.env.development @@ -0,0 +1,3 @@ +REACT_APP_AZURE_TENANT_ID=common +REACT_APP_API_ENDPOINT=http://127.0.0.1:8000 +REACT_APP_ENABLE_RBAC=false diff --git a/ui/.eslintrc b/ui/.eslintrc index 2a16ad386..c271bfa24 100644 --- a/ui/.eslintrc +++ b/ui/.eslintrc @@ -4,7 +4,15 @@ "es6": true, "node": true }, - "plugins": ["@typescript-eslint/eslint-plugin"], + "plugins": ["react", "@typescript-eslint/eslint-plugin", "prettier"], + "settings": { + "import/resolver": { + "node": { + "extensions": [".tsx", ".ts", ".jsx", ".js", ".json"] + }, + "typescript": {} + } + }, "extends": [ // https://github.com/eslint/eslint/blob/main/conf/eslint-recommended.js "eslint:recommended", @@ -12,7 +20,8 @@ "react-app", // https://reactjs.org/docs/hooks-rules.html "plugin:react-hooks/recommended", - "prettier" + "plugin:prettier/recommended", + "plugin:json/recommended" ], "parser": "@typescript-eslint/parser", "parserOptions": { @@ -20,7 +29,22 @@ "sourceType": "module" }, "rules": { - "dot-notation": "error" + "dot-notation": "error", + "import/extensions": [ + "error", + "ignorePackages", + { + "ts": "never", + "tsx": "never", + "js": "never", + "jsx": "never" + } + ], + "import/no-extraneous-dependencies": ["error", { 
"devDependencies": true }], + "import/prefer-default-export": "off", + "import/no-unresolved": "error", + "import/no-dynamic-require": "off", + "import/no-mutable-exports": "warn" }, "overrides": [ { diff --git a/ui/.vscode/settings.json b/ui/.vscode/settings.json index 24fe97ae7..5fffcb522 100644 --- a/ui/.vscode/settings.json +++ b/ui/.vscode/settings.json @@ -4,5 +4,15 @@ }, "editor.defaultFormatter": "esbenp.prettier-vscode", "editor.formatOnSave": true, - "eslint.workingDirectories": [{ "mode": "auto" }] + "eslint.workingDirectories": [ + { + "mode": "auto" + } + ], + "[css]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "[javascript]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + } } diff --git a/ui/README.md b/ui/README.md index fb3e708aa..459293d1e 100644 --- a/ui/README.md +++ b/ui/README.md @@ -61,14 +61,19 @@ npm run lint:fix This command will automatically fix all problems that can be fixed, and list the rest problems requires manual fix. -#### Formatting +#### Formatting with Prettier -If Prettier is installed, vscode will pick up configuration from [.prettierrc](.prettierrc) file and automatically format code on save. To format code for entire code base, simply run: +Prettier is an opinionated code formatter for Typescript. It removes all original styling and ensures that all outputted code conforms to a consistent style. If Prettier is installed, vscode will pick up configuration from [.prettierrc](.prettierrc) file and automatically format code on save. To format code for entire code base, simply run: ``` npm run format ``` +In Feathr UI, `npx prettier` is already registered as a npm task. +``` +"format": "npx prettier --write src/**" +``` + #### Formatting automatically on commit [Husky](https://github.com/typicode/husky) is used to lint commit changes as a git hook. Prettier is configured to run on staged files in husky git hook. This prevents anything with formatting errors to be committed. diff --git a/ui/craco.config.js b/ui/craco.config.js new file mode 100644 index 000000000..e44884899 --- /dev/null +++ b/ui/craco.config.js @@ -0,0 +1,79 @@ +const path = require("path"); + +const { loaderByName } = require("@craco/craco"); +const CracoLessPlugin = require("craco-less"); + +const webpack = require("webpack"); + +const packageJson = require("./package.json"); + +const resolve = (dir) => path.resolve(__dirname, dir); + +const currentTime = new Date(); + +module.exports = { + babel: { + plugins: [ + [ + "import", + { + libraryName: "antd", + libraryDirectory: "es", + style: true, + }, + ], + ], + }, + webpack: { + alias: { + "@": resolve("src"), + }, + configure: (webpackConfig, { env, paths }) => { + const index = webpackConfig.plugins.findIndex( + (itme) => itme instanceof webpack.DefinePlugin + ); + + if (index > -1) { + const definePlugin = webpackConfig.plugins[index]; + webpackConfig.plugins.splice( + index, + 1, + new webpack.DefinePlugin({ + "process.env": { + ...definePlugin.definitions["process.env"], + FEATHR_VERSION: JSON.stringify(packageJson.version), + FEATHR_GENERATED_TIME: JSON.stringify(currentTime.toISOString()), + }, + }) + ); + } + + return webpackConfig; + }, + }, + plugins: [ + { + plugin: CracoLessPlugin, + options: { + lessLoaderOptions: { + lessOptions: { + modifyVars: {}, + javascriptEnabled: true, + }, + }, + modifyLessModuleRule(lessModuleRule, context) { + // Configure the file suffix + lessModuleRule.test = /\.module\.less$/; + + // Configure the generated local ident name. 
+ const cssLoader = lessModuleRule.use.find(loaderByName("css-loader")); + cssLoader.options.modules = { + localIdentName: "[local]_[hash:base64:5]", + }; + + return lessModuleRule; + }, + }, + }, + ], +}; diff --git a/ui/package-lock.json b/ui/package-lock.json index b3a0d27d8..f3996256b 100644 --- a/ui/package-lock.json +++ b/ui/package-lock.json @@ -1,25 +1,30 @@ { "name": "feathr-ui", - "version": "0.1.0", + "version": "0.9.0-rc2", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "feathr-ui", - "version": "0.1.0", + "version": "0.9.0-rc2", "dependencies": { + "@ant-design/icons": "^4.7.0", "@azure/msal-browser": "^2.24.0", "@azure/msal-react": "^1.4.0", - "antd": "^4.20.2", + "antd": "^4.23.6", "axios": "^0.27.2", + "classnames": "^2.3.2", "dagre": "^0.8.5", + "dayjs": "^1.11.5", "react": "^17.0.2", "react-dom": "^17.0.2", "react-flow-renderer": "^9.7.4", "react-query": "^3.38.0", + "react-resizable": "^3.0.4", "react-router-dom": "^6.3.0" }, "devDependencies": { + "@craco/craco": "^7.0.0-alpha.8", "@testing-library/jest-dom": "^5.16.3", "@testing-library/react": "^12.1.4", "@testing-library/user-event": "^13.5.0", @@ -28,17 +33,25 @@ "@types/node": "^16.11.26", "@types/react": "^17.0.43", "@types/react-dom": "^17.0.14", + "@types/react-resizable": "^3.0.3", "@typescript-eslint/eslint-plugin": "^5.30.7", "@typescript-eslint/parser": "^5.30.7", + "babel-plugin-import": "^1.13.5", + "craco-less": "^2.1.0-alpha.0", "eslint": "^8.20.0", "eslint-config-prettier": "^8.5.0", + "eslint-import-resolver-typescript": "^3.5.1", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-json": "^3.1.0", + "eslint-plugin-prettier": "^4.2.1", "eslint-plugin-react-hooks": "^4.6.0", "husky": "^8.0.1", "lint-staged": "^13.0.3", "prettier": "2.7.1", "react-scripts": "5.0.0", "typescript": "^4.6.3", - "web-vitals": "^2.1.4" + "web-vitals": "^2.1.4", + "webpack": "^5.72.0" } }, "node_modules/@ampproject/remapping": { @@ -62,7 +75,8 @@ }, "node_modules/@ant-design/icons": { "version": "4.7.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/@ant-design/icons/-/icons-4.7.0.tgz", + "integrity": "sha512-aoB4Z7JA431rt6d4u+8xcNPPCrdufSRMUOpxa1ab6mz1JCQZOEVolj2WVs/tDFmN62zzK30mNelEsprLYsSF3g==", "dependencies": { "@ant-design/colors": "^6.0.0", "@ant-design/icons-svg": "^4.2.1", @@ -83,14 +97,15 @@ "license": "MIT" }, "node_modules/@ant-design/react-slick": { - "version": "0.28.4", - "license": "MIT", + "version": "0.29.2", + "resolved": "https://registry.npmjs.org/@ant-design/react-slick/-/react-slick-0.29.2.tgz", + "integrity": "sha512-kgjtKmkGHa19FW21lHnAfyyH9AAoh35pBdcJ53rHmQ3O+cfFHGHnUbj/HFrRNJ5vIts09FKJVAD8RpaC+RaWfA==", "dependencies": { "@babel/runtime": "^7.10.4", "classnames": "^2.2.5", "json2mq": "^0.2.0", "lodash": "^4.17.21", - "resize-observer-polyfill": "^1.5.0" + "resize-observer-polyfill": "^1.5.1" }, "peerDependencies": { "react": ">=16.9.0" @@ -1935,10 +1950,11 @@ } }, "node_modules/@babel/runtime": { - "version": "7.17.9", - "license": "MIT", + "version": "7.20.0", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.20.0.tgz", + "integrity": "sha512-NDYdls71fTXoU8TZHfbBWg7DiZfNzClcKui/+kyi6ppD2L1qnWW3VV6CjtaBXSUGGhiTWJ6ereOIkUvenif66Q==", "dependencies": { - "regenerator-runtime": "^0.13.4" + "regenerator-runtime": "^0.13.10" }, "engines": { "node": ">=6.9.0" @@ -2006,6 +2022,43 @@ "dev": true, "license": "MIT" }, + "node_modules/@craco/craco": { + "version": "7.0.0-alpha.8", + "resolved": 
"https://registry.npmjs.org/@craco/craco/-/craco-7.0.0-alpha.8.tgz", + "integrity": "sha512-IN3/ldPaktGflPu342cg7n8LYa2c3x9H2XzngUkDzTjro25ig1GyVcUdnG1U0X6wrRTF9K1AxZ5su9jLbdyFUw==", + "dev": true, + "dependencies": { + "autoprefixer": "^10.4.12", + "cosmiconfig": "^7.0.1", + "cosmiconfig-typescript-loader": "^4.1.1", + "cross-spawn": "^7.0.3", + "lodash": "^4.17.21", + "semver": "^7.3.7", + "webpack-merge": "^5.8.0" + }, + "bin": { + "craco": "dist/bin/craco.js" + }, + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "react-scripts": "^5.0.0" + } + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "peer": true, + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/@csstools/normalize.css": { "version": "12.0.0", "dev": true, @@ -2768,6 +2821,32 @@ "node": ">= 8" } }, + "node_modules/@pkgr/utils": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/@pkgr/utils/-/utils-2.3.1.tgz", + "integrity": "sha512-wfzX8kc1PMyUILA+1Z/EqoE4UCXGy0iRGMhPwdfae1+f0OXlLqCk+By+aMzgJBzR9AzS4CDizioG6Ss1gvAFJw==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "is-glob": "^4.0.3", + "open": "^8.4.0", + "picocolors": "^1.0.0", + "tiny-glob": "^0.2.9", + "tslib": "^2.4.0" + }, + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" + } + }, + "node_modules/@pkgr/utils/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==", + "dev": true + }, "node_modules/@pmmmwh/react-refresh-webpack-plugin": { "version": "0.5.5", "dev": true, @@ -3245,6 +3324,34 @@ "node": ">=10.13.0" } }, + "node_modules/@tsconfig/node10": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz", + "integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==", + "dev": true, + "peer": true + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true, + "peer": true + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true, + "peer": true + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.3.tgz", + "integrity": "sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==", + "dev": true, + "peer": true + }, "node_modules/@types/aria-query": { "version": "4.2.2", "dev": true, @@ -3505,6 +3612,15 @@ "redux": "^4.0.0" } }, + "node_modules/@types/react-resizable": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/react-resizable/-/react-resizable-3.0.3.tgz", + "integrity": 
"sha512-W/QsUOZoXBAIBQNhNm95A5ohoaiUA874lWQytO2UP9dOjp5JHO9+a0cwYNabea7sA12ZDJnGVUFZxcNaNksAWA==", + "dev": true, + "dependencies": { + "@types/react": "*" + } + }, "node_modules/@types/resolve": { "version": "1.17.1", "dev": true, @@ -4464,52 +4580,53 @@ } }, "node_modules/antd": { - "version": "4.20.2", - "license": "MIT", + "version": "4.23.6", + "resolved": "https://registry.npmjs.org/antd/-/antd-4.23.6.tgz", + "integrity": "sha512-AYH57cWBDe1ChtbnvG8i9dpKG4WnjE3AG0zIKpXByFNnxsr4saV6/19ihE8/ImSGpohN4E2zTXmo7R5/MyVRKQ==", "dependencies": { "@ant-design/colors": "^6.0.0", "@ant-design/icons": "^4.7.0", - "@ant-design/react-slick": "~0.28.1", - "@babel/runtime": "^7.12.5", + "@ant-design/react-slick": "~0.29.1", + "@babel/runtime": "^7.18.3", "@ctrl/tinycolor": "^3.4.0", "classnames": "^2.2.6", "copy-to-clipboard": "^3.2.0", "lodash": "^4.17.21", "memoize-one": "^6.0.0", "moment": "^2.29.2", - "rc-cascader": "~3.5.0", + "rc-cascader": "~3.7.0", "rc-checkbox": "~2.3.0", - "rc-collapse": "~3.1.0", - "rc-dialog": "~8.8.1", - "rc-drawer": "~4.4.2", - "rc-dropdown": "~3.5.0", - "rc-field-form": "~1.26.1", - "rc-image": "~5.6.0", - "rc-input": "~0.0.1-alpha.5", - "rc-input-number": "~7.3.0", - "rc-mentions": "~1.7.0", - "rc-menu": "~9.5.5", - "rc-motion": "^2.5.1", + "rc-collapse": "~3.3.0", + "rc-dialog": "~8.9.0", + "rc-drawer": "~5.1.0", + "rc-dropdown": "~4.0.0", + "rc-field-form": "~1.27.0", + "rc-image": "~5.7.0", + "rc-input": "~0.1.2", + "rc-input-number": "~7.3.9", + "rc-mentions": "~1.10.0", + "rc-menu": "~9.6.3", + "rc-motion": "^2.6.1", "rc-notification": "~4.6.0", - "rc-pagination": "~3.1.9", - "rc-picker": "~2.6.4", - "rc-progress": "~3.2.1", + "rc-pagination": "~3.1.17", + "rc-picker": "~2.6.11", + "rc-progress": "~3.3.2", "rc-rate": "~2.9.0", "rc-resize-observer": "^1.2.0", - "rc-segmented": "~2.1.0 ", - "rc-select": "~14.1.1", + "rc-segmented": "~2.1.0", + "rc-select": "~14.1.13", "rc-slider": "~10.0.0", "rc-steps": "~4.1.0", "rc-switch": "~3.2.0", - "rc-table": "~7.24.0", - "rc-tabs": "~11.13.0", - "rc-textarea": "~0.3.0", - "rc-tooltip": "~5.1.1", - "rc-tree": "~5.5.0", - "rc-tree-select": "~5.3.0", + "rc-table": "~7.26.0", + "rc-tabs": "~12.2.0", + "rc-textarea": "~0.4.5", + "rc-tooltip": "~5.2.0", + "rc-tree": "~5.7.0", + "rc-tree-select": "~5.5.0", "rc-trigger": "^5.2.10", "rc-upload": "~4.3.0", - "rc-util": "^5.20.0", + "rc-util": "^5.22.5", "scroll-into-view-if-needed": "^2.2.25" }, "funding": { @@ -4579,7 +4696,8 @@ }, "node_modules/array-tree-filter": { "version": "2.1.0", - "license": "MIT" + "resolved": "https://registry.npmjs.org/array-tree-filter/-/array-tree-filter-2.1.0.tgz", + "integrity": "sha512-4ROwICNlNw/Hqa9v+rk5h22KjmzB1JGTMVKP2AKJBOCgb0yL0ASf0+YvCcLNNwquOHNX48jkeZIJ3a+oOQqKcw==" }, "node_modules/array-union": { "version": "2.1.0", @@ -4647,8 +4765,9 @@ "license": "MIT" }, "node_modules/async-validator": { - "version": "4.1.1", - "license": "MIT" + "version": "4.2.5", + "resolved": "https://registry.npmjs.org/async-validator/-/async-validator-4.2.5.tgz", + "integrity": "sha512-7HhHjtERjqlNbZtqNqy2rckN/SpOOlmDliet+lP7k+eKZEjPk3DgyeU9lIXLdeLz0uBbbVp+9Qdow9wJWgwwfg==" }, "node_modules/asynckit": { "version": "0.4.0", @@ -4674,7 +4793,9 @@ } }, "node_modules/autoprefixer": { - "version": "10.4.7", + "version": "10.4.12", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.12.tgz", + "integrity": "sha512-WrCGV9/b97Pa+jtwf5UGaRjgQIg7OK3D06GnoYoZNcG1Xb8Gt3EfuKjlhh9i/VtT16g6PYjZ69jdJ2g8FxSC4Q==", "dev": true, "funding": [ { @@ -4686,10 
+4807,9 @@ "url": "https://tidelift.com/funding/github/npm/autoprefixer" } ], - "license": "MIT", "dependencies": { - "browserslist": "^4.20.3", - "caniuse-lite": "^1.0.30001335", + "browserslist": "^4.21.4", + "caniuse-lite": "^1.0.30001407", "fraction.js": "^4.2.0", "normalize-range": "^0.1.2", "picocolors": "^1.0.0", @@ -4836,6 +4956,15 @@ "object.assign": "^4.1.0" } }, + "node_modules/babel-plugin-import": { + "version": "1.13.5", + "resolved": "https://registry.npmjs.org/babel-plugin-import/-/babel-plugin-import-1.13.5.tgz", + "integrity": "sha512-IkqnoV+ov1hdJVofly9pXRJmeDm9EtROfrc5i6eII0Hix2xMs5FEm8FG3ExMvazbnZBbgHIt6qdO8And6lCloQ==", + "dev": true, + "dependencies": { + "@babel/helper-module-imports": "^7.0.0" + } + }, "node_modules/babel-plugin-istanbul": { "version": "6.1.1", "dev": true, @@ -5146,7 +5275,9 @@ "license": "BSD-2-Clause" }, "node_modules/browserslist": { - "version": "4.20.3", + "version": "4.21.4", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.4.tgz", + "integrity": "sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==", "dev": true, "funding": [ { @@ -5158,13 +5289,11 @@ "url": "https://tidelift.com/funding/github/npm/browserslist" } ], - "license": "MIT", "dependencies": { - "caniuse-lite": "^1.0.30001332", - "electron-to-chromium": "^1.4.118", - "escalade": "^3.1.1", - "node-releases": "^2.0.3", - "picocolors": "^1.0.0" + "caniuse-lite": "^1.0.30001400", + "electron-to-chromium": "^1.4.251", + "node-releases": "^2.0.6", + "update-browserslist-db": "^1.0.9" }, "bin": { "browserslist": "cli.js" @@ -5270,7 +5399,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001336", + "version": "1.0.30001422", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001422.tgz", + "integrity": "sha512-hSesn02u1QacQHhaxl/kNMZwqVG35Sz/8DgvmgedxSH8z9UUpcDYSPYgsj3x5dQNRcNp6BwpSfQfVzYUTm+fog==", "dev": true, "funding": [ { @@ -5281,8 +5412,7 @@ "type": "tidelift", "url": "https://tidelift.com/funding/github/npm/caniuse-lite" } - ], - "license": "CC-BY-4.0" + ] }, "node_modules/case-sensitive-paths-webpack-plugin": { "version": "2.4.0", @@ -5385,8 +5515,9 @@ "license": "MIT" }, "node_modules/classnames": { - "version": "2.3.1", - "license": "MIT" + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.3.2.tgz", + "integrity": "sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw==" }, "node_modules/clean-css": { "version": "5.3.0", @@ -5484,6 +5615,20 @@ "wrap-ansi": "^7.0.0" } }, + "node_modules/clone-deep": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", + "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", + "dev": true, + "dependencies": { + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.2", + "shallow-clone": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/clsx": { "version": "1.1.1", "license": "MIT", @@ -5591,9 +5736,10 @@ "license": "MIT" }, "node_modules/colord": { - "version": "2.9.2", - "dev": true, - "license": "MIT" + "version": "2.9.3", + "resolved": "https://registry.npmjs.org/colord/-/colord-2.9.3.tgz", + "integrity": "sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==", + "dev": true }, "node_modules/colorette": { "version": "2.0.16", @@ -5757,6 +5903,18 @@ "dev": true, "license": "MIT" }, + 
"node_modules/copy-anything": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/copy-anything/-/copy-anything-2.0.6.tgz", + "integrity": "sha512-1j20GZTsvKNkc4BY3NpMOM8tt///wY3FpIzozTOFO2ffuZcV61nojHXVKIy3WM+7ADCy5FVhdZYHYDdgTU0yJw==", + "dev": true, + "dependencies": { + "is-what": "^3.14.1" + }, + "funding": { + "url": "https://github.com/sponsors/mesqueeb" + } + }, "node_modules/copy-to-clipboard": { "version": "3.3.1", "license": "MIT", @@ -5825,6 +5983,43 @@ "node": ">=10" } }, + "node_modules/cosmiconfig-typescript-loader": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-4.1.1.tgz", + "integrity": "sha512-9DHpa379Gp0o0Zefii35fcmuuin6q92FnLDffzdZ0l9tVd3nEobG3O+MZ06+kuBvFTSVScvNb/oHA13Nd4iipg==", + "dev": true, + "engines": { + "node": ">=12", + "npm": ">=6" + }, + "peerDependencies": { + "@types/node": "*", + "cosmiconfig": ">=7", + "ts-node": ">=10", + "typescript": ">=3" + } + }, + "node_modules/craco-less": { + "version": "2.1.0-alpha.0", + "resolved": "https://registry.npmjs.org/craco-less/-/craco-less-2.1.0-alpha.0.tgz", + "integrity": "sha512-1kj9Y7Y06Fbae3SJJtz1OvXsaKxjh0jTOwnvzKWOqrojQZbwC2K/d0dxDRUpHTDkIUmxbdzqMmI4LM9JfthQ6Q==", + "dev": true, + "dependencies": { + "less": "^4.1.1", + "less-loader": "^7.3.0" + }, + "peerDependencies": { + "@craco/craco": ">7.0.0-alpha", + "react-scripts": "^5.0.0" + } + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "peer": true + }, "node_modules/cross-spawn": { "version": "7.0.3", "dev": true, @@ -6342,8 +6537,9 @@ } }, "node_modules/date-fns": { - "version": "2.28.0", - "license": "MIT", + "version": "2.29.3", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.29.3.tgz", + "integrity": "sha512-dDCnyH2WnnKusqvZZ6+jA1O51Ibt8ZMRNkDZdyAyK4YfbDwa/cEmuztzG5pk6hqlp9aSBPYcjOlktquahGwGeA==", "engines": { "node": ">=0.11" }, @@ -6353,8 +6549,9 @@ } }, "node_modules/dayjs": { - "version": "1.11.1", - "license": "MIT" + "version": "1.11.5", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.5.tgz", + "integrity": "sha512-CAdX5Q3YW3Gclyo5Vpqkgpj8fSdLQcRuzfX6mC6Phy0nfJ0eGYOeS7m4mt2plDWLAtA4TqTakvbboHvUxfe4iA==" }, "node_modules/debug": { "version": "4.3.4", @@ -6528,6 +6725,16 @@ "dev": true, "license": "Apache-2.0" }, + "node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "peer": true, + "engines": { + "node": ">=0.3.1" + } + }, "node_modules/diff-sequences": { "version": "27.5.1", "dev": true, @@ -6731,9 +6938,10 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.4.134", - "dev": true, - "license": "ISC" + "version": "1.4.284", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.284.tgz", + "integrity": "sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA==", + "dev": true }, "node_modules/emittery": { "version": "0.8.1", @@ -6768,9 +6976,10 @@ } }, "node_modules/enhanced-resolve": { - "version": "5.9.3", + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz", + "integrity": 
"sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==", "dev": true, - "license": "MIT", "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" @@ -6787,6 +6996,19 @@ "url": "https://github.com/fb55/entities?sponsor=1" } }, + "node_modules/errno": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz", + "integrity": "sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==", + "dev": true, + "optional": true, + "dependencies": { + "prr": "~1.0.1" + }, + "bin": { + "errno": "cli.js" + } + }, "node_modules/error-ex": { "version": "1.3.2", "dev": true, @@ -7059,6 +7281,62 @@ "ms": "^2.1.1" } }, + "node_modules/eslint-import-resolver-typescript": { + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.5.1.tgz", + "integrity": "sha512-U7LUjNJPYjNsHvAUAkt/RU3fcTSpbllA0//35B4eLYTX74frmOepbt7F7J3D1IGtj9k21buOpaqtDd4ZlS/BYQ==", + "dev": true, + "dependencies": { + "debug": "^4.3.4", + "enhanced-resolve": "^5.10.0", + "get-tsconfig": "^4.2.0", + "globby": "^13.1.2", + "is-core-module": "^2.10.0", + "is-glob": "^4.0.3", + "synckit": "^0.8.3" + }, + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" + }, + "peerDependencies": { + "eslint": "*", + "eslint-plugin-import": "*" + } + }, + "node_modules/eslint-import-resolver-typescript/node_modules/globby": { + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/globby/-/globby-13.1.2.tgz", + "integrity": "sha512-LKSDZXToac40u8Q1PQtZihbNdTYSNMuWe+K5l+oa6KgDzSvVrHXlJy40hUP522RjAIoNLJYBJi7ow+rbFpIhHQ==", + "dev": true, + "dependencies": { + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.11", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint-import-resolver-typescript/node_modules/slash": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", + "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/eslint-module-utils": { "version": "2.7.3", "dev": true, @@ -7159,8 +7437,9 @@ }, "node_modules/eslint-plugin-import": { "version": "2.26.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz", + "integrity": "sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==", "dev": true, - "license": "MIT", "dependencies": { "array-includes": "^3.1.4", "array.prototype.flat": "^1.2.5", @@ -7230,6 +7509,19 @@ } } }, + "node_modules/eslint-plugin-json": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-json/-/eslint-plugin-json-3.1.0.tgz", + "integrity": "sha512-MrlG2ynFEHe7wDGwbUuFPsaT2b1uhuEFhJ+W1f1u+1C2EkXmTYJp4B1aAdQQ8M+CC3t//N/oRKiIVw14L2HR1g==", + "dev": true, + "dependencies": { + "lodash": "^4.17.21", + "vscode-json-languageservice": "^4.1.6" + }, + "engines": { + "node": ">=12.0" + } + }, "node_modules/eslint-plugin-jsx-a11y": { "version": "6.5.1", "dev": true, @@ -7267,6 +7559,27 @@ "node": ">=6.0" } }, + "node_modules/eslint-plugin-prettier": { + "version": 
"4.2.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz", + "integrity": "sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==", + "dev": true, + "dependencies": { + "prettier-linter-helpers": "^1.0.0" + }, + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "eslint": ">=7.28.0", + "prettier": ">=2.0.0" + }, + "peerDependenciesMeta": { + "eslint-config-prettier": { + "optional": true + } + } + }, "node_modules/eslint-plugin-react": { "version": "7.29.4", "dev": true, @@ -7716,10 +8029,17 @@ "version": "3.1.3", "license": "MIT" }, + "node_modules/fast-diff": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz", + "integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==", + "dev": true + }, "node_modules/fast-glob": { - "version": "3.2.11", + "version": "3.2.12", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", + "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", "dev": true, - "license": "MIT", "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", @@ -8249,6 +8569,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/get-tsconfig": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.2.0.tgz", + "integrity": "sha512-X8u8fREiYOE6S8hLbq99PeykTDoLVnxvF4DjWKJmz9xy2nNRdUcV8ZN9tniJFeKyTU3qnC9lL8n4Chd6LmVKHg==", + "dev": true, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, "node_modules/glob": { "version": "7.2.0", "license": "ISC", @@ -8326,6 +8655,12 @@ "node": ">=4" } }, + "node_modules/globalyzer": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/globalyzer/-/globalyzer-0.1.0.tgz", + "integrity": "sha512-40oNTM9UfG6aBmuKxk/giHn5nQ8RVz/SS4Ir6zgzOv9/qC3kKZ9v4etGTcJbEl/NyVQH7FGU7d+X1egr57Md2Q==", + "dev": true + }, "node_modules/globby": { "version": "11.1.0", "dev": true, @@ -8345,6 +8680,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/globrex": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/globrex/-/globrex-0.1.2.tgz", + "integrity": "sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==", + "dev": true + }, "node_modules/graceful-fs": { "version": "4.2.10", "dev": true, @@ -8485,11 +8826,6 @@ "wbuf": "^1.1.0" } }, - "node_modules/hpack.js/node_modules/isarray": { - "version": "1.0.0", - "dev": true, - "license": "MIT" - }, "node_modules/hpack.js/node_modules/readable-stream": { "version": "2.3.7", "dev": true, @@ -8784,6 +9120,19 @@ "node": ">= 4" } }, + "node_modules/image-size": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/image-size/-/image-size-0.5.5.tgz", + "integrity": "sha512-6TDAlDPZxUFCv+fuOkIoXT/V/f3Qbq8e37p+YOiYrUv3v9cc3/6x78VdfPgFVaB9dZYeLUfKgHRebpkm/oP2VQ==", + "dev": true, + "optional": true, + "bin": { + "image-size": "bin/image-size.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/immer": { "version": "9.0.12", "dev": true, @@ -8934,9 +9283,10 @@ } }, "node_modules/is-core-module": { - "version": "2.9.0", + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz", + "integrity": 
"sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==", "dev": true, - "license": "MIT", "dependencies": { "has": "^1.0.3" }, @@ -9064,6 +9414,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "dev": true, + "dependencies": { + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/is-potential-custom-element-name": { "version": "1.0.1", "dev": true, @@ -9166,6 +9528,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-what": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/is-what/-/is-what-3.14.1.tgz", + "integrity": "sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA==", + "dev": true + }, "node_modules/is-wsl": { "version": "2.2.0", "dev": true, @@ -9177,11 +9545,26 @@ "node": ">=8" } }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true + }, "node_modules/isexe": { "version": "2.0.0", "dev": true, "license": "ISC" }, + "node_modules/isobject": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/istanbul-lib-coverage": { "version": "3.2.0", "dev": true, @@ -10525,7 +10908,8 @@ }, "node_modules/json2mq": { "version": "0.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/json2mq/-/json2mq-0.2.0.tgz", + "integrity": "sha512-SzoRg7ux5DWTII9J2qkrZrqV1gt+rTaoufMxEzXbS26Uid0NwaJd123HcoB80TgubEppxxIGdNxCx50fEoEWQA==", "dependencies": { "string-convert": "^0.2.0" } @@ -10541,6 +10925,12 @@ "node": ">=6" } }, + "node_modules/jsonc-parser": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.0.tgz", + "integrity": "sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==", + "dev": true + }, "node_modules/jsonfile": { "version": "6.1.0", "dev": true, @@ -10609,10 +10999,88 @@ "language-subtag-registry": "~0.3.2" } }, - "node_modules/leven": { - "version": "3.1.0", + "node_modules/less": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/less/-/less-4.1.3.tgz", + "integrity": "sha512-w16Xk/Ta9Hhyei0Gpz9m7VS8F28nieJaL/VyShID7cYvP6IL5oHeL6p4TXSDJqZE/lNv0oJ2pGVjJsRkfwm5FA==", "dev": true, - "license": "MIT", + "dependencies": { + "copy-anything": "^2.0.1", + "parse-node-version": "^1.0.1", + "tslib": "^2.3.0" + }, + "bin": { + "lessc": "bin/lessc" + }, + "engines": { + "node": ">=6" + }, + "optionalDependencies": { + "errno": "^0.1.1", + "graceful-fs": "^4.1.2", + "image-size": "~0.5.0", + "make-dir": "^2.1.0", + "mime": "^1.4.1", + "needle": "^3.1.0", + "source-map": "~0.6.0" + } + }, + "node_modules/less-loader": { + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/less-loader/-/less-loader-7.3.0.tgz", + "integrity": "sha512-Mi8915g7NMaLlgi77mgTTQvK022xKRQBIVDSyfl3ErTuBhmZBQab0mjeJjNNqGbdR+qrfTleKXqbGI4uEFavxg==", + "dev": 
true, + "dependencies": { + "klona": "^2.0.4", + "loader-utils": "^2.0.0", + "schema-utils": "^3.0.0" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "less": "^3.5.0 || ^4.0.0", + "webpack": "^4.0.0 || ^5.0.0" + } + }, + "node_modules/less/node_modules/make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "optional": true, + "dependencies": { + "pify": "^4.0.1", + "semver": "^5.6.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/less/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "optional": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/less/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==", + "dev": true + }, + "node_modules/leven": { + "version": "3.1.0", + "dev": true, + "license": "MIT", "engines": { "node": ">=6" } @@ -10865,9 +11333,10 @@ } }, "node_modules/loader-utils": { - "version": "2.0.2", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.3.tgz", + "integrity": "sha512-THWqIsn8QRnvLl0shHYVBN9syumU8pYWEHPTmkiVGd+7K5eFNVSY6AJhRvgGF70gg1Dz+l/k8WicvFCxdEs60A==", "dev": true, - "license": "MIT", "dependencies": { "big.js": "^5.2.2", "emojis-list": "^3.0.0", @@ -11038,6 +11507,13 @@ "semver": "bin/semver.js" } }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "peer": true + }, "node_modules/makeerror": { "version": "1.0.12", "dev": true, @@ -11294,6 +11770,47 @@ "dev": true, "license": "MIT" }, + "node_modules/needle": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/needle/-/needle-3.1.0.tgz", + "integrity": "sha512-gCE9weDhjVGCRqS8dwDR/D3GTAeyXLXuqp7I8EzH6DllZGXSUyxuqqLh+YX9rMAWaaTFyVAg6rHGL25dqvczKw==", + "dev": true, + "optional": true, + "dependencies": { + "debug": "^3.2.6", + "iconv-lite": "^0.6.3", + "sax": "^1.2.4" + }, + "bin": { + "needle": "bin/needle" + }, + "engines": { + "node": ">= 4.4.x" + } + }, + "node_modules/needle/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "optional": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/needle/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "optional": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/negotiator": { "version": "0.6.3", "dev": true, @@ 
-11335,9 +11852,10 @@ "license": "MIT" }, "node_modules/node-releases": { - "version": "2.0.4", - "dev": true, - "license": "MIT" + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.6.tgz", + "integrity": "sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==", + "dev": true }, "node_modules/normalize-path": { "version": "3.0.0", @@ -11696,6 +12214,15 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/parse-node-version": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parse-node-version/-/parse-node-version-1.0.1.tgz", + "integrity": "sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA==", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, "node_modules/parse5": { "version": "6.0.1", "dev": true, @@ -11791,6 +12318,16 @@ "node": ">=0.10" } }, + "node_modules/pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true, + "optional": true, + "engines": { + "node": ">=6" + } + }, "node_modules/pirates": { "version": "4.0.5", "dev": true, @@ -11926,7 +12463,9 @@ } }, "node_modules/postcss": { - "version": "8.4.13", + "version": "8.4.18", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.18.tgz", + "integrity": "sha512-Wi8mWhncLJm11GATDaQKobXSNEYGUHeQLiQqDFG1qQ5UTDPTEvKw0Xt5NsTpktGTwLps3ByrWsBrG0rB8YQ9oA==", "dev": true, "funding": [ { @@ -11938,9 +12477,8 @@ "url": "https://tidelift.com/funding/github/npm/postcss" } ], - "license": "MIT", "dependencies": { - "nanoid": "^3.3.3", + "nanoid": "^3.3.4", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" }, @@ -13083,6 +13621,18 @@ "url": "https://github.com/prettier/prettier?sponsor=1" } }, + "node_modules/prettier-linter-helpers": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", + "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", + "dev": true, + "dependencies": { + "fast-diff": "^1.1.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, "node_modules/pretty-bytes": { "version": "5.6.0", "dev": true, @@ -13186,6 +13736,13 @@ "node": ">= 0.10" } }, + "node_modules/prr": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", + "integrity": "sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw==", + "dev": true, + "optional": true + }, "node_modules/psl": { "version": "1.8.0", "dev": true, @@ -13315,14 +13872,15 @@ } }, "node_modules/rc-cascader": { - "version": "3.5.0", - "license": "MIT", + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/rc-cascader/-/rc-cascader-3.7.0.tgz", + "integrity": "sha512-SFtGpwmYN7RaWEAGTS4Rkc62ZV/qmQGg/tajr/7mfIkleuu8ro9Hlk6J+aA0x1YS4zlaZBtTcSaXM01QMiEV/A==", "dependencies": { "@babel/runtime": "^7.12.5", "array-tree-filter": "^2.1.0", "classnames": "^2.3.1", "rc-select": "~14.1.0", - "rc-tree": "~5.5.0", + "rc-tree": "~5.7.0", "rc-util": "^5.6.1" }, "peerDependencies": { @@ -13343,8 +13901,9 @@ } }, "node_modules/rc-collapse": { - "version": "3.1.4", - "license": "MIT", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/rc-collapse/-/rc-collapse-3.3.1.tgz", + "integrity": 
"sha512-cOJfcSe3R8vocrF8T+PgaHDrgeA1tX+lwfhwSj60NX9QVRidsILIbRNDLD6nAzmcvVC5PWiIRiR4S1OobxdhCg==", "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "2.x", @@ -13358,8 +13917,9 @@ } }, "node_modules/rc-dialog": { - "version": "8.8.1", - "license": "MIT", + "version": "8.9.0", + "resolved": "https://registry.npmjs.org/rc-dialog/-/rc-dialog-8.9.0.tgz", + "integrity": "sha512-Cp0tbJnrvPchJfnwIvOMWmJ4yjX3HWFatO6oBFD1jx8QkgsQCR0p8nUWAKdd3seLJhEC39/v56kZaEjwp9muoQ==", "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.6", @@ -13372,11 +13932,14 @@ } }, "node_modules/rc-drawer": { - "version": "4.4.3", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/rc-drawer/-/rc-drawer-5.1.0.tgz", + "integrity": "sha512-pU3Tsn99pxGdYowXehzZbdDVE+4lDXSGb7p8vA9mSmr569oc2Izh4Zw5vLKSe/Xxn2p5MSNbLVqD4tz+pK6SOw==", "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.6", - "rc-util": "^5.7.0" + "rc-motion": "^2.6.1", + "rc-util": "^5.21.2" }, "peerDependencies": { "react": ">=16.9.0", @@ -13384,12 +13947,13 @@ } }, "node_modules/rc-dropdown": { - "version": "3.5.2", - "license": "MIT", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/rc-dropdown/-/rc-dropdown-4.0.1.tgz", + "integrity": "sha512-OdpXuOcme1rm45cR0Jzgfl1otzmU4vuBVb+etXM8vcaULGokAKVpKlw8p6xzspG7jGd/XxShvq+N3VNEfk/l5g==", "dependencies": { - "@babel/runtime": "^7.10.1", + "@babel/runtime": "^7.18.3", "classnames": "^2.2.6", - "rc-trigger": "^5.0.4", + "rc-trigger": "^5.3.1", "rc-util": "^5.17.0" }, "peerDependencies": { @@ -13398,10 +13962,11 @@ } }, "node_modules/rc-field-form": { - "version": "1.26.3", - "license": "MIT", + "version": "1.27.3", + "resolved": "https://registry.npmjs.org/rc-field-form/-/rc-field-form-1.27.3.tgz", + "integrity": "sha512-HGqxHnmGQgkPApEcikV4qTg3BLPC82uB/cwBDftDt1pYaqitJfSl5TFTTUMKVEJVT5RqJ2Zi68ME1HmIMX2HAw==", "dependencies": { - "@babel/runtime": "^7.8.4", + "@babel/runtime": "^7.18.0", "async-validator": "^4.1.0", "rc-util": "^5.8.0" }, @@ -13414,12 +13979,13 @@ } }, "node_modules/rc-image": { - "version": "5.6.2", - "license": "MIT", + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/rc-image/-/rc-image-5.7.1.tgz", + "integrity": "sha512-QyMfdhoUfb5W14plqXSisaYwpdstcLYnB0MjX5ccIK2rydQM9sDPuekQWu500DDGR2dBaIF5vx9XbWkNFK17Fg==", "dependencies": { "@babel/runtime": "^7.11.2", "classnames": "^2.2.6", - "rc-dialog": "~8.8.0", + "rc-dialog": "~8.9.0", "rc-util": "^5.0.6" }, "peerDependencies": { @@ -13428,8 +13994,9 @@ } }, "node_modules/rc-input": { - "version": "0.0.1-alpha.7", - "license": "MIT", + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/rc-input/-/rc-input-0.1.4.tgz", + "integrity": "sha512-FqDdNz+fV2dKNgfXzcSLKvC+jEs1709t7nD+WdfjrdSaOcefpgc7BUJYadc3usaING+b7ediMTfKxuJBsEFbXA==", "dependencies": { "@babel/runtime": "^7.11.1", "classnames": "^2.2.1", @@ -13441,12 +14008,13 @@ } }, "node_modules/rc-input-number": { - "version": "7.3.4", - "license": "MIT", + "version": "7.3.9", + "resolved": "https://registry.npmjs.org/rc-input-number/-/rc-input-number-7.3.9.tgz", + "integrity": "sha512-u0+miS+SATdb6DtssYei2JJ1WuZME+nXaG6XGtR8maNyW5uGDytfDu60OTWLQEb0Anv/AcCzehldV8CKmKyQfA==", "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.5", - "rc-util": "^5.9.8" + "rc-util": "^5.23.0" }, "peerDependencies": { "react": ">=16.9.0", @@ -13454,15 +14022,16 @@ } }, "node_modules/rc-mentions": { - "version": "1.7.1", - "license": "MIT", + "version": "1.10.0", + "resolved": 
"https://registry.npmjs.org/rc-mentions/-/rc-mentions-1.10.0.tgz", + "integrity": "sha512-oMlYWnwXSxP2NQVlgxOTzuG/u9BUc3ySY78K3/t7MNhJWpZzXTao+/Bic6tyZLuNCO89//hVQJBdaR2rnFQl6Q==", "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.6", - "rc-menu": "~9.5.1", - "rc-textarea": "^0.3.0", + "rc-menu": "~9.6.0", + "rc-textarea": "^0.4.0", "rc-trigger": "^5.0.4", - "rc-util": "^5.0.1" + "rc-util": "^5.22.5" }, "peerDependencies": { "react": ">=16.9.0", @@ -13470,8 +14039,9 @@ } }, "node_modules/rc-menu": { - "version": "9.5.5", - "license": "MIT", + "version": "9.6.4", + "resolved": "https://registry.npmjs.org/rc-menu/-/rc-menu-9.6.4.tgz", + "integrity": "sha512-6DiNAjxjVIPLZXHffXxxcyE15d4isRL7iQ1ru4MqYDH2Cqc5bW96wZOdMydFtGLyDdnmEQ9jVvdCE9yliGvzkw==", "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "2.x", @@ -13487,8 +14057,9 @@ } }, "node_modules/rc-motion": { - "version": "2.6.0", - "license": "MIT", + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/rc-motion/-/rc-motion-2.6.2.tgz", + "integrity": "sha512-4w1FaX3dtV749P8GwfS4fYnFG4Rb9pxvCYPc/b2fw1cmlHJWNNgOFIz7ysiD+eOrzJSvnLJWlNQQncpNMXwwpg==", "dependencies": { "@babel/runtime": "^7.11.1", "classnames": "^2.2.1", @@ -13517,8 +14088,9 @@ } }, "node_modules/rc-overflow": { - "version": "1.2.5", - "license": "MIT", + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc-overflow/-/rc-overflow-1.2.8.tgz", + "integrity": "sha512-QJ0UItckWPQ37ZL1dMEBAdY1dhfTXFL9k6oTTcyydVwoUNMnMqCGqnRNA98axSr/OeDKqR6DVFyi8eA5RQI/uQ==", "dependencies": { "@babel/runtime": "^7.11.1", "classnames": "^2.2.1", @@ -13531,8 +14103,9 @@ } }, "node_modules/rc-pagination": { - "version": "3.1.16", - "license": "MIT", + "version": "3.1.17", + "resolved": "https://registry.npmjs.org/rc-pagination/-/rc-pagination-3.1.17.tgz", + "integrity": "sha512-/BQ5UxcBnW28vFAcP2hfh+Xg15W0QZn8TWYwdCApchMH1H0CxiaUUcULP8uXcFM1TygcdKWdt3JqsL9cTAfdkQ==", "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.1" @@ -13543,8 +14116,9 @@ } }, "node_modules/rc-picker": { - "version": "2.6.8", - "license": "MIT", + "version": "2.6.11", + "resolved": "https://registry.npmjs.org/rc-picker/-/rc-picker-2.6.11.tgz", + "integrity": "sha512-INJ7ULu+Kj4UgqbcqE8Q+QpMw55xFf9kkyLBHJFk0ihjJpAV4glialRfqHE7k4KX2BWYPQfpILwhwR14x2EiRQ==", "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.1", @@ -13564,8 +14138,9 @@ } }, "node_modules/rc-progress": { - "version": "3.2.4", - "license": "MIT", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/rc-progress/-/rc-progress-3.3.3.tgz", + "integrity": "sha512-MDVNVHzGanYtRy2KKraEaWeZLri2ZHWIRyaE1a9MQ2MuJ09m+Wxj5cfcaoaR6z5iRpHpA59YeUxAlpML8N4PJw==", "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.6", @@ -13594,7 +14169,8 @@ }, "node_modules/rc-resize-observer": { "version": "1.2.0", - "license": "MIT", + "resolved": "https://registry.npmjs.org/rc-resize-observer/-/rc-resize-observer-1.2.0.tgz", + "integrity": "sha512-6W+UzT3PyDM0wVCEHfoW3qTHPTvbdSgiA43buiy8PzmeMnfgnDeb9NjdimMXMl3/TcrvvWl5RRVdp+NqcR47pQ==", "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.1", @@ -13621,8 +14197,9 @@ } }, "node_modules/rc-select": { - "version": "14.1.2", - "license": "MIT", + "version": "14.1.13", + "resolved": "https://registry.npmjs.org/rc-select/-/rc-select-14.1.13.tgz", + "integrity": "sha512-WMEsC3gTwA1dbzWOdVIXDmWyidYNLq68AwvvUlRROw790uGUly0/vmqDozXrIr0QvN/A3CEULx12o+WtLCAefg==", "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "2.x", @@ 
-13688,13 +14265,14 @@ } }, "node_modules/rc-table": { - "version": "7.24.1", - "license": "MIT", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/rc-table/-/rc-table-7.26.0.tgz", + "integrity": "sha512-0cD8e6S+DTGAt5nBZQIPFYEaIukn17sfa5uFL98faHlH/whZzD8ii3dbFL4wmUDEL4BLybhYop+QUfZJ4CPvNQ==", "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.5", "rc-resize-observer": "^1.1.0", - "rc-util": "^5.14.0", + "rc-util": "^5.22.5", "shallowequal": "^1.1.0" }, "engines": { @@ -13706,13 +14284,15 @@ } }, "node_modules/rc-tabs": { - "version": "11.13.0", - "license": "MIT", + "version": "12.2.1", + "resolved": "https://registry.npmjs.org/rc-tabs/-/rc-tabs-12.2.1.tgz", + "integrity": "sha512-09pVv4kN8VFqp6THceEmxOW8PAShQC08hrroeVYP4Y8YBFaP1PIWdyFL01czcbyz5YZFj9flZ7aljMaAl0jLVg==", "dependencies": { "@babel/runtime": "^7.11.2", "classnames": "2.x", - "rc-dropdown": "~3.5.0", - "rc-menu": "~9.5.1", + "rc-dropdown": "~4.0.0", + "rc-menu": "~9.6.0", + "rc-motion": "^2.6.2", "rc-resize-observer": "^1.0.0", "rc-util": "^5.5.0" }, @@ -13725,13 +14305,14 @@ } }, "node_modules/rc-textarea": { - "version": "0.3.7", - "license": "MIT", + "version": "0.4.6", + "resolved": "https://registry.npmjs.org/rc-textarea/-/rc-textarea-0.4.6.tgz", + "integrity": "sha512-HEKCu8nouXXayqYelQnhQm8fdH7v92pAQvfVCz+jhIPv2PHTyBxVrmoZJMn3B8cU+wdyuvRGkshngO3/TzBn4w==", "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.1", "rc-resize-observer": "^1.0.0", - "rc-util": "^5.7.0", + "rc-util": "^5.24.4", "shallowequal": "^1.1.0" }, "peerDependencies": { @@ -13740,10 +14321,12 @@ } }, "node_modules/rc-tooltip": { - "version": "5.1.1", - "license": "MIT", + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/rc-tooltip/-/rc-tooltip-5.2.2.tgz", + "integrity": "sha512-jtQzU/18S6EI3lhSGoDYhPqNpWajMtS5VV/ld1LwyfrDByQpYmw/LW6U7oFXXLukjfDHQ7Ju705A82PRNFWYhg==", "dependencies": { "@babel/runtime": "^7.11.2", + "classnames": "^2.3.1", "rc-trigger": "^5.0.0" }, "peerDependencies": { @@ -13752,14 +14335,15 @@ } }, "node_modules/rc-tree": { - "version": "5.5.0", - "license": "MIT", + "version": "5.7.0", + "resolved": "https://registry.npmjs.org/rc-tree/-/rc-tree-5.7.0.tgz", + "integrity": "sha512-F+Ewkv/UcutshnVBMISP+lPdHDlcsL+YH/MQDVWbk+QdkfID7vXiwrHMEZn31+2Rbbm21z/HPceGS8PXGMmnQg==", "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "2.x", "rc-motion": "^2.0.1", "rc-util": "^5.16.1", - "rc-virtual-list": "^3.4.2" + "rc-virtual-list": "^3.4.8" }, "engines": { "node": ">=10.x" @@ -13770,13 +14354,14 @@ } }, "node_modules/rc-tree-select": { - "version": "5.3.0", - "license": "MIT", + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/rc-tree-select/-/rc-tree-select-5.5.3.tgz", + "integrity": "sha512-gv8KyC6J7f9e50OkGk1ibF7v8vL+iaBnA8Ep/EVlMma2/tGdBQXO9xIvPjX8eQrZL5PjoeTUndNPM3cY3721ng==", "dependencies": { "@babel/runtime": "^7.10.1", "classnames": "2.x", "rc-select": "~14.1.0", - "rc-tree": "~5.5.0", + "rc-tree": "~5.7.0", "rc-util": "^5.16.1" }, "peerDependencies": { @@ -13785,10 +14370,11 @@ } }, "node_modules/rc-trigger": { - "version": "5.2.18", - "license": "MIT", + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/rc-trigger/-/rc-trigger-5.3.3.tgz", + "integrity": "sha512-IC4nuTSAME7RJSgwvHCNDQrIzhvGMKf6NDu5veX+zk1MG7i1UnwTWWthcP9WHw3+FZfP3oZGvkrHFPu/EGkFKw==", "dependencies": { - "@babel/runtime": "^7.11.2", + "@babel/runtime": "^7.18.3", "classnames": "^2.2.6", "rc-align": "^4.0.0", "rc-motion": "^2.0.0", @@ -13816,10 +14402,11 @@ } }, 
"node_modules/rc-util": { - "version": "5.21.2", - "license": "MIT", + "version": "5.24.4", + "resolved": "https://registry.npmjs.org/rc-util/-/rc-util-5.24.4.tgz", + "integrity": "sha512-2a4RQnycV9eV7lVZPEJ7QwJRPlZNc06J7CwcwZo4vIHr3PfUqtYgl1EkUV9ETAc6VRRi8XZOMFhYG63whlIC9Q==", "dependencies": { - "@babel/runtime": "^7.12.5", + "@babel/runtime": "^7.18.3", "react-is": "^16.12.0", "shallowequal": "^1.1.0" }, @@ -13829,9 +14416,11 @@ } }, "node_modules/rc-virtual-list": { - "version": "3.4.7", - "license": "MIT", + "version": "3.4.11", + "resolved": "https://registry.npmjs.org/rc-virtual-list/-/rc-virtual-list-3.4.11.tgz", + "integrity": "sha512-BvUUH60kkeTBPigN5F89HtGaA5jSP4y2aM6cJ4dk9Y42I9yY+h6i08wF6UKeDcxdfOU8j3I5HxkSS/xA77J3wA==", "dependencies": { + "@babel/runtime": "^7.20.0", "classnames": "^2.2.6", "rc-resize-observer": "^1.0.0", "rc-util": "^5.15.0" @@ -14052,6 +14641,18 @@ "node": ">=0.10.0" } }, + "node_modules/react-resizable": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/react-resizable/-/react-resizable-3.0.4.tgz", + "integrity": "sha512-StnwmiESiamNzdRHbSSvA65b0ZQJ7eVQpPusrSmcpyGKzC0gojhtO62xxH6YOBmepk9dQTBi9yxidL3W4s3EBA==", + "dependencies": { + "prop-types": "15.x", + "react-draggable": "^4.0.3" + }, + "peerDependencies": { + "react": ">= 16.3" + } + }, "node_modules/react-router": { "version": "6.3.0", "license": "MIT", @@ -14228,8 +14829,9 @@ } }, "node_modules/regenerator-runtime": { - "version": "0.13.9", - "license": "MIT" + "version": "0.13.10", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.10.tgz", + "integrity": "sha512-KepLsg4dU12hryUO7bp/axHAKvwGOCV0sGloQtpagJ12ai+ojVDqkeGSiRX1zlq+kjIMZ1t7gpze+26QqtdGqw==" }, "node_modules/regenerator-transform": { "version": "0.15.0", @@ -14962,6 +15564,18 @@ "dev": true, "license": "ISC" }, + "node_modules/shallow-clone": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", + "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", + "dev": true, + "dependencies": { + "kind-of": "^6.0.2" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/shallowequal": { "version": "1.1.0", "license": "MIT" @@ -15231,7 +15845,8 @@ }, "node_modules/string-convert": { "version": "0.2.1", - "license": "MIT" + "resolved": "https://registry.npmjs.org/string-convert/-/string-convert-0.2.1.tgz", + "integrity": "sha512-u/1tdPl4yQnPBjnVrmdLo9gtuLvELKsAoRapekWggdiQNvvvum+jYF329d84NAa660KQw7pB2n36KrIKVoXa3A==" }, "node_modules/string-length": { "version": "4.0.2", @@ -15424,9 +16039,10 @@ } }, "node_modules/supports-hyperlinks": { - "version": "2.2.0", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz", + "integrity": "sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==", "dev": true, - "license": "MIT", "dependencies": { "has-flag": "^4.0.0", "supports-color": "^7.0.0" @@ -15538,6 +16154,28 @@ "dev": true, "license": "MIT" }, + "node_modules/synckit": { + "version": "0.8.4", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.8.4.tgz", + "integrity": "sha512-Dn2ZkzMdSX827QbowGbU/4yjWuvNaCoScLLoMo/yKbu+P4GBR6cRGKZH27k6a9bRzdqcyd1DE96pQtQ6uNkmyw==", + "dev": true, + "dependencies": { + "@pkgr/utils": "^2.3.1", + "tslib": "^2.4.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" + } + }, + 
"node_modules/synckit/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==", + "dev": true + }, "node_modules/tailwindcss": { "version": "3.0.24", "dev": true, @@ -15724,6 +16362,16 @@ "dev": true, "license": "MIT" }, + "node_modules/tiny-glob": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/tiny-glob/-/tiny-glob-0.2.9.tgz", + "integrity": "sha512-g/55ssRPUjShh+xkfx9UPDXqhckHEsHr4Vd9zX55oSdGZc/MD0m3sferOkwWtp98bv+kcVfEHtRJgBVJzelrzg==", + "dev": true, + "dependencies": { + "globalyzer": "0.1.0", + "globrex": "^0.1.2" + } + }, "node_modules/tmpl": { "version": "1.0.5", "dev": true, @@ -15794,6 +16442,67 @@ "dev": true, "license": "MIT" }, + "node_modules/ts-node": { + "version": "10.9.1", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz", + "integrity": "sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==", + "dev": true, + "peer": true, + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/ts-node/node_modules/acorn-walk": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", + "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", + "dev": true, + "peer": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/ts-node/node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true, + "peer": true + }, "node_modules/tsconfig-paths": { "version": "3.14.1", "dev": true, @@ -15996,6 +16705,32 @@ "yarn": "*" } }, + "node_modules/update-browserslist-db": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz", + "integrity": "sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + } + ], + "dependencies": { + "escalade": "^3.1.1", + "picocolors": "^1.0.0" + }, + "bin": { + "browserslist-lint": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, "node_modules/uri-js": { "version": "4.4.1", "dev": true, @@ -16049,6 +16784,13 @@ "dev": true, "license": "MIT" }, + 
"node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true, + "peer": true + }, "node_modules/v8-to-istanbul": { "version": "8.1.1", "dev": true, @@ -16078,6 +16820,43 @@ "node": ">= 0.8" } }, + "node_modules/vscode-json-languageservice": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/vscode-json-languageservice/-/vscode-json-languageservice-4.2.1.tgz", + "integrity": "sha512-xGmv9QIWs2H8obGbWg+sIPI/3/pFgj/5OWBhNzs00BkYQ9UaB2F6JJaGB/2/YOZJ3BvLXQTC4Q7muqU25QgAhA==", + "dev": true, + "dependencies": { + "jsonc-parser": "^3.0.0", + "vscode-languageserver-textdocument": "^1.0.3", + "vscode-languageserver-types": "^3.16.0", + "vscode-nls": "^5.0.0", + "vscode-uri": "^3.0.3" + } + }, + "node_modules/vscode-languageserver-textdocument": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.7.tgz", + "integrity": "sha512-bFJH7UQxlXT8kKeyiyu41r22jCZXG8kuuVVA33OEJn1diWOZK5n8zBSPZFHVBOu8kXZ6h0LIRhf5UnCo61J4Hg==", + "dev": true + }, + "node_modules/vscode-languageserver-types": { + "version": "3.17.2", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.2.tgz", + "integrity": "sha512-zHhCWatviizPIq9B7Vh9uvrH6x3sK8itC84HkamnBWoDFJtzBf7SWlpLCZUit72b3os45h6RWQNC9xHRDF8dRA==", + "dev": true + }, + "node_modules/vscode-nls": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/vscode-nls/-/vscode-nls-5.2.0.tgz", + "integrity": "sha512-RAaHx7B14ZU04EU31pT+rKz2/zSl7xMsfIZuo8pd+KZO6PXtQmpevpq3vxvWNcrGbdmhM/rr5Uw5Mz+NBfhVng==", + "dev": true + }, + "node_modules/vscode-uri": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.6.tgz", + "integrity": "sha512-fmL7V1eiDBFRRnu+gfRWTzyPpNIHJTc4mWnFkwBUmO9U3KPgJAmTx7oxi2bl/Rh6HLdU7+4C9wlj0k2E4AdKFQ==", + "dev": true + }, "node_modules/w3c-hr-time": { "version": "1.0.2", "dev": true, @@ -16137,8 +16916,9 @@ }, "node_modules/webpack": { "version": "5.72.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.72.0.tgz", + "integrity": "sha512-qmSmbspI0Qo5ld49htys8GY9XhS9CGqFoHTsOVAnjBdg0Zn79y135R+k4IR4rKK6+eKaabMhJwiVB7xw0SJu5w==", "dev": true, - "license": "MIT", "dependencies": { "@types/eslint-scope": "^3.7.3", "@types/estree": "^0.0.51", @@ -16357,6 +17137,19 @@ "node": ">=10.13.0" } }, + "node_modules/webpack-merge": { + "version": "5.8.0", + "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz", + "integrity": "sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==", + "dev": true, + "dependencies": { + "clone-deep": "^4.0.1", + "wildcard": "^2.0.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/webpack-sources": { "version": "3.2.3", "dev": true, @@ -16463,6 +17256,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/wildcard": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz", + "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==", + "dev": true + }, "node_modules/word-wrap": { "version": "1.2.3", "dev": true, @@ -16813,6 +17612,16 @@ "node": ">=10" } }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": 
"https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "peer": true, + "engines": { + "node": ">=6" + } + }, "node_modules/yocto-queue": { "version": "0.1.0", "dev": true, @@ -16842,6 +17651,8 @@ }, "@ant-design/icons": { "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@ant-design/icons/-/icons-4.7.0.tgz", + "integrity": "sha512-aoB4Z7JA431rt6d4u+8xcNPPCrdufSRMUOpxa1ab6mz1JCQZOEVolj2WVs/tDFmN62zzK30mNelEsprLYsSF3g==", "requires": { "@ant-design/colors": "^6.0.0", "@ant-design/icons-svg": "^4.2.1", @@ -16854,13 +17665,15 @@ "version": "4.2.1" }, "@ant-design/react-slick": { - "version": "0.28.4", + "version": "0.29.2", + "resolved": "https://registry.npmjs.org/@ant-design/react-slick/-/react-slick-0.29.2.tgz", + "integrity": "sha512-kgjtKmkGHa19FW21lHnAfyyH9AAoh35pBdcJ53rHmQ3O+cfFHGHnUbj/HFrRNJ5vIts09FKJVAD8RpaC+RaWfA==", "requires": { "@babel/runtime": "^7.10.4", "classnames": "^2.2.5", "json2mq": "^0.2.0", "lodash": "^4.17.21", - "resize-observer-polyfill": "^1.5.0" + "resize-observer-polyfill": "^1.5.1" } }, "@apideck/better-ajv-errors": { @@ -17964,9 +18777,11 @@ } }, "@babel/runtime": { - "version": "7.17.9", + "version": "7.20.0", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.20.0.tgz", + "integrity": "sha512-NDYdls71fTXoU8TZHfbBWg7DiZfNzClcKui/+kyi6ppD2L1qnWW3VV6CjtaBXSUGGhiTWJ6ereOIkUvenif66Q==", "requires": { - "regenerator-runtime": "^0.13.4" + "regenerator-runtime": "^0.13.10" } }, "@babel/runtime-corejs3": { @@ -18014,6 +18829,31 @@ "version": "0.2.3", "dev": true }, + "@craco/craco": { + "version": "7.0.0-alpha.8", + "resolved": "https://registry.npmjs.org/@craco/craco/-/craco-7.0.0-alpha.8.tgz", + "integrity": "sha512-IN3/ldPaktGflPu342cg7n8LYa2c3x9H2XzngUkDzTjro25ig1GyVcUdnG1U0X6wrRTF9K1AxZ5su9jLbdyFUw==", + "dev": true, + "requires": { + "autoprefixer": "^10.4.12", + "cosmiconfig": "^7.0.1", + "cosmiconfig-typescript-loader": "^4.1.1", + "cross-spawn": "^7.0.3", + "lodash": "^4.17.21", + "semver": "^7.3.7", + "webpack-merge": "^5.8.0" + } + }, + "@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "peer": true, + "requires": { + "@jridgewell/trace-mapping": "0.3.9" + } + }, "@csstools/normalize.css": { "version": "12.0.0", "dev": true @@ -18513,6 +19353,28 @@ "fastq": "^1.6.0" } }, + "@pkgr/utils": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/@pkgr/utils/-/utils-2.3.1.tgz", + "integrity": "sha512-wfzX8kc1PMyUILA+1Z/EqoE4UCXGy0iRGMhPwdfae1+f0OXlLqCk+By+aMzgJBzR9AzS4CDizioG6Ss1gvAFJw==", + "dev": true, + "requires": { + "cross-spawn": "^7.0.3", + "is-glob": "^4.0.3", + "open": "^8.4.0", + "picocolors": "^1.0.0", + "tiny-glob": "^0.2.9", + "tslib": "^2.4.0" + }, + "dependencies": { + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==", + "dev": true + } + } + }, "@pmmmwh/react-refresh-webpack-plugin": { "version": "0.5.5", "dev": true, @@ -18767,6 +19629,34 @@ "version": "0.2.0", "dev": true }, + "@tsconfig/node10": { + "version": "1.0.9", + "resolved": 
"https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz", + "integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==", + "dev": true, + "peer": true + }, + "@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true, + "peer": true + }, + "@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true, + "peer": true + }, + "@tsconfig/node16": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.3.tgz", + "integrity": "sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==", + "dev": true, + "peer": true + }, "@types/aria-query": { "version": "4.2.2", "dev": true @@ -18991,6 +19881,15 @@ "redux": "^4.0.0" } }, + "@types/react-resizable": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/react-resizable/-/react-resizable-3.0.3.tgz", + "integrity": "sha512-W/QsUOZoXBAIBQNhNm95A5ohoaiUA874lWQytO2UP9dOjp5JHO9+a0cwYNabea7sA12ZDJnGVUFZxcNaNksAWA==", + "dev": true, + "requires": { + "@types/react": "*" + } + }, "@types/resolve": { "version": "1.17.1", "dev": true, @@ -19571,51 +20470,53 @@ } }, "antd": { - "version": "4.20.2", + "version": "4.23.6", + "resolved": "https://registry.npmjs.org/antd/-/antd-4.23.6.tgz", + "integrity": "sha512-AYH57cWBDe1ChtbnvG8i9dpKG4WnjE3AG0zIKpXByFNnxsr4saV6/19ihE8/ImSGpohN4E2zTXmo7R5/MyVRKQ==", "requires": { "@ant-design/colors": "^6.0.0", "@ant-design/icons": "^4.7.0", - "@ant-design/react-slick": "~0.28.1", - "@babel/runtime": "^7.12.5", + "@ant-design/react-slick": "~0.29.1", + "@babel/runtime": "^7.18.3", "@ctrl/tinycolor": "^3.4.0", "classnames": "^2.2.6", "copy-to-clipboard": "^3.2.0", "lodash": "^4.17.21", "memoize-one": "^6.0.0", "moment": "^2.29.2", - "rc-cascader": "~3.5.0", + "rc-cascader": "~3.7.0", "rc-checkbox": "~2.3.0", - "rc-collapse": "~3.1.0", - "rc-dialog": "~8.8.1", - "rc-drawer": "~4.4.2", - "rc-dropdown": "~3.5.0", - "rc-field-form": "~1.26.1", - "rc-image": "~5.6.0", - "rc-input": "~0.0.1-alpha.5", - "rc-input-number": "~7.3.0", - "rc-mentions": "~1.7.0", - "rc-menu": "~9.5.5", - "rc-motion": "^2.5.1", + "rc-collapse": "~3.3.0", + "rc-dialog": "~8.9.0", + "rc-drawer": "~5.1.0", + "rc-dropdown": "~4.0.0", + "rc-field-form": "~1.27.0", + "rc-image": "~5.7.0", + "rc-input": "~0.1.2", + "rc-input-number": "~7.3.9", + "rc-mentions": "~1.10.0", + "rc-menu": "~9.6.3", + "rc-motion": "^2.6.1", "rc-notification": "~4.6.0", - "rc-pagination": "~3.1.9", - "rc-picker": "~2.6.4", - "rc-progress": "~3.2.1", + "rc-pagination": "~3.1.17", + "rc-picker": "~2.6.11", + "rc-progress": "~3.3.2", "rc-rate": "~2.9.0", "rc-resize-observer": "^1.2.0", - "rc-segmented": "~2.1.0 ", - "rc-select": "~14.1.1", + "rc-segmented": "~2.1.0", + "rc-select": "~14.1.13", "rc-slider": "~10.0.0", "rc-steps": "~4.1.0", "rc-switch": "~3.2.0", - "rc-table": "~7.24.0", - "rc-tabs": "~11.13.0", - "rc-textarea": "~0.3.0", - "rc-tooltip": "~5.1.1", - "rc-tree": "~5.5.0", - "rc-tree-select": "~5.3.0", + "rc-table": "~7.26.0", + "rc-tabs": "~12.2.0", + "rc-textarea": "~0.4.5", + "rc-tooltip": "~5.2.0", + "rc-tree": "~5.7.0", + "rc-tree-select": 
"~5.5.0", "rc-trigger": "^5.2.10", "rc-upload": "~4.3.0", - "rc-util": "^5.20.0", + "rc-util": "^5.22.5", "scroll-into-view-if-needed": "^2.2.25" } }, @@ -19658,7 +20559,9 @@ } }, "array-tree-filter": { - "version": "2.1.0" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-tree-filter/-/array-tree-filter-2.1.0.tgz", + "integrity": "sha512-4ROwICNlNw/Hqa9v+rk5h22KjmzB1JGTMVKP2AKJBOCgb0yL0ASf0+YvCcLNNwquOHNX48jkeZIJ3a+oOQqKcw==" }, "array-union": { "version": "2.1.0", @@ -19701,7 +20604,9 @@ "dev": true }, "async-validator": { - "version": "4.1.1" + "version": "4.2.5", + "resolved": "https://registry.npmjs.org/async-validator/-/async-validator-4.2.5.tgz", + "integrity": "sha512-7HhHjtERjqlNbZtqNqy2rckN/SpOOlmDliet+lP7k+eKZEjPk3DgyeU9lIXLdeLz0uBbbVp+9Qdow9wJWgwwfg==" }, "asynckit": { "version": "0.4.0" @@ -19715,11 +20620,13 @@ "dev": true }, "autoprefixer": { - "version": "10.4.7", + "version": "10.4.12", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.12.tgz", + "integrity": "sha512-WrCGV9/b97Pa+jtwf5UGaRjgQIg7OK3D06GnoYoZNcG1Xb8Gt3EfuKjlhh9i/VtT16g6PYjZ69jdJ2g8FxSC4Q==", "dev": true, "requires": { - "browserslist": "^4.20.3", - "caniuse-lite": "^1.0.30001335", + "browserslist": "^4.21.4", + "caniuse-lite": "^1.0.30001407", "fraction.js": "^4.2.0", "normalize-range": "^0.1.2", "picocolors": "^1.0.0", @@ -19818,6 +20725,15 @@ "object.assign": "^4.1.0" } }, + "babel-plugin-import": { + "version": "1.13.5", + "resolved": "https://registry.npmjs.org/babel-plugin-import/-/babel-plugin-import-1.13.5.tgz", + "integrity": "sha512-IkqnoV+ov1hdJVofly9pXRJmeDm9EtROfrc5i6eII0Hix2xMs5FEm8FG3ExMvazbnZBbgHIt6qdO8And6lCloQ==", + "dev": true, + "requires": { + "@babel/helper-module-imports": "^7.0.0" + } + }, "babel-plugin-istanbul": { "version": "6.1.1", "dev": true, @@ -20048,14 +20964,15 @@ "dev": true }, "browserslist": { - "version": "4.20.3", + "version": "4.21.4", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.4.tgz", + "integrity": "sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==", "dev": true, "requires": { - "caniuse-lite": "^1.0.30001332", - "electron-to-chromium": "^1.4.118", - "escalade": "^3.1.1", - "node-releases": "^2.0.3", - "picocolors": "^1.0.0" + "caniuse-lite": "^1.0.30001400", + "electron-to-chromium": "^1.4.251", + "node-releases": "^2.0.6", + "update-browserslist-db": "^1.0.9" } }, "bser": { @@ -20122,7 +21039,9 @@ } }, "caniuse-lite": { - "version": "1.0.30001336", + "version": "1.0.30001422", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001422.tgz", + "integrity": "sha512-hSesn02u1QacQHhaxl/kNMZwqVG35Sz/8DgvmgedxSH8z9UUpcDYSPYgsj3x5dQNRcNp6BwpSfQfVzYUTm+fog==", "dev": true }, "case-sensitive-paths-webpack-plugin": { @@ -20188,7 +21107,9 @@ "version": "5.0.3" }, "classnames": { - "version": "2.3.1" + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.3.2.tgz", + "integrity": "sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw==" }, "clean-css": { "version": "5.3.0", @@ -20242,9 +21163,20 @@ "version": "7.0.4", "dev": true, "requires": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", - "wrap-ansi": "^7.0.0" + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "clone-deep": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", + "integrity": 
"sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", + "dev": true, + "requires": { + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.2", + "shallow-clone": "^3.0.0" } }, "clsx": { @@ -20319,7 +21251,9 @@ "dev": true }, "colord": { - "version": "2.9.2", + "version": "2.9.3", + "resolved": "https://registry.npmjs.org/colord/-/colord-2.9.3.tgz", + "integrity": "sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==", "dev": true }, "colorette": { @@ -20427,6 +21361,15 @@ "version": "1.0.6", "dev": true }, + "copy-anything": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/copy-anything/-/copy-anything-2.0.6.tgz", + "integrity": "sha512-1j20GZTsvKNkc4BY3NpMOM8tt///wY3FpIzozTOFO2ffuZcV61nojHXVKIy3WM+7ADCy5FVhdZYHYDdgTU0yJw==", + "dev": true, + "requires": { + "is-what": "^3.14.1" + } + }, "copy-to-clipboard": { "version": "3.3.1", "requires": { @@ -20470,6 +21413,30 @@ "yaml": "^1.10.0" } }, + "cosmiconfig-typescript-loader": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-4.1.1.tgz", + "integrity": "sha512-9DHpa379Gp0o0Zefii35fcmuuin6q92FnLDffzdZ0l9tVd3nEobG3O+MZ06+kuBvFTSVScvNb/oHA13Nd4iipg==", + "dev": true, + "requires": {} + }, + "craco-less": { + "version": "2.1.0-alpha.0", + "resolved": "https://registry.npmjs.org/craco-less/-/craco-less-2.1.0-alpha.0.tgz", + "integrity": "sha512-1kj9Y7Y06Fbae3SJJtz1OvXsaKxjh0jTOwnvzKWOqrojQZbwC2K/d0dxDRUpHTDkIUmxbdzqMmI4LM9JfthQ6Q==", + "dev": true, + "requires": { + "less": "^4.1.1", + "less-loader": "^7.3.0" + } + }, + "create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "peer": true + }, "cross-spawn": { "version": "7.0.3", "dev": true, @@ -20780,10 +21747,14 @@ } }, "date-fns": { - "version": "2.28.0" + "version": "2.29.3", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.29.3.tgz", + "integrity": "sha512-dDCnyH2WnnKusqvZZ6+jA1O51Ibt8ZMRNkDZdyAyK4YfbDwa/cEmuztzG5pk6hqlp9aSBPYcjOlktquahGwGeA==" }, "dayjs": { - "version": "1.11.1" + "version": "1.11.5", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.5.tgz", + "integrity": "sha512-CAdX5Q3YW3Gclyo5Vpqkgpj8fSdLQcRuzfX6mC6Phy0nfJ0eGYOeS7m4mt2plDWLAtA4TqTakvbboHvUxfe4iA==" }, "debug": { "version": "4.3.4", @@ -20887,6 +21858,13 @@ "version": "1.2.2", "dev": true }, + "diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "peer": true + }, "diff-sequences": { "version": "27.5.1", "dev": true @@ -21028,7 +22006,9 @@ } }, "electron-to-chromium": { - "version": "1.4.134", + "version": "1.4.284", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.284.tgz", + "integrity": "sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA==", "dev": true }, "emittery": { @@ -21048,7 +22028,9 @@ "dev": true }, "enhanced-resolve": { - "version": "5.9.3", + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz", + "integrity": 
"sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==", "dev": true, "requires": { "graceful-fs": "^4.2.4", @@ -21059,6 +22041,16 @@ "version": "2.2.0", "dev": true }, + "errno": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz", + "integrity": "sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==", + "dev": true, + "optional": true, + "requires": { + "prr": "~1.0.1" + } + }, "error-ex": { "version": "1.3.2", "dev": true, @@ -21308,6 +22300,42 @@ } } }, + "eslint-import-resolver-typescript": { + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.5.1.tgz", + "integrity": "sha512-U7LUjNJPYjNsHvAUAkt/RU3fcTSpbllA0//35B4eLYTX74frmOepbt7F7J3D1IGtj9k21buOpaqtDd4ZlS/BYQ==", + "dev": true, + "requires": { + "debug": "^4.3.4", + "enhanced-resolve": "^5.10.0", + "get-tsconfig": "^4.2.0", + "globby": "^13.1.2", + "is-core-module": "^2.10.0", + "is-glob": "^4.0.3", + "synckit": "^0.8.3" + }, + "dependencies": { + "globby": { + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/globby/-/globby-13.1.2.tgz", + "integrity": "sha512-LKSDZXToac40u8Q1PQtZihbNdTYSNMuWe+K5l+oa6KgDzSvVrHXlJy40hUP522RjAIoNLJYBJi7ow+rbFpIhHQ==", + "dev": true, + "requires": { + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.11", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^4.0.0" + } + }, + "slash": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", + "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==", + "dev": true + } + } + }, "eslint-module-utils": { "version": "2.7.3", "dev": true, @@ -21372,6 +22400,8 @@ }, "eslint-plugin-import": { "version": "2.26.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz", + "integrity": "sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==", "dev": true, "requires": { "array-includes": "^3.1.4", @@ -21416,6 +22446,16 @@ "@typescript-eslint/experimental-utils": "^5.0.0" } }, + "eslint-plugin-json": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-json/-/eslint-plugin-json-3.1.0.tgz", + "integrity": "sha512-MrlG2ynFEHe7wDGwbUuFPsaT2b1uhuEFhJ+W1f1u+1C2EkXmTYJp4B1aAdQQ8M+CC3t//N/oRKiIVw14L2HR1g==", + "dev": true, + "requires": { + "lodash": "^4.17.21", + "vscode-json-languageservice": "^4.1.6" + } + }, "eslint-plugin-jsx-a11y": { "version": "6.5.1", "dev": true, @@ -21444,6 +22484,15 @@ } } }, + "eslint-plugin-prettier": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz", + "integrity": "sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ==", + "dev": true, + "requires": { + "prettier-linter-helpers": "^1.0.0" + } + }, "eslint-plugin-react": { "version": "7.29.4", "dev": true, @@ -21678,8 +22727,16 @@ "fast-deep-equal": { "version": "3.1.3" }, + "fast-diff": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz", + "integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==", + "dev": true + }, "fast-glob": { - "version": "3.2.11", + "version": "3.2.12", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", + "integrity": 
"sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", "dev": true, "requires": { "@nodelib/fs.stat": "^2.0.2", @@ -22012,6 +23069,12 @@ "get-intrinsic": "^1.1.1" } }, + "get-tsconfig": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.2.0.tgz", + "integrity": "sha512-X8u8fREiYOE6S8hLbq99PeykTDoLVnxvF4DjWKJmz9xy2nNRdUcV8ZN9tniJFeKyTU3qnC9lL8n4Chd6LmVKHg==", + "dev": true + }, "glob": { "version": "7.2.0", "requires": { @@ -22063,6 +23126,12 @@ "version": "11.12.0", "dev": true }, + "globalyzer": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/globalyzer/-/globalyzer-0.1.0.tgz", + "integrity": "sha512-40oNTM9UfG6aBmuKxk/giHn5nQ8RVz/SS4Ir6zgzOv9/qC3kKZ9v4etGTcJbEl/NyVQH7FGU7d+X1egr57Md2Q==", + "dev": true + }, "globby": { "version": "11.1.0", "dev": true, @@ -22075,6 +23144,12 @@ "slash": "^3.0.0" } }, + "globrex": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/globrex/-/globrex-0.1.2.tgz", + "integrity": "sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==", + "dev": true + }, "graceful-fs": { "version": "4.2.10", "dev": true @@ -22163,10 +23238,6 @@ "wbuf": "^1.1.0" }, "dependencies": { - "isarray": { - "version": "1.0.0", - "dev": true - }, "readable-stream": { "version": "2.3.7", "dev": true, @@ -22352,6 +23423,13 @@ "version": "5.2.0", "dev": true }, + "image-size": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/image-size/-/image-size-0.5.5.tgz", + "integrity": "sha512-6TDAlDPZxUFCv+fuOkIoXT/V/f3Qbq8e37p+YOiYrUv3v9cc3/6x78VdfPgFVaB9dZYeLUfKgHRebpkm/oP2VQ==", + "dev": true, + "optional": true + }, "immer": { "version": "9.0.12", "dev": true @@ -22438,7 +23516,9 @@ "dev": true }, "is-core-module": { - "version": "2.9.0", + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz", + "integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==", "dev": true, "requires": { "has": "^1.0.3" @@ -22501,6 +23581,15 @@ "version": "3.0.0", "dev": true }, + "is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "dev": true, + "requires": { + "isobject": "^3.0.1" + } + }, "is-potential-custom-element-name": { "version": "1.0.1", "dev": true @@ -22557,6 +23646,12 @@ "call-bind": "^1.0.2" } }, + "is-what": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/is-what/-/is-what-3.14.1.tgz", + "integrity": "sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA==", + "dev": true + }, "is-wsl": { "version": "2.2.0", "dev": true, @@ -22564,10 +23659,22 @@ "is-docker": "^2.0.0" } }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true + }, "isexe": { "version": "2.0.0", "dev": true }, + "isobject": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", + "dev": true + }, "istanbul-lib-coverage": { "version": "3.2.0", "dev": true @@ -23499,6 +24606,8 
@@ }, "json2mq": { "version": "0.2.0", + "resolved": "https://registry.npmjs.org/json2mq/-/json2mq-0.2.0.tgz", + "integrity": "sha512-SzoRg7ux5DWTII9J2qkrZrqV1gt+rTaoufMxEzXbS26Uid0NwaJd123HcoB80TgubEppxxIGdNxCx50fEoEWQA==", "requires": { "string-convert": "^0.2.0" } @@ -23507,6 +24616,12 @@ "version": "2.2.1", "dev": true }, + "jsonc-parser": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.0.tgz", + "integrity": "sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==", + "dev": true + }, "jsonfile": { "version": "6.1.0", "dev": true, @@ -23550,6 +24665,61 @@ "language-subtag-registry": "~0.3.2" } }, + "less": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/less/-/less-4.1.3.tgz", + "integrity": "sha512-w16Xk/Ta9Hhyei0Gpz9m7VS8F28nieJaL/VyShID7cYvP6IL5oHeL6p4TXSDJqZE/lNv0oJ2pGVjJsRkfwm5FA==", + "dev": true, + "requires": { + "copy-anything": "^2.0.1", + "errno": "^0.1.1", + "graceful-fs": "^4.1.2", + "image-size": "~0.5.0", + "make-dir": "^2.1.0", + "mime": "^1.4.1", + "needle": "^3.1.0", + "parse-node-version": "^1.0.1", + "source-map": "~0.6.0", + "tslib": "^2.3.0" + }, + "dependencies": { + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "optional": true, + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "optional": true + }, + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==", + "dev": true + } + } + }, + "less-loader": { + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/less-loader/-/less-loader-7.3.0.tgz", + "integrity": "sha512-Mi8915g7NMaLlgi77mgTTQvK022xKRQBIVDSyfl3ErTuBhmZBQab0mjeJjNNqGbdR+qrfTleKXqbGI4uEFavxg==", + "dev": true, + "requires": { + "klona": "^2.0.4", + "loader-utils": "^2.0.0", + "schema-utils": "^3.0.0" + } + }, "leven": { "version": "3.1.0", "dev": true @@ -23694,7 +24864,9 @@ "dev": true }, "loader-utils": { - "version": "2.0.2", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.3.tgz", + "integrity": "sha512-THWqIsn8QRnvLl0shHYVBN9syumU8pYWEHPTmkiVGd+7K5eFNVSY6AJhRvgGF70gg1Dz+l/k8WicvFCxdEs60A==", "dev": true, "requires": { "big.js": "^5.2.2", @@ -23812,6 +24984,13 @@ } } }, + "make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "peer": true + }, "makeerror": { "version": "1.0.12", "dev": true, @@ -23970,6 +25149,40 @@ "version": "1.4.0", "dev": true }, + "needle": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/needle/-/needle-3.1.0.tgz", + "integrity": "sha512-gCE9weDhjVGCRqS8dwDR/D3GTAeyXLXuqp7I8EzH6DllZGXSUyxuqqLh+YX9rMAWaaTFyVAg6rHGL25dqvczKw==", + "dev": true, + "optional": true, + "requires": { + "debug": "^3.2.6", + "iconv-lite": "^0.6.3", + "sax": "^1.2.4" + }, + "dependencies": { 
+ "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "optional": true, + "requires": { + "ms": "^2.1.1" + } + }, + "iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "optional": true, + "requires": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + } + } + } + }, "negotiator": { "version": "0.6.3", "dev": true @@ -24001,7 +25214,9 @@ "dev": true }, "node-releases": { - "version": "2.0.4", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.6.tgz", + "integrity": "sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==", "dev": true }, "normalize-path": { @@ -24219,6 +25434,12 @@ "lines-and-columns": "^1.1.6" } }, + "parse-node-version": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parse-node-version/-/parse-node-version-1.0.1.tgz", + "integrity": "sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA==", + "dev": true + }, "parse5": { "version": "6.0.1", "dev": true @@ -24276,6 +25497,13 @@ "version": "0.6.0", "dev": true }, + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true, + "optional": true + }, "pirates": { "version": "4.0.5", "dev": true @@ -24361,10 +25589,12 @@ } }, "postcss": { - "version": "8.4.13", + "version": "8.4.18", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.18.tgz", + "integrity": "sha512-Wi8mWhncLJm11GATDaQKobXSNEYGUHeQLiQqDFG1qQ5UTDPTEvKw0Xt5NsTpktGTwLps3ByrWsBrG0rB8YQ9oA==", "dev": true, "requires": { - "nanoid": "^3.3.3", + "nanoid": "^3.3.4", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" } @@ -24962,6 +26192,15 @@ "version": "2.7.1", "dev": true }, + "prettier-linter-helpers": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", + "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", + "dev": true, + "requires": { + "fast-diff": "^1.1.2" + } + }, "pretty-bytes": { "version": "5.6.0", "dev": true @@ -25034,6 +26273,13 @@ } } }, + "prr": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", + "integrity": "sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw==", + "dev": true, + "optional": true + }, "psl": { "version": "1.8.0", "dev": true @@ -25107,13 +26353,15 @@ } }, "rc-cascader": { - "version": "3.5.0", + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/rc-cascader/-/rc-cascader-3.7.0.tgz", + "integrity": "sha512-SFtGpwmYN7RaWEAGTS4Rkc62ZV/qmQGg/tajr/7mfIkleuu8ro9Hlk6J+aA0x1YS4zlaZBtTcSaXM01QMiEV/A==", "requires": { "@babel/runtime": "^7.12.5", "array-tree-filter": "^2.1.0", "classnames": "^2.3.1", "rc-select": "~14.1.0", - "rc-tree": "~5.5.0", + "rc-tree": "~5.7.0", "rc-util": "^5.6.1" } }, @@ -25125,7 +26373,9 @@ } }, "rc-collapse": { - "version": "3.1.4", + "version": "3.3.1", + "resolved": 
"https://registry.npmjs.org/rc-collapse/-/rc-collapse-3.3.1.tgz", + "integrity": "sha512-cOJfcSe3R8vocrF8T+PgaHDrgeA1tX+lwfhwSj60NX9QVRidsILIbRNDLD6nAzmcvVC5PWiIRiR4S1OobxdhCg==", "requires": { "@babel/runtime": "^7.10.1", "classnames": "2.x", @@ -25135,7 +26385,9 @@ } }, "rc-dialog": { - "version": "8.8.1", + "version": "8.9.0", + "resolved": "https://registry.npmjs.org/rc-dialog/-/rc-dialog-8.9.0.tgz", + "integrity": "sha512-Cp0tbJnrvPchJfnwIvOMWmJ4yjX3HWFatO6oBFD1jx8QkgsQCR0p8nUWAKdd3seLJhEC39/v56kZaEjwp9muoQ==", "requires": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.6", @@ -25144,41 +26396,52 @@ } }, "rc-drawer": { - "version": "4.4.3", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/rc-drawer/-/rc-drawer-5.1.0.tgz", + "integrity": "sha512-pU3Tsn99pxGdYowXehzZbdDVE+4lDXSGb7p8vA9mSmr569oc2Izh4Zw5vLKSe/Xxn2p5MSNbLVqD4tz+pK6SOw==", "requires": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.6", - "rc-util": "^5.7.0" + "rc-motion": "^2.6.1", + "rc-util": "^5.21.2" } }, "rc-dropdown": { - "version": "3.5.2", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/rc-dropdown/-/rc-dropdown-4.0.1.tgz", + "integrity": "sha512-OdpXuOcme1rm45cR0Jzgfl1otzmU4vuBVb+etXM8vcaULGokAKVpKlw8p6xzspG7jGd/XxShvq+N3VNEfk/l5g==", "requires": { - "@babel/runtime": "^7.10.1", + "@babel/runtime": "^7.18.3", "classnames": "^2.2.6", - "rc-trigger": "^5.0.4", + "rc-trigger": "^5.3.1", "rc-util": "^5.17.0" } }, "rc-field-form": { - "version": "1.26.3", + "version": "1.27.3", + "resolved": "https://registry.npmjs.org/rc-field-form/-/rc-field-form-1.27.3.tgz", + "integrity": "sha512-HGqxHnmGQgkPApEcikV4qTg3BLPC82uB/cwBDftDt1pYaqitJfSl5TFTTUMKVEJVT5RqJ2Zi68ME1HmIMX2HAw==", "requires": { - "@babel/runtime": "^7.8.4", + "@babel/runtime": "^7.18.0", "async-validator": "^4.1.0", "rc-util": "^5.8.0" } }, "rc-image": { - "version": "5.6.2", + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/rc-image/-/rc-image-5.7.1.tgz", + "integrity": "sha512-QyMfdhoUfb5W14plqXSisaYwpdstcLYnB0MjX5ccIK2rydQM9sDPuekQWu500DDGR2dBaIF5vx9XbWkNFK17Fg==", "requires": { "@babel/runtime": "^7.11.2", "classnames": "^2.2.6", - "rc-dialog": "~8.8.0", + "rc-dialog": "~8.9.0", "rc-util": "^5.0.6" } }, "rc-input": { - "version": "0.0.1-alpha.7", + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/rc-input/-/rc-input-0.1.4.tgz", + "integrity": "sha512-FqDdNz+fV2dKNgfXzcSLKvC+jEs1709t7nD+WdfjrdSaOcefpgc7BUJYadc3usaING+b7ediMTfKxuJBsEFbXA==", "requires": { "@babel/runtime": "^7.11.1", "classnames": "^2.2.1", @@ -25186,26 +26449,32 @@ } }, "rc-input-number": { - "version": "7.3.4", + "version": "7.3.9", + "resolved": "https://registry.npmjs.org/rc-input-number/-/rc-input-number-7.3.9.tgz", + "integrity": "sha512-u0+miS+SATdb6DtssYei2JJ1WuZME+nXaG6XGtR8maNyW5uGDytfDu60OTWLQEb0Anv/AcCzehldV8CKmKyQfA==", "requires": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.5", - "rc-util": "^5.9.8" + "rc-util": "^5.23.0" } }, "rc-mentions": { - "version": "1.7.1", + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/rc-mentions/-/rc-mentions-1.10.0.tgz", + "integrity": "sha512-oMlYWnwXSxP2NQVlgxOTzuG/u9BUc3ySY78K3/t7MNhJWpZzXTao+/Bic6tyZLuNCO89//hVQJBdaR2rnFQl6Q==", "requires": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.6", - "rc-menu": "~9.5.1", - "rc-textarea": "^0.3.0", + "rc-menu": "~9.6.0", + "rc-textarea": "^0.4.0", "rc-trigger": "^5.0.4", - "rc-util": "^5.0.1" + "rc-util": "^5.22.5" } }, "rc-menu": { - "version": "9.5.5", + "version": "9.6.4", + "resolved": 
"https://registry.npmjs.org/rc-menu/-/rc-menu-9.6.4.tgz", + "integrity": "sha512-6DiNAjxjVIPLZXHffXxxcyE15d4isRL7iQ1ru4MqYDH2Cqc5bW96wZOdMydFtGLyDdnmEQ9jVvdCE9yliGvzkw==", "requires": { "@babel/runtime": "^7.10.1", "classnames": "2.x", @@ -25217,7 +26486,9 @@ } }, "rc-motion": { - "version": "2.6.0", + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/rc-motion/-/rc-motion-2.6.2.tgz", + "integrity": "sha512-4w1FaX3dtV749P8GwfS4fYnFG4Rb9pxvCYPc/b2fw1cmlHJWNNgOFIz7ysiD+eOrzJSvnLJWlNQQncpNMXwwpg==", "requires": { "@babel/runtime": "^7.11.1", "classnames": "^2.2.1", @@ -25234,7 +26505,9 @@ } }, "rc-overflow": { - "version": "1.2.5", + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc-overflow/-/rc-overflow-1.2.8.tgz", + "integrity": "sha512-QJ0UItckWPQ37ZL1dMEBAdY1dhfTXFL9k6oTTcyydVwoUNMnMqCGqnRNA98axSr/OeDKqR6DVFyi8eA5RQI/uQ==", "requires": { "@babel/runtime": "^7.11.1", "classnames": "^2.2.1", @@ -25243,14 +26516,18 @@ } }, "rc-pagination": { - "version": "3.1.16", + "version": "3.1.17", + "resolved": "https://registry.npmjs.org/rc-pagination/-/rc-pagination-3.1.17.tgz", + "integrity": "sha512-/BQ5UxcBnW28vFAcP2hfh+Xg15W0QZn8TWYwdCApchMH1H0CxiaUUcULP8uXcFM1TygcdKWdt3JqsL9cTAfdkQ==", "requires": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.1" } }, "rc-picker": { - "version": "2.6.8", + "version": "2.6.11", + "resolved": "https://registry.npmjs.org/rc-picker/-/rc-picker-2.6.11.tgz", + "integrity": "sha512-INJ7ULu+Kj4UgqbcqE8Q+QpMw55xFf9kkyLBHJFk0ihjJpAV4glialRfqHE7k4KX2BWYPQfpILwhwR14x2EiRQ==", "requires": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.1", @@ -25263,7 +26540,9 @@ } }, "rc-progress": { - "version": "3.2.4", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/rc-progress/-/rc-progress-3.3.3.tgz", + "integrity": "sha512-MDVNVHzGanYtRy2KKraEaWeZLri2ZHWIRyaE1a9MQ2MuJ09m+Wxj5cfcaoaR6z5iRpHpA59YeUxAlpML8N4PJw==", "requires": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.6", @@ -25280,6 +26559,8 @@ }, "rc-resize-observer": { "version": "1.2.0", + "resolved": "https://registry.npmjs.org/rc-resize-observer/-/rc-resize-observer-1.2.0.tgz", + "integrity": "sha512-6W+UzT3PyDM0wVCEHfoW3qTHPTvbdSgiA43buiy8PzmeMnfgnDeb9NjdimMXMl3/TcrvvWl5RRVdp+NqcR47pQ==", "requires": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.1", @@ -25297,7 +26578,9 @@ } }, "rc-select": { - "version": "14.1.2", + "version": "14.1.13", + "resolved": "https://registry.npmjs.org/rc-select/-/rc-select-14.1.13.tgz", + "integrity": "sha512-WMEsC3gTwA1dbzWOdVIXDmWyidYNLq68AwvvUlRROw790uGUly0/vmqDozXrIr0QvN/A3CEULx12o+WtLCAefg==", "requires": { "@babel/runtime": "^7.10.1", "classnames": "2.x", @@ -25335,67 +26618,83 @@ } }, "rc-table": { - "version": "7.24.1", + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/rc-table/-/rc-table-7.26.0.tgz", + "integrity": "sha512-0cD8e6S+DTGAt5nBZQIPFYEaIukn17sfa5uFL98faHlH/whZzD8ii3dbFL4wmUDEL4BLybhYop+QUfZJ4CPvNQ==", "requires": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.5", "rc-resize-observer": "^1.1.0", - "rc-util": "^5.14.0", + "rc-util": "^5.22.5", "shallowequal": "^1.1.0" } }, "rc-tabs": { - "version": "11.13.0", + "version": "12.2.1", + "resolved": "https://registry.npmjs.org/rc-tabs/-/rc-tabs-12.2.1.tgz", + "integrity": "sha512-09pVv4kN8VFqp6THceEmxOW8PAShQC08hrroeVYP4Y8YBFaP1PIWdyFL01czcbyz5YZFj9flZ7aljMaAl0jLVg==", "requires": { "@babel/runtime": "^7.11.2", "classnames": "2.x", - "rc-dropdown": "~3.5.0", - "rc-menu": "~9.5.1", + "rc-dropdown": "~4.0.0", + "rc-menu": "~9.6.0", + "rc-motion": "^2.6.2", 
"rc-resize-observer": "^1.0.0", "rc-util": "^5.5.0" } }, "rc-textarea": { - "version": "0.3.7", + "version": "0.4.6", + "resolved": "https://registry.npmjs.org/rc-textarea/-/rc-textarea-0.4.6.tgz", + "integrity": "sha512-HEKCu8nouXXayqYelQnhQm8fdH7v92pAQvfVCz+jhIPv2PHTyBxVrmoZJMn3B8cU+wdyuvRGkshngO3/TzBn4w==", "requires": { "@babel/runtime": "^7.10.1", "classnames": "^2.2.1", "rc-resize-observer": "^1.0.0", - "rc-util": "^5.7.0", + "rc-util": "^5.24.4", "shallowequal": "^1.1.0" } }, "rc-tooltip": { - "version": "5.1.1", + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/rc-tooltip/-/rc-tooltip-5.2.2.tgz", + "integrity": "sha512-jtQzU/18S6EI3lhSGoDYhPqNpWajMtS5VV/ld1LwyfrDByQpYmw/LW6U7oFXXLukjfDHQ7Ju705A82PRNFWYhg==", "requires": { "@babel/runtime": "^7.11.2", + "classnames": "^2.3.1", "rc-trigger": "^5.0.0" } }, "rc-tree": { - "version": "5.5.0", + "version": "5.7.0", + "resolved": "https://registry.npmjs.org/rc-tree/-/rc-tree-5.7.0.tgz", + "integrity": "sha512-F+Ewkv/UcutshnVBMISP+lPdHDlcsL+YH/MQDVWbk+QdkfID7vXiwrHMEZn31+2Rbbm21z/HPceGS8PXGMmnQg==", "requires": { "@babel/runtime": "^7.10.1", "classnames": "2.x", "rc-motion": "^2.0.1", "rc-util": "^5.16.1", - "rc-virtual-list": "^3.4.2" + "rc-virtual-list": "^3.4.8" } }, "rc-tree-select": { - "version": "5.3.0", + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/rc-tree-select/-/rc-tree-select-5.5.3.tgz", + "integrity": "sha512-gv8KyC6J7f9e50OkGk1ibF7v8vL+iaBnA8Ep/EVlMma2/tGdBQXO9xIvPjX8eQrZL5PjoeTUndNPM3cY3721ng==", "requires": { "@babel/runtime": "^7.10.1", "classnames": "2.x", "rc-select": "~14.1.0", - "rc-tree": "~5.5.0", + "rc-tree": "~5.7.0", "rc-util": "^5.16.1" } }, "rc-trigger": { - "version": "5.2.18", + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/rc-trigger/-/rc-trigger-5.3.3.tgz", + "integrity": "sha512-IC4nuTSAME7RJSgwvHCNDQrIzhvGMKf6NDu5veX+zk1MG7i1UnwTWWthcP9WHw3+FZfP3oZGvkrHFPu/EGkFKw==", "requires": { - "@babel/runtime": "^7.11.2", + "@babel/runtime": "^7.18.3", "classnames": "^2.2.6", "rc-align": "^4.0.0", "rc-motion": "^2.0.0", @@ -25411,16 +26710,21 @@ } }, "rc-util": { - "version": "5.21.2", + "version": "5.24.4", + "resolved": "https://registry.npmjs.org/rc-util/-/rc-util-5.24.4.tgz", + "integrity": "sha512-2a4RQnycV9eV7lVZPEJ7QwJRPlZNc06J7CwcwZo4vIHr3PfUqtYgl1EkUV9ETAc6VRRi8XZOMFhYG63whlIC9Q==", "requires": { - "@babel/runtime": "^7.12.5", + "@babel/runtime": "^7.18.3", "react-is": "^16.12.0", "shallowequal": "^1.1.0" } }, "rc-virtual-list": { - "version": "3.4.7", + "version": "3.4.11", + "resolved": "https://registry.npmjs.org/rc-virtual-list/-/rc-virtual-list-3.4.11.tgz", + "integrity": "sha512-BvUUH60kkeTBPigN5F89HtGaA5jSP4y2aM6cJ4dk9Y42I9yY+h6i08wF6UKeDcxdfOU8j3I5HxkSS/xA77J3wA==", "requires": { + "@babel/runtime": "^7.20.0", "classnames": "^2.2.6", "rc-resize-observer": "^1.0.0", "rc-util": "^5.15.0" @@ -25556,6 +26860,15 @@ "version": "0.11.0", "dev": true }, + "react-resizable": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/react-resizable/-/react-resizable-3.0.4.tgz", + "integrity": "sha512-StnwmiESiamNzdRHbSSvA65b0ZQJ7eVQpPusrSmcpyGKzC0gojhtO62xxH6YOBmepk9dQTBi9yxidL3W4s3EBA==", + "requires": { + "prop-types": "15.x", + "react-draggable": "^4.0.3" + } + }, "react-router": { "version": "6.3.0", "requires": { @@ -25681,7 +26994,9 @@ } }, "regenerator-runtime": { - "version": "0.13.9" + "version": "0.13.10", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.10.tgz", + "integrity": 
"sha512-KepLsg4dU12hryUO7bp/axHAKvwGOCV0sGloQtpagJ12ai+ojVDqkeGSiRX1zlq+kjIMZ1t7gpze+26QqtdGqw==" }, "regenerator-transform": { "version": "0.15.0", @@ -26160,6 +27475,15 @@ "version": "1.2.0", "dev": true }, + "shallow-clone": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", + "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", + "dev": true, + "requires": { + "kind-of": "^6.0.2" + } + }, "shallowequal": { "version": "1.1.0" }, @@ -26340,7 +27664,9 @@ "dev": true }, "string-convert": { - "version": "0.2.1" + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/string-convert/-/string-convert-0.2.1.tgz", + "integrity": "sha512-u/1tdPl4yQnPBjnVrmdLo9gtuLvELKsAoRapekWggdiQNvvvum+jYF329d84NAa660KQw7pB2n36KrIKVoXa3A==" }, "string-length": { "version": "4.0.2", @@ -26461,7 +27787,9 @@ } }, "supports-hyperlinks": { - "version": "2.2.0", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz", + "integrity": "sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==", "dev": true, "requires": { "has-flag": "^4.0.0", @@ -26539,6 +27867,24 @@ "version": "3.2.4", "dev": true }, + "synckit": { + "version": "0.8.4", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.8.4.tgz", + "integrity": "sha512-Dn2ZkzMdSX827QbowGbU/4yjWuvNaCoScLLoMo/yKbu+P4GBR6cRGKZH27k6a9bRzdqcyd1DE96pQtQ6uNkmyw==", + "dev": true, + "requires": { + "@pkgr/utils": "^2.3.1", + "tslib": "^2.4.0" + }, + "dependencies": { + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==", + "dev": true + } + } + }, "tailwindcss": { "version": "3.0.24", "dev": true, @@ -26652,6 +27998,16 @@ "version": "1.1.0", "dev": true }, + "tiny-glob": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/tiny-glob/-/tiny-glob-0.2.9.tgz", + "integrity": "sha512-g/55ssRPUjShh+xkfx9UPDXqhckHEsHr4Vd9zX55oSdGZc/MD0m3sferOkwWtp98bv+kcVfEHtRJgBVJzelrzg==", + "dev": true, + "requires": { + "globalyzer": "0.1.0", + "globrex": "^0.1.2" + } + }, "tmpl": { "version": "1.0.5", "dev": true @@ -26700,6 +28056,44 @@ "version": "1.0.1", "dev": true }, + "ts-node": { + "version": "10.9.1", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz", + "integrity": "sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==", + "dev": true, + "peer": true, + "requires": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "dependencies": { + "acorn-walk": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", + "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", + "dev": true, + "peer": true + }, + "arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + 
"dev": true, + "peer": true + } + } + }, "tsconfig-paths": { "version": "3.14.1", "dev": true, @@ -26824,6 +28218,16 @@ "version": "1.2.0", "dev": true }, + "update-browserslist-db": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz", + "integrity": "sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ==", + "dev": true, + "requires": { + "escalade": "^3.1.1", + "picocolors": "^1.0.0" + } + }, "uri-js": { "version": "4.4.1", "dev": true, @@ -26861,6 +28265,13 @@ "version": "2.3.0", "dev": true }, + "v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true, + "peer": true + }, "v8-to-istanbul": { "version": "8.1.1", "dev": true, @@ -26880,6 +28291,43 @@ "version": "1.1.2", "dev": true }, + "vscode-json-languageservice": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/vscode-json-languageservice/-/vscode-json-languageservice-4.2.1.tgz", + "integrity": "sha512-xGmv9QIWs2H8obGbWg+sIPI/3/pFgj/5OWBhNzs00BkYQ9UaB2F6JJaGB/2/YOZJ3BvLXQTC4Q7muqU25QgAhA==", + "dev": true, + "requires": { + "jsonc-parser": "^3.0.0", + "vscode-languageserver-textdocument": "^1.0.3", + "vscode-languageserver-types": "^3.16.0", + "vscode-nls": "^5.0.0", + "vscode-uri": "^3.0.3" + } + }, + "vscode-languageserver-textdocument": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.7.tgz", + "integrity": "sha512-bFJH7UQxlXT8kKeyiyu41r22jCZXG8kuuVVA33OEJn1diWOZK5n8zBSPZFHVBOu8kXZ6h0LIRhf5UnCo61J4Hg==", + "dev": true + }, + "vscode-languageserver-types": { + "version": "3.17.2", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.2.tgz", + "integrity": "sha512-zHhCWatviizPIq9B7Vh9uvrH6x3sK8itC84HkamnBWoDFJtzBf7SWlpLCZUit72b3os45h6RWQNC9xHRDF8dRA==", + "dev": true + }, + "vscode-nls": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/vscode-nls/-/vscode-nls-5.2.0.tgz", + "integrity": "sha512-RAaHx7B14ZU04EU31pT+rKz2/zSl7xMsfIZuo8pd+KZO6PXtQmpevpq3vxvWNcrGbdmhM/rr5Uw5Mz+NBfhVng==", + "dev": true + }, + "vscode-uri": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.6.tgz", + "integrity": "sha512-fmL7V1eiDBFRRnu+gfRWTzyPpNIHJTc4mWnFkwBUmO9U3KPgJAmTx7oxi2bl/Rh6HLdU7+4C9wlj0k2E4AdKFQ==", + "dev": true + }, "w3c-hr-time": { "version": "1.0.2", "dev": true, @@ -26926,6 +28374,8 @@ }, "webpack": { "version": "5.72.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.72.0.tgz", + "integrity": "sha512-qmSmbspI0Qo5ld49htys8GY9XhS9CGqFoHTsOVAnjBdg0Zn79y135R+k4IR4rKK6+eKaabMhJwiVB7xw0SJu5w==", "dev": true, "requires": { "@types/eslint-scope": "^3.7.3", @@ -27074,6 +28524,16 @@ } } }, + "webpack-merge": { + "version": "5.8.0", + "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz", + "integrity": "sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==", + "dev": true, + "requires": { + "clone-deep": "^4.0.1", + "wildcard": "^2.0.0" + } + }, "webpack-sources": { "version": "3.2.3", "dev": true @@ -27133,6 +28593,12 @@ "is-symbol": "^1.0.3" } }, + "wildcard": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz", + "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==", + "dev": true + }, "word-wrap": { "version": "1.2.3", "dev": true @@ -27401,6 +28867,13 @@ "version": "20.2.9", "dev": true }, + "yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "peer": true + }, "yocto-queue": { "version": "0.1.0", "dev": true diff --git a/ui/package.json b/ui/package.json index 3a211df1f..c467c7b9e 100644 --- a/ui/package.json +++ b/ui/package.json @@ -1,20 +1,25 @@ { "name": "feathr-ui", - "version": "0.1.0", + "version": "0.9.0-rc2", "private": true, "dependencies": { + "@ant-design/icons": "^4.7.0", "@azure/msal-browser": "^2.24.0", "@azure/msal-react": "^1.4.0", - "antd": "^4.20.2", + "antd": "^4.23.6", "axios": "^0.27.2", + "classnames": "^2.3.2", "dagre": "^0.8.5", + "dayjs": "^1.11.5", "react": "^17.0.2", "react-dom": "^17.0.2", "react-flow-renderer": "^9.7.4", "react-query": "^3.38.0", + "react-resizable": "^3.0.4", "react-router-dom": "^6.3.0" }, "devDependencies": { + "@craco/craco": "^7.0.0-alpha.8", "@testing-library/jest-dom": "^5.16.3", "@testing-library/react": "^12.1.4", "@testing-library/user-event": "^13.5.0", @@ -23,25 +28,34 @@ "@types/node": "^16.11.26", "@types/react": "^17.0.43", "@types/react-dom": "^17.0.14", + "@types/react-resizable": "^3.0.3", "@typescript-eslint/eslint-plugin": "^5.30.7", "@typescript-eslint/parser": "^5.30.7", + "babel-plugin-import": "^1.13.5", + "craco-less": "^2.1.0-alpha.0", "eslint": "^8.20.0", "eslint-config-prettier": "^8.5.0", + "eslint-import-resolver-typescript": "^3.5.1", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-json": "^3.1.0", + "eslint-plugin-prettier": "^4.2.1", "eslint-plugin-react-hooks": "^4.6.0", "husky": "^8.0.1", "lint-staged": "^13.0.3", "prettier": "2.7.1", "react-scripts": "5.0.0", "typescript": "^4.6.3", - "web-vitals": "^2.1.4" + "web-vitals": "^2.1.4", + "webpack": "^5.72.0" }, "scripts": { - "start": "react-scripts start", - "build": "react-scripts build", - "test": "react-scripts test", + "start": "craco start", + "build": "craco build", + "test": "craco test", "eject": "react-scripts eject", "lint:fix": "npx eslint --fix --ext ts --ext tsx src/ ", - "format": "npx prettier --write src/**" + "format": "npx prettier --write src/**", + "lintStaged": "lint-staged" }, "browserslist": { "production": [ diff --git a/ui/public/favicon.ico b/ui/public/favicon.ico index a11777cc4..fc2f6ca0f 100644 Binary files a/ui/public/favicon.ico and b/ui/public/favicon.ico differ diff --git a/ui/public/index.html b/ui/public/index.html index 0050dcf77..d0bc57b87 100644 --- a/ui/public/index.html +++ b/ui/public/index.html @@ -9,7 +9,7 @@ name="description" content="Feathr Feature Store Web UI" /> - + Feathr Feature Store diff --git a/ui/public/logo192.png b/ui/public/logo192.png deleted file mode 100644 index fc44b0a37..000000000 Binary files a/ui/public/logo192.png and /dev/null differ diff --git a/ui/public/logo200.png b/ui/public/logo200.png new file mode 100644 index 000000000..254621fb0 Binary files /dev/null and b/ui/public/logo200.png differ diff --git a/ui/public/logo512.png b/ui/public/logo512.png deleted file mode 100644 index a4e47a654..000000000 Binary files a/ui/public/logo512.png and /dev/null differ diff --git a/ui/public/manifest.json 
b/ui/public/manifest.json index 50a99047f..f6d4ea50a 100644 --- a/ui/public/manifest.json +++ b/ui/public/manifest.json @@ -8,14 +8,9 @@ "type": "image/x-icon" }, { - "src": "logo192.png", + "src": "logo200.png", "type": "image/png", - "sizes": "192x192" - }, - { - "src": "logo512.png", - "type": "image/png", - "sizes": "512x512" + "sizes": "200x200" } ], "start_url": ".", diff --git a/ui/src/api/api.tsx b/ui/src/api/api.tsx index 167bd05ee..6c8b6f665 100644 --- a/ui/src/api/api.tsx +++ b/ui/src/api/api.tsx @@ -32,6 +32,27 @@ export const fetchDataSources = async (project: string) => { }); }; +export const fetchDataSource = async ( + project: string, + dataSourceId: string +) => { + const axios = await authAxios(msalInstance); + return axios + .get( + `${getApiBaseUrl()}/projects/${project}/datasources/${dataSourceId}`, + { + params: { project: project, datasource: dataSourceId }, + } + ) + .then((response) => { + if (response.data.message || response.data.detail) { + return Promise.reject(response.data.message || response.data.detail); + } else { + return response.data; + } + }); +}; + export const fetchProjects = async () => { const axios = await authAxios(msalInstance); return axios @@ -92,33 +113,21 @@ export const fetchFeatureLineages = async (featureId: string) => { // Following are place-holder code export const createFeature = async (feature: Feature) => { const axios = await authAxios(msalInstance); - return axios - .post(`${getApiBaseUrl()}/features`, feature, { - headers: { "Content-Type": "application/json;" }, - params: {}, - }) - .then((response) => { - return response; - }) - .catch((error) => { - return error.response; - }); + return axios.post(`${getApiBaseUrl()}/features`, feature, { + headers: { "Content-Type": "application/json;" }, + params: {}, + }); }; -export const updateFeature = async (feature: Feature, id: string) => { +export const updateFeature = async (feature: Feature, id?: string) => { const axios = await authAxios(msalInstance); - feature.guid = id; - return await axios - .put(`${getApiBaseUrl()}/features/${id}`, feature, { - headers: { "Content-Type": "application/json;" }, - params: {}, - }) - .then((response) => { - return response; - }) - .catch((error) => { - return error.response; - }); + if (id) { + feature.guid = id; + } + return axios.put(`${getApiBaseUrl()}/features/${feature.guid}`, feature, { + headers: { "Content-Type": "application/json;" }, + params: {}, + }); }; export const listUserRole = async () => { @@ -228,6 +237,8 @@ export const authAxios = async (msalInstance: PublicClientApplication) => { if (error.response?.status === 403) { const detail = error.response.data.detail; window.location.href = "/responseErrors/403/" + detail; + } else { + return Promise.reject(error.response.data); } //TODO: handle other response errors } diff --git a/ui/src/app.tsx b/ui/src/app.tsx index be6452636..b3d2b317a 100644 --- a/ui/src/app.tsx +++ b/ui/src/app.tsx @@ -10,6 +10,7 @@ import Features from "./pages/feature/features"; import NewFeature from "./pages/feature/newFeature"; import FeatureDetails from "./pages/feature/featureDetails"; import DataSources from "./pages/dataSource/dataSources"; +import DataSourceDetails from "./pages/dataSource/dataSourceDetails"; import Jobs from "./pages/jobs/jobs"; import Monitoring from "./pages/monitoring/monitoring"; import LineageGraph from "./pages/feature/lineageGraph"; @@ -23,40 +24,50 @@ import { getMsalConfig } from "./utils/utils"; const queryClient = new QueryClient(); const msalClient = getMsalConfig(); + 
const App = () => { return ( - + - +
- - } /> - } /> - } /> - } /> - } /> - } /> - } - /> - } - /> - } /> - } /> - } /> - } /> - } - /> - + + + } /> + } /> + } /> + } /> + } /> + } /> + } + /> + } + /> + } + /> + } /> + } /> + } /> + } + /> + } + /> + + diff --git a/ui/src/components/CardDescriptions/index.tsx b/ui/src/components/CardDescriptions/index.tsx new file mode 100644 index 000000000..dffdec77d --- /dev/null +++ b/ui/src/components/CardDescriptions/index.tsx @@ -0,0 +1,34 @@ +import React from "react"; +import { Card, Descriptions } from "antd"; + +import { isEmpty } from "@/utils/utils"; + +export interface CardDescriptionsProps { + title?: string; + mapping: any[]; + descriptions: any; +} + +const CardDescriptions = (props: CardDescriptionsProps) => { + const { title, mapping, descriptions } = props; + + return !isEmpty(descriptions) ? ( + + + {mapping.reduce((list: any, item) => { + const value = descriptions?.[item.key]; + if (value) { + list.push( + + {typeof value === "string" ? value : JSON.stringify(value)} + + ); + } + return list; + }, [])} + + + ) : null; +}; + +export default CardDescriptions; diff --git a/ui/src/components/FlowGraph/FlowGraph.tsx b/ui/src/components/FlowGraph/FlowGraph.tsx new file mode 100644 index 000000000..ef3f16033 --- /dev/null +++ b/ui/src/components/FlowGraph/FlowGraph.tsx @@ -0,0 +1,236 @@ +import React, { + MouseEvent as ReactMouseEvent, + forwardRef, + useCallback, + useEffect, + useRef, + useState, +} from "react"; +import ReactFlow, { + ConnectionLineType, + Controls, + Edge, + Node, + Elements, + getIncomers, + getOutgoers, + ReactFlowProvider, + isNode, + OnLoadParams, +} from "react-flow-renderer"; +import { Spin } from "antd"; +import { LoadingOutlined } from "@ant-design/icons"; +import { useSearchParams } from "react-router-dom"; +import cs from "classnames"; +import { FeatureLineage } from "@/models/model"; +import { isFeature, FeatureType } from "@/utils/utils"; +import LineageNode from "./LineageNode"; +import { NodeData, FlowGraphProps } from "./interface"; +import { getElements } from "./utils"; + +import styles from "./index.module.less"; + +const FlowGraphNodeTypes = { + "custom-node": LineageNode, +}; + +const defaultProps: FlowGraphProps = { + project: "", + snapGrid: [15, 15], + featureType: FeatureType.AllNodes, +}; + +const FlowGraph = (props: FlowGraphProps, ref: any) => { + const { + className, + style, + data, + loading, + height, + minHeight, + project, + nodeId, + featureType, + snapGrid, + } = { + ...defaultProps, + ...props, + }; + const [, setURLSearchParams] = useSearchParams(); + const flowRef = useRef(); + const hasReadRef = useRef(false); + const elementRef = useRef>(); + const hasHighlight = useRef(false); + const [elements, setElements] = useState>([]); + + // Reset all node highlight status + const resetHighlight = useCallback(() => { + if ( + elementRef.current && + elementRef.current.length > 0 && + hasHighlight.current + ) { + hasHighlight.current = false; + setElements((state) => { + return state.map((element) => { + if (isNode(element)) { + element.style = { + ...element.style, + opacity: 1, + }; + element.data!.active = false; + } else { + element.animated = false; + } + return element; + }); + }); + } + }, [setElements]); + + // Highlight path of selected node, including all linked up and down stream nodes + const highlightPath = useCallback( + (node: Node) => { + if (elementRef.current && elementRef.current.length > 0) { + hasHighlight.current = true; + setElements((elements) => { + const incomerIds = new Set( + getIncomers(node, 
elements).map((item) => item.id) + ); + const outgoerIds = new Set( + getOutgoers(node, elements).map((item) => item.id) + ); + + return elements.map((element) => { + if (isNode(element)) { + const highlight = + element.id === node.id || + incomerIds.has(element.id) || + outgoerIds.has(element.id); + element.style = { + ...element.style, + opacity: highlight ? 1 : 0.25, + }; + element.data = { + ...element.data, + active: + element.id === node.id && isFeature(element.data!.subtitle), + }; + } else { + const highlight = + element.source === node.id || element.target === node.id; + const animated = + incomerIds.has(element.source) && + (incomerIds.has(element.target) || node.id === element.target); + + element.animated = highlight || animated; + } + return element; + }); + }); + } + }, + [setElements] + ); + + // Fired when panel is clicked, reset all highlighted path, and remove the nodeId query string in url path. + const onPaneClick = useCallback(() => { + resetHighlight(); + setURLSearchParams({}); + }, [resetHighlight, setURLSearchParams]); + + const onElementClick = useCallback( + (e: ReactMouseEvent, element: Node | Edge) => { + e.stopPropagation(); + if (isNode(element)) { + setURLSearchParams({ + nodeId: element.id, + featureType: element.data!.subtitle, + }); + setTimeout(() => { + highlightPath(element); + }, 0); + } + }, + [highlightPath, setURLSearchParams] + ); + + const handleInit = useCallback( + ( + project: string, + data: FeatureLineage, + featureType?: FeatureType, + nodeId?: string + ) => { + const elements = (elementRef.current = getElements( + project, + data, + featureType + )); + setElements(elements); + if (nodeId) { + const node = elements?.find( + (item) => item.id === nodeId + ) as Node; + if (node) { + highlightPath(node); + } + } + }, + [setElements, highlightPath] + ); + + // Fit the graph to the center of layout view when graph is initialized + const onLoad = (reactFlowInstance: OnLoadParams) => { + flowRef.current = reactFlowInstance; + flowRef.current?.fitView(); + }; + + useEffect(() => { + if (data) { + const type = hasHighlight.current ? FeatureType.AllNodes : featureType; + handleInit(project!, data, type, nodeId); + } + }, [data, project, nodeId, featureType, handleInit]); + + useEffect(() => { + if (elements.length > 0 && !hasReadRef.current) { + hasReadRef.current = true; + setTimeout(() => { + flowRef.current?.fitView(); + }, 0); + } + }, [elements]); + + return ( + } + > + + + + + + + ); +}; + +const FlowGraphComponent = forwardRef(FlowGraph); + +FlowGraphComponent.displayName = "FlowGraph"; + +export default FlowGraphComponent; diff --git a/ui/src/components/FlowGraph/LineageNode.tsx b/ui/src/components/FlowGraph/LineageNode.tsx new file mode 100644 index 000000000..27a99cc4f --- /dev/null +++ b/ui/src/components/FlowGraph/LineageNode.tsx @@ -0,0 +1,57 @@ +import React, { forwardRef, memo } from "react"; +import cs from "classnames"; +import { RightCircleOutlined } from "@ant-design/icons"; +import { useNavigate } from "react-router-dom"; +import { Handle, NodeProps, Position } from "react-flow-renderer"; +import { LineageNodeProps } from "./interface"; + +import styles from "./index.module.less"; + +const LineageNode = (props: LineageNodeProps, ref: any) => { + const navigate = useNavigate(); + + const { label, subtitle, version, borderColor, detialUrl, active } = + props.data; + + const nodeTitle = version ? 
`${label} (v${version})` : label; + const nodeSubtitle = subtitle.replace("feathr_", ""); + const nodeColorStyle = { + border: `2px solid ${borderColor}`, + }; + + const onNodeIconClick = () => { + if (detialUrl) { + navigate(detialUrl); + } + // `/projects/${project}/features/${featureId}`); + }; + + return ( +
+    <div
+      className={cs(styles.lineageNode, {
+        [styles.lineageNodeActive]: active,
+      })}
+      style={nodeColorStyle}
+    >
+      <Handle type="target" position={Position.Left} />
+      <div className={styles.box}>
+        <div className={styles.title}>
+          {nodeTitle}
+          {active && (
+            <RightCircleOutlined
+              className={styles.navigate}
+              onClick={onNodeIconClick}
+            />
+          )}
+        </div>
+        <div className={styles.subtitle}>{nodeSubtitle}</div>
+      </div>
+      <Handle type="source" position={Position.Right} />
+    </div>
+ ); +}; + +const LineageNodeComponent = forwardRef(LineageNode); + +LineageNodeComponent.displayName = "LineageNode"; + +export default memo(LineageNodeComponent); diff --git a/ui/src/components/FlowGraph/index.module.less b/ui/src/components/FlowGraph/index.module.less new file mode 100644 index 000000000..9e69f59d7 --- /dev/null +++ b/ui/src/components/FlowGraph/index.module.less @@ -0,0 +1,43 @@ +.flowGraph { + width: 100%; +} + +.lineageNode { + height: 100%; + + &Active { + overflow: hidden; + border-radius: 0.25rem; + border-width: 2px; + border-style: solid; + --tw-border-opacity: 1; + border-color: rgba(57, 35, 150, var(--tw-border-opacity)); + --tw-bg-opacity: 1; + background-color: rgba(57, 35, 150, var(--tw-bg-opacity)); + --tw-text-opacity: 1; + color: rgba(255, 255, 255, var(--tw-text-opacity)); + opacity: 1; + } + + .box { + padding: 4px 12px 7px; + } + + .title { + font-size: 15px; + font-weight: 700; + } + + .subtitle { + font-size: 10px; + font-style: italic; + text-overflow: ellipsis; + max-width: 135px; + overflow: hidden; + white-space: nowrap; + } + + .navigate { + padding: 4px 12px 7px; + } +} diff --git a/ui/src/components/FlowGraph/index.ts b/ui/src/components/FlowGraph/index.ts new file mode 100644 index 000000000..0f6d659d8 --- /dev/null +++ b/ui/src/components/FlowGraph/index.ts @@ -0,0 +1,5 @@ +import FlowGraph from "./FlowGraph"; + +export * from "./interface"; + +export default FlowGraph; diff --git a/ui/src/components/FlowGraph/interface.ts b/ui/src/components/FlowGraph/interface.ts new file mode 100644 index 000000000..0949dbe97 --- /dev/null +++ b/ui/src/components/FlowGraph/interface.ts @@ -0,0 +1,30 @@ +import { CSSProperties } from "react"; +import { FeatureLineage } from "@/models/model"; +import { FeatureType } from "@/utils/utils"; +import { NodeProps, ReactFlowProps } from "react-flow-renderer"; + +export interface NodeData { + id: string; + label: string; + subtitle: string; + featureId: string; + version: string; + borderColor?: string; + active?: boolean; + detialUrl?: string; +} + +export interface FlowGraphProps { + className?: string; + style?: CSSProperties; + minHeight?: string | number; + height?: string | number; + loading?: boolean; + data?: FeatureLineage; + nodeId?: string; + project?: string; + snapGrid?: ReactFlowProps["snapGrid"]; + featureType?: FeatureType; +} + +export interface LineageNodeProps extends NodeProps {} diff --git a/ui/src/components/FlowGraph/utils.ts b/ui/src/components/FlowGraph/utils.ts new file mode 100644 index 000000000..141962895 --- /dev/null +++ b/ui/src/components/FlowGraph/utils.ts @@ -0,0 +1,192 @@ +import { Feature, FeatureLineage, RelationData } from "@/models/model"; +import { FeatureType, getFeatureDetailUrl } from "@/utils/utils"; +import dagre from "dagre"; +import { + Node, + Edge, + ArrowHeadType, + Position, + Elements, +} from "react-flow-renderer"; +import { NodeData } from "./interface"; + +const featureTypeColors: Record = { + feathr_source_v1: "hsl(315, 100%, 50%)", + feathr_anchor_v1: "hsl(270, 100%, 50%)", + feathr_anchor_feature_v1: "hsl(225, 100%, 50%)", + feathr_derived_feature_v1: "hsl(135, 100%, 50%)", +}; + +const DEFAULT_WIDTH = 20; +const DEFAULT_HEIGHT = 36; + +const generateNode = (project: string, data: Feature): Node => { + return { + id: data.guid, + type: "custom-node", + style: { + border: `2px solid featureTypeColors[data.typeName]`, + }, + position: { + x: 0, + y: 0, + }, + data: { + id: data.guid, + label: data.displayText, + subtitle: data.typeName, + featureId: 
data.guid, + version: data.version, + borderColor: featureTypeColors[data.typeName], + detialUrl: getFeatureDetailUrl(project, data), + }, + }; +}; + +const generateEdge = ( + data: RelationData, + entityMap: Record +): Edge => { + let { fromEntityId: from, toEntityId: to, relationshipType } = data; + + if (relationshipType === "Consumes") { + [from, to] = [to, from]; + } + const sourceNode = entityMap?.[from]; + const targetNode = entityMap?.[to]; + + return { + id: `e-${from}_${to}`, + source: from, + target: to, + arrowHeadType: ArrowHeadType.ArrowClosed, + data: { + sourceTypeName: sourceNode?.typeName, + targetTypeName: targetNode?.typeName, + }, + }; +}; + +export const getLineageNodes = ( + project: string, + lineageData: FeatureLineage, + featureType: FeatureType +): Node[] => { + const { guidEntityMap } = lineageData; + if (!guidEntityMap) { + return []; + } + + return Object.values(guidEntityMap).reduce( + (nodes: Node[], item: Feature) => { + if ( + item.typeName !== "feathr_workspace_v1" && + (featureType === FeatureType.AllNodes || + item.typeName === featureType || + (featureType === FeatureType.AnchorFeature && + item.typeName === FeatureType.Anchor)) + ) { + nodes.push(generateNode(project, item)); + } + return nodes; + }, + [] as Node[] + ); +}; + +export const getLineageEdge = ( + lineageData: FeatureLineage, + featureType: FeatureType +): Edge[] => { + if (!lineageData.relations || !lineageData.guidEntityMap) { + return []; + } + + return lineageData.relations.reduce((edges: Edge[], item) => { + if (["Consumes", "Contains", "Produces"].includes(item.relationshipType)) { + const edge = generateEdge(item, lineageData.guidEntityMap!); + if ( + edges.findIndex((item) => item.id === edge.id) === -1 && + edge.data.sourceTypeName !== "feathr_workspace_v1" && + (featureType === FeatureType.AllNodes || + (featureType === FeatureType.AnchorFeature && + edge.data.sourceTypeName === FeatureType.Anchor && + edge.data.targetTypeName === FeatureType.AnchorFeature)) + ) { + edges.push(edge); + } + } + + return edges; + }, [] as Edge[]); +}; + +export const getElements = ( + project: string, + lineageData: FeatureLineage, + featureType: FeatureType = FeatureType.AllNodes, + direction = "LR" +) => { + const elements: Elements = []; + + const dagreGraph = new dagre.graphlib.Graph({ compound: true }); + + dagreGraph.setDefaultEdgeLabel(() => ({})); + dagreGraph.setGraph({ rankdir: direction }); + + const isHorizontal = direction === "LR"; + + const nodes = getLineageNodes(project, lineageData, featureType); + let edges = getLineageEdge(lineageData, featureType); + + const anchorEdges = edges.filter((item) => { + return ( + item.data.sourceTypeName === FeatureType.Anchor && + item.data.targetTypeName === FeatureType.AnchorFeature + ); + }); + + edges = edges.reduce((data: any, item) => { + const anchorEdge = anchorEdges.find((i: any) => i.target === item.target); + if (anchorEdge) { + if ( + !( + item.data.sourceTypeName === FeatureType.Source && + item.data.targetTypeName === FeatureType.AnchorFeature + ) + ) { + data.push(item); + } + } else { + data.push(item); + } + return data; + }, []); + + nodes.forEach((item) => { + dagreGraph.setNode(item.id, { + label: item.data!.label, + node: item, + width: item.data!.label.length * 8 + DEFAULT_WIDTH, + height: item.style?.height || DEFAULT_HEIGHT, + }); + elements.push(item); + }); + + edges?.forEach((item: any) => { + dagreGraph.setEdge(item.source, item.target); + elements.push(item); + }); + + dagre.layout(dagreGraph); + + 
nodes.forEach((item) => { + const nodeWithPosition = dagreGraph.node(item.id); + item.targetPosition = isHorizontal ? Position.Left : Position.Top; + item.sourcePosition = isHorizontal ? Position.Right : Position.Bottom; + item.position.x = nodeWithPosition.x; + item.position.y = nodeWithPosition.y - DEFAULT_HEIGHT / 2; + }); + + return elements; +}; diff --git a/ui/src/components/ProjectsSelect/index.tsx b/ui/src/components/ProjectsSelect/index.tsx new file mode 100644 index 000000000..ca5fddf9f --- /dev/null +++ b/ui/src/components/ProjectsSelect/index.tsx @@ -0,0 +1,51 @@ +import React from "react"; +import { Select } from "antd"; +import { fetchProjects } from "@/api"; +import { useQuery } from "react-query"; + +export interface ProjectsSelectProps { + width?: number; + defaultValue?: string; + onChange?: (value: string) => void; +} + +const ProjectsSelect = (props: ProjectsSelectProps) => { + const { width = 350, defaultValue, onChange, ...restProps } = props; + + const { isLoading, data: options } = useQuery< + { value: string; label: string }[] + >( + ["projectsSelect"], + async () => { + try { + const result = await fetchProjects(); + return result.map((item) => ({ + value: item, + label: item, + })); + } catch (e) { + return Promise.reject(e); + } + }, + { + retry: false, + refetchOnWindowFocus: false, + } + ); + + return ( + + + + + + + + + + + + + + + + + ); +}; + +const FeatureFormComponent = forwardRef(FeatureForm); + +FeatureFormComponent.displayName = "FeatureFormComponent"; + +export default FeatureFormComponent; diff --git a/ui/src/pages/feature/components/FeatureTable/index.tsx b/ui/src/pages/feature/components/FeatureTable/index.tsx new file mode 100644 index 000000000..69e9c1ae6 --- /dev/null +++ b/ui/src/pages/feature/components/FeatureTable/index.tsx @@ -0,0 +1,151 @@ +import React, { forwardRef, useRef } from "react"; +import { Button } from "antd"; +import { useQuery } from "react-query"; +import { useNavigate } from "react-router-dom"; +import { Feature } from "@/models/model"; +import { fetchFeatures } from "@/api"; +import ResizeTable, { ResizeColumnType } from "@/components/ResizeTable"; + +export interface DataSourceTableProps { + project?: string; + keyword?: string; +} + +export interface SearchModel { + scope?: string; + roleName?: string; +} + +const DataSourceTable = (props: DataSourceTableProps, ref: any) => { + const navigate = useNavigate(); + + const { project, keyword } = props; + + const projectRef = useRef(project); + + const getDetialUrl = (guid: string) => { + return `/projects/${projectRef.current}/features/${guid}`; + }; + + const columns: ResizeColumnType[] = [ + { + key: "name", + title: "Name", + ellipsis: true, + width: 200, + render: (record: Feature) => { + return ( + + ); + }, + }, + { + key: "type", + title: "Type", + ellipsis: true, + width: 120, + render: (record: Feature) => { + return record.typeName.replace(/feathr_|_v1/gi, ""); + }, + }, + { + key: "transformation", + title: "Transformation", + width: 220, + render: (record: Feature) => { + const { transformExpr, defExpr } = record.attributes.transformation; + return transformExpr || defExpr; + }, + }, + { + key: "entitykey", + title: "Entity Key", + ellipsis: true, + width: 120, + render: (record: Feature) => { + const key = record.attributes.key && record.attributes.key[0]; + if ("NOT_NEEDED" !== key.keyColumn) { + return `${key.keyColumn} (${key.keyColumnType})`; + } else { + return "N/A"; + } + }, + }, + { + key: "aggregation", + title: "Aggregation", + ellipsis: true, + width: 
150, + render: (record: Feature) => { + const { transformation } = record.attributes; + return ( + <> + {transformation.aggFunc && `Type: ${transformation.aggFunc}`} +
+ {transformation.aggFunc && `Window: ${transformation.window}`} + + ); + }, + }, + { + title: "Action", + fixed: "right", + width: 100, + resize: false, + render: (record: Feature) => { + return ( + + ); + }, + }, + ]; + + const { isLoading, data: tableData } = useQuery( + ["dataSources", project, keyword], + async () => { + if (project) { + projectRef.current = project; + return await fetchFeatures(project, 1, 10, keyword || ""); + } else { + return []; + } + }, + { + retry: false, + refetchOnWindowFocus: false, + } + ); + + return ( + + ); +}; + +const DataSourceTableComponent = forwardRef( + DataSourceTable +); + +DataSourceTableComponent.displayName = "DataSourceTableComponent"; + +export default DataSourceTableComponent; diff --git a/ui/src/pages/feature/components/NodeDetails/FeatureNodeDetail.tsx b/ui/src/pages/feature/components/NodeDetails/FeatureNodeDetail.tsx new file mode 100644 index 000000000..0224d1d86 --- /dev/null +++ b/ui/src/pages/feature/components/NodeDetails/FeatureNodeDetail.tsx @@ -0,0 +1,52 @@ +import React from "react"; +import { Space } from "antd"; +import { Feature } from "@/models/model"; +import CardDescriptions from "@/components/CardDescriptions"; +import { + TransformationMap, + FeatureKeyMap, + TypeMap, +} from "@/utils/attributesMapping"; +import { getJSONMap } from "@/utils/utils"; + +export interface FeatureNodeDetialProps { + feature: Feature; +} + +const FeatureNodeDetial = (props: FeatureNodeDetialProps) => { + const { feature } = props; + + const { attributes } = feature; + const { transformation, key, type, tags } = attributes; + + const tagsMap = getJSONMap(tags); + + return ( + + + {key?.map((item, index) => { + return ( + + ); + })} + + + + ); +}; + +export default FeatureNodeDetial; diff --git a/ui/src/pages/feature/components/NodeDetails/SourceNodeDetial.tsx b/ui/src/pages/feature/components/NodeDetails/SourceNodeDetial.tsx new file mode 100644 index 000000000..fbf5be158 --- /dev/null +++ b/ui/src/pages/feature/components/NodeDetails/SourceNodeDetial.tsx @@ -0,0 +1,22 @@ +import React from "react"; +import { DataSource } from "@/models/model"; +import { SourceAttributesMap } from "@/utils/attributesMapping"; +import CardDescriptions from "@/components/CardDescriptions"; + +export interface SourceNodeDetialProps { + source: DataSource; +} + +const SourceNodeDetial = (props: SourceNodeDetialProps) => { + const { source } = props; + const { attributes } = source; + return ( + + ); +}; + +export default SourceNodeDetial; diff --git a/ui/src/pages/feature/components/NodeDetails/index.module.less b/ui/src/pages/feature/components/NodeDetails/index.module.less new file mode 100644 index 000000000..9e9815d75 --- /dev/null +++ b/ui/src/pages/feature/components/NodeDetails/index.module.less @@ -0,0 +1,10 @@ +.wrap { + :global { + .ant-space { + margin-bottom: 16px; + } + .card { + box-shadow: none; + } + } +} diff --git a/ui/src/pages/feature/components/NodeDetails/index.tsx b/ui/src/pages/feature/components/NodeDetails/index.tsx new file mode 100644 index 000000000..edbce587d --- /dev/null +++ b/ui/src/pages/feature/components/NodeDetails/index.tsx @@ -0,0 +1,66 @@ +import React from "react"; +import { useParams, useSearchParams } from "react-router-dom"; +import { fetchFeature, fetchDataSource } from "@/api"; +import { LoadingOutlined } from "@ant-design/icons"; +import { useQuery } from "react-query"; +import { Spin, Typography } from "antd"; +import { FeatureType } from "@/utils/utils"; +import FeatureNodeDetail from "./FeatureNodeDetail"; +import 
SourceNodeDetial from "./SourceNodeDetial"; + +import styles from "./index.module.less"; + +const { Paragraph } = Typography; + +const NodeDetails = () => { + const [searchParams] = useSearchParams(); + const { project } = useParams(); + const nodeId = searchParams.get("nodeId") as string; + const featureType = searchParams.get("featureType") as string; + + const isSource = featureType === FeatureType.Source; + const isFeature = + featureType === FeatureType.AnchorFeature || + featureType === FeatureType.DerivedFeature; + + const { isLoading, data } = useQuery( + ["nodeDetails", project, nodeId], + async () => { + if (isSource || isFeature) { + const api = isSource ? fetchDataSource : fetchFeature; + return await api(project!, nodeId); + } + }, + { + retry: false, + refetchOnWindowFocus: false, + } + ); + + return ( + } + > +
+        {data ? (
+          isSource ? (
+            <SourceNodeDetial source={data} />
+          ) : (
+            <FeatureNodeDetail feature={data} />
+          )
+        ) : (
+          !isLoading && (
+            <Paragraph>
+              Click on source or feature node to show metadata and metric
+              details
+            </Paragraph>
+          )
+        )}
+      </div>
+ ); +}; + +export default NodeDetails; diff --git a/ui/src/pages/feature/components/SearchBar/index.tsx b/ui/src/pages/feature/components/SearchBar/index.tsx new file mode 100644 index 000000000..1a32f28b2 --- /dev/null +++ b/ui/src/pages/feature/components/SearchBar/index.tsx @@ -0,0 +1,67 @@ +import React, { useRef } from "react"; +import { Form, Input, Button } from "antd"; +import { useNavigate } from "react-router-dom"; +import ProjectsSelect from "@/components/ProjectsSelect"; + +export interface SearchValue { + project?: string; + keyword?: string; +} + +export interface SearchBarProps { + defaultValues?: SearchValue; + onSearch?: (values: SearchValue) => void; +} + +const { Item } = Form; + +const SearchBar = (props: SearchBarProps) => { + const [form] = Form.useForm(); + + const navigate = useNavigate(); + + const { defaultValues, onSearch } = props; + + const timeRef = useRef(null); + + const onChangeKeyword = () => { + clearTimeout(timeRef.current); + timeRef.current = setTimeout(() => { + form.submit(); + }, 350); + }; + + return ( +
+
+ + + + + + +
+ +
+ ); +}; + +export default SearchBar; diff --git a/ui/src/pages/feature/featureDetails.tsx b/ui/src/pages/feature/featureDetails.tsx index 549e5e3f7..a5bef8688 100644 --- a/ui/src/pages/feature/featureDetails.tsx +++ b/ui/src/pages/feature/featureDetails.tsx @@ -1,218 +1,117 @@ -import React, { useEffect, useState } from "react"; -import { Alert, Button, Card, Col, Row, Space, Spin, Typography } from "antd"; +import React, { useEffect, useRef, useState } from "react"; +import { + Alert, + Button, + PageHeader, + Breadcrumb, + Space, + Card, + Spin, + Descriptions, +} from "antd"; import { LoadingOutlined } from "@ant-design/icons"; -import { useNavigate, useParams } from "react-router-dom"; -import { QueryStatus, useQuery } from "react-query"; +import { Link, useNavigate, useParams } from "react-router-dom"; +import { useQuery } from "react-query"; import { AxiosError } from "axios"; -import { fetchFeature } from "../../api"; -import { Feature, InputFeature } from "../../models/model"; -import { FeatureLineage } from "../../models/model"; -import { fetchFeatureLineages } from "../../api"; -import { Elements } from "react-flow-renderer"; -import Graph from "../../components/graph/graph"; -import { getElements } from "../../components/graph/utils"; - -const { Title } = Typography; - -type FeatureKeyProps = { feature: Feature }; -const FeatureKey = ({ feature }: FeatureKeyProps) => { - const keys = feature.attributes.key; - return ( - <> - {keys && keys.length > 0 && ( - - - Entity Key -
-              <p>Full Name: {keys[0].fullName}</p>
-              <p>Key Column: {keys[0].keyColumn}</p>
-              <p>Description: {keys[0].description}</p>
-              <p>Key Column Alias: {keys[0].keyColumnAlias}</p>
-              <p>Key Column Type: {keys[0].keyColumnType}</p>
-            </div>
-          </Card>
-        </Col>
-      )}
-    </>
-  );
-};
-
-type FeatureTypeProps = { feature: Feature };
-const FeatureType = ({ feature }: FeatureTypeProps) => {
-  const type = feature.attributes.type;
-  return (
-    <>
-      {type && (
-        <Col>
-          <Card>
-            <Title level={4}>Type</Title>
-            <div>
-              <p>Dimension Type: {type.dimensionType}</p>
-              <p>Tensor Category: {type.tensorCategory}</p>
-              <p>Type: {type.type}</p>
-              <p>Value Type: {type.valType}</p>
-            </div>
-          </Card>
-        </Col>
-      )}
-    </>
-  );
-};
-
-type FeatureTransformationProps = { feature: Feature };
-const FeatureTransformation = ({ feature }: FeatureTransformationProps) => {
-  const transformation = feature.attributes.transformation;
-  return (
-    <>
-      {transformation && (
-        <Col>
-          <Card>
-            <Title level={4}>Transformation</Title>
-            <div>
-              {transformation.transformExpr && (
-                <p>Expression: {transformation.transformExpr}</p>
-              )}
-              {transformation.filter && <p>Filter: {transformation.filter}</p>}
-              {transformation.aggFunc && (
-                <p>Aggregation: {transformation.aggFunc}</p>
-              )}
-              {transformation.limit && <p>Limit: {transformation.limit}</p>}
-              {transformation.groupBy && (
-                <p>Group By: {transformation.groupBy}</p>
-              )}
-              {transformation.window && <p>Window: {transformation.window}</p>}
-              {transformation.defExpr && (
-                <p>Expression: {transformation.defExpr}</p>
-              )}
-            </div>
- - )} - - ); -}; +import { fetchFeature, fetchFeatureLineages } from "@/api"; +import { Feature, InputFeature, FeatureLineage } from "@/models/model"; +import FlowGraph from "@/components/FlowGraph"; +import CardDescriptions from "@/components/CardDescriptions"; +import { + FeatureKeyMap, + TransformationMap, + TypeMap, +} from "@/utils/attributesMapping"; +import { getJSONMap } from "@/utils/utils"; + +const contentStyle = { marginRight: 16 }; type InputAnchorFeaturesProps = { project: string; feature: Feature }; -const InputAnchorFeatures = ({ - project, - feature, -}: InputAnchorFeaturesProps) => { - const navigate = useNavigate(); - const inputAnchorFeatures = feature.attributes.inputAnchorFeatures; - return ( - <> - {inputAnchorFeatures && inputAnchorFeatures.length > 0 && ( - - - Input Anchor Features - {inputAnchorFeatures.map((input_feature) => ( - - ))} - - - )} - - ); + +const InputAnchorFeatures = (props: InputAnchorFeaturesProps) => { + const { project, feature } = props; + + const { inputAnchorFeatures } = feature.attributes; + + return inputAnchorFeatures?.length > 0 ? ( + + + {inputAnchorFeatures.map((input_feature) => ( + + + {input_feature.uniqueAttributes.qualifiedName} + + + ))} + + + ) : null; }; type InputDerivedFeaturesProps = { project: string; feature: Feature }; -const InputDerivedFeatures = ({ - project, - feature, -}: InputDerivedFeaturesProps) => { - const navigate = useNavigate(); - const inputDerivedFeatures = feature.attributes.inputDerivedFeatures; - return ( - <> - {inputDerivedFeatures && inputDerivedFeatures.length > 0 && ( - - - Input Derived Features - {inputDerivedFeatures.map((input_feature: InputFeature) => ( - - ))} - - - )} - - ); + +const InputDerivedFeatures = (props: InputDerivedFeaturesProps) => { + const { project, feature } = props; + + const { inputDerivedFeatures } = feature.attributes; + + return inputDerivedFeatures?.length ? ( + + + {inputDerivedFeatures.map((input_feature: InputFeature) => ( + + + {input_feature.uniqueAttributes.qualifiedName} + + + ))} + + + ) : null; }; const FeatureLineageGraph = () => { - const { featureId } = useParams() as Params; + const { project, featureId } = useParams() as Params; const [lineageData, setLineageData] = useState({ - guidEntityMap: null, - relations: null, + guidEntityMap: {}, + relations: [], }); - const [elements, SetElements] = useState([]); + const [loading, setLoading] = useState(false); + const mountedRef = useRef(true); + useEffect(() => { const fetchLineageData = async () => { setLoading(true); const data = await fetchFeatureLineages(featureId); - setLineageData(data); - setLoading(false); + if (mountedRef.current) { + setLineageData(data); + setLoading(false); + } }; fetchLineageData(); }, [featureId]); - // Generate graph data on client side, invoked after graphData or featureType is changed useEffect(() => { - const generateGraphData = async () => { - SetElements(getElements(lineageData, "all_nodes")!); + mountedRef.current = true; + return () => { + mountedRef.current = false; }; - - generateGraphData(); - }, [lineageData]); - - return ( - <> - {loading ? ( - } /> - ) : ( - - - Lineage - - - - )} - - ); + }, []); + + return !loading ? 
(
+    <FlowGraph
+      data={lineageData}
+      nodeId={featureId}
+      project={project}
+    />
+  ) : null;
+};
- - - -
-
- - - - - - - - -
-
- - ); + return ( +
+ + + Features + + Feature Details + } - } - }; - - return
{render(status)}
; + extra={[ + , + ]} + > + } + > + + {error && } + + + + {key?.map((item, index) => { + return ( + + ); + })} + + + + + + +
+
+ ); }; export default FeatureDetails; diff --git a/ui/src/pages/feature/features.tsx b/ui/src/pages/feature/features.tsx index 275cde11f..9ace6ead6 100644 --- a/ui/src/pages/feature/features.tsx +++ b/ui/src/pages/feature/features.tsx @@ -1,20 +1,27 @@ -import { Button, Card, Space, Typography } from "antd"; -import { useNavigate, useSearchParams } from "react-router-dom"; -import FeatureList from "../../components/featureList"; - -const { Title } = Typography; +import { useState } from "react"; +import { PageHeader } from "antd"; +import { useSearchParams } from "react-router-dom"; +import SearchBar, { SearchValue } from "./components/SearchBar"; +import FeatureTable from "./components/FeatureTable"; const Features = () => { const [searchParams] = useSearchParams(); - const project = (searchParams.get("project") as string) ?? ""; - const keyword = (searchParams.get("keyword") as string) ?? ""; + + const [search, setProject] = useState({ + project: searchParams.get("project") || undefined, + keyword: searchParams.get("keyword") || undefined, + }); + + const onSearch = (values: SearchValue) => { + setProject(values); + }; return (
-
-      Features
-
-
+
+
+
+

 );
 };
diff --git a/ui/src/pages/feature/lineageGraph.tsx b/ui/src/pages/feature/lineageGraph.tsx
index ac75dff91..d8b1473df 100644
--- a/ui/src/pages/feature/lineageGraph.tsx
+++ b/ui/src/pages/feature/lineageGraph.tsx
@@ -1,17 +1,17 @@
-import React, { useEffect, useState } from "react";
-import { Card, Col, Radio, Row, Spin, Tabs, Typography } from "antd";
+import React, { useEffect, useRef, useState } from "react";
+import { PageHeader, Row, Col, Radio, Tabs } from "antd";
 import { useParams, useSearchParams } from "react-router-dom";
-import { Elements } from "react-flow-renderer";
-import Graph from "../../components/graph/graph";
-import { fetchProjectLineages } from "../../api";
-import { FeatureLineage } from "../../models/model";
-import { LoadingOutlined } from "@ant-design/icons";
-import GraphNodeDetails from "../../components/graph/graphNodeDetails";
-import { getElements } from "../../components/graph/utils";
-import { FeatureType } from "../../utils/utils";
+import FlowGraph from "@/components/FlowGraph";
+import { fetchProjectLineages } from "@/api";
+import { FeatureLineage } from "@/models/model";
+import { FeatureType } from "@/utils/utils";
+import NodeDetails from "./components/NodeDetails";

-const { Title } = Typography;
-const { TabPane } = Tabs;
+const items = [
+  { label: "Metadata", key: "1", children: },
+  { label: "Metrics", key: "2", children: Under construction }, // key is required
+  { label: "Jobs", key: "3", children: Under construction },
+];

 type Params = {
   project: string;
@@ -22,90 +22,75 @@ const LineageGraph = () => {
   const nodeId = searchParams.get("nodeId") as string;

   const [lineageData, setLineageData] = useState({
-    guidEntityMap: null,
-    relations: null,
+    guidEntityMap: {},
+    relations: [],
   });
+  const [loading, setLoading] = useState(false);
-  const [elements, SetElements] = useState([]);
-  const [featureType, setFeatureType] = useState("all_nodes");
+
+  const [featureType, setFeatureType] = useState(
+    FeatureType.AllNodes
+  );
+
+  const mountedRef = useRef(true);

   // Fetch lineage data from server side, invoked immediately after component is mounted
   useEffect(() => {
     const fetchLineageData = async () => {
       setLoading(true);
       const data = await fetchProjectLineages(project);
-      setLineageData(data);
-      setLoading(false);
+      if (mountedRef.current) {
+        setLineageData(data);
+        setLoading(false);
+      }
     };
     fetchLineageData();
   }, [project]);

-  // Generate graph data on client side, invoked after graphData or featureType is changed
+  const toggleFeatureType = (type: FeatureType) => {
+    setFeatureType(type);
+  };
+
   useEffect(() => {
-    const generateGraphData = async () => {
-      SetElements(getElements(lineageData, featureType)!);
+    mountedRef.current = true;
+    return () => {
+      mountedRef.current = false;
     };
-
-    generateGraphData();
-  }, [lineageData, featureType]);
-
-  const toggleFeatureType = (type: string) => {
-    setFeatureType((prevType: string | null) => {
-      if (prevType === type) {
-        return null;
-      }
-      return type;
-    });
-  };

   return (
-
-        Lineage {project}
-
-          toggleFeatureType(e.target.value)}
-        >
-          All Nodes
-          Source
-          Anchor
-
-            Anchor Feature
-
-
-            Derived Feature
-
-
-
-      {loading ? (
-        }
+
+        toggleFeatureType(e.target.value)}
+      >
+        All Nodes
+        Source
+
+          Anchor Feature
+
+
+          Derived Feature
+
+
+
+
+
-      ) : (
-
-
-
-
-
-
-
-
-
-
-          Under construction
-
-
-
-          Under construction
-
-
-
-
-
-      )}
-
+
+
+
+
+
+
 );
 };
diff --git a/ui/src/pages/feature/newFeature.tsx b/ui/src/pages/feature/newFeature.tsx
index d51dd2aa0..50afd64c3 100644
--- a/ui/src/pages/feature/newFeature.tsx
+++ b/ui/src/pages/feature/newFeature.tsx
@@ -1,16 +1,13 @@
 import React from "react";
-import { Card, Typography } from "antd";
-import FeatureForm from "../../components/featureForm";
-
-const { Title } = Typography;
+import { PageHeader } from "antd";
+import FeatureForm from "./components/FeatureForm";

 const NewFeature = () => {
   return (
-
-        Create Feature
+
-
+
 );
 };
diff --git a/ui/src/pages/home/home.tsx b/ui/src/pages/home/home.tsx
index f23b790cf..824d5db95 100644
--- a/ui/src/pages/home/home.tsx
+++ b/ui/src/pages/home/home.tsx
@@ -1,31 +1,58 @@
 import React from "react";
-import { Link } from "react-router-dom";
-import { Card, Col, Row, Typography } from "antd";
+
 import {
   CopyOutlined,
   DatabaseOutlined,
   EyeOutlined,
   ProjectOutlined,
 } from "@ant-design/icons";
+import { Card, Col, Row, Typography } from "antd";
+import cs from "classnames";
+import { Link } from "react-router-dom";
+
+import styles from "./index.module.less";

 const { Title } = Typography;
+const { Meta } = Card;
+
+const features = [
+  {
+    icon: ,
+    title: "Projects",
+    link: "/projects",
+    linkText: "See all",
+  },
+  {
+    icon: ,
+    title: "Sources",
+    link: "/dataSources",
+    linkText: "See all",
+  },
+  {
+    icon: ,
+    title: "Features",
+    link: "/features",
+    linkText: "See all",
+  },
+  {
+    icon: ,
+    title: "Monitoring",
+    link: "/monitoring",
+    linkText: "See all",
+  },
+];

 const Home = () => {
   return (
-
-
+
+        Welcome to Feathr Feature Store
         You can use Feathr UI to search features, identify data sources, track feature lineages and manage access controls. {" "}
-
-
-
-
-
-
-
-
-
-
-              Projects
-
-
-
-
-
-                See all
-
-
-
-
-
-
-
-
-
-
-
+        {features.map((item) => {
+          return (
+
+
+              {item.title}
+            }
+            description={{item.linkText}} />
-
-
-
-
-              Sources
-
-
-
-
-
-                See all
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-              Features
-
-
-
-
-
-                See all
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-              Monitoring
-
-
-
-
-
-                See all
-
-
-
-
-
-
+
+
+          );
+        })}
-
-
-
+
+
+        Need help to get started? Explore the following resources to get started with Feathr: