diff --git a/.coveragerc b/.coveragerc
index 7633a29ef..a3cb38b9b 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,2 +1,2 @@
 [run]
-omit = backend/dataall/cdkproxy/assets/*, backend/dataall/aws/*
+omit = backend/dataall/base/cdkproxy/assets/*, backend/dataall/aws/*
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 000000000..c85eaa3c7
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,5 @@
+# Run this command to always ignore formatting commits in `git blame`
+# git config blame.ignoreRevsFile .git-blame-ignore-revs
+
+# Enforce frontend styling and remove dead code
+fefc45c2cdc4d3107369c4d70210894d098a775c
diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml
index 9405ebc11..fab3625f7 100644
--- a/.github/workflows/coverage.yml
+++ b/.github/workflows/coverage.yml
@@ -6,6 +6,7 @@ on:
     branches:
       - main
       - release/*
+      - main-v2

 jobs:
   run-tests:
diff --git a/.github/workflows/minimal-security-check.yml b/.github/workflows/minimal-security-check.yml
index 5c66c2d2e..842dcf79a 100644
--- a/.github/workflows/minimal-security-check.yml
+++ b/.github/workflows/minimal-security-check.yml
@@ -6,6 +6,7 @@ on:
     branches:
       - main
       - release/*
+      - main-v2

 jobs:
   build:
diff --git a/.github/workflows/static-checking.yml b/.github/workflows/static-checking.yml
index 1ac6d4325..06eb20dc5 100644
--- a/.github/workflows/static-checking.yml
+++ b/.github/workflows/static-checking.yml
@@ -6,6 +6,7 @@ on:
     branches:
       - main
       - release/*
+      - main-v2

 jobs:
   Check:
diff --git a/.github/workflows/validate-db-schema.yml b/.github/workflows/validate-db-schema.yml
index 6edb79172..b99f84e62 100644
--- a/.github/workflows/validate-db-schema.yml
+++ b/.github/workflows/validate-db-schema.yml
@@ -6,6 +6,7 @@ on:
     branches:
       - main
       - release/*
+      - main-v2

 env:
   envname: local
diff --git a/.gitignore b/.gitignore
index aab8ca3d0..a0526480f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -44,7 +44,7 @@ cobertura.xml
 /node_modules/
 *venv*
 *pgdata*
-backend/dataall/cdkproxy/cdk.out/
+backend/dataall/base/cdkproxy/cdk.out/
 **.zip
 #Frontend
 /node_modules
@@ -70,5 +70,3 @@ npm-debug.log*
 yarn-debug.log*
 yarn-error.log*
 .idea
-/backend/dataall/cdkproxy/assets/gluedataqualityjob/datahubdq.zip
-/backend/dataall/cdkproxy/assets/glueprofilingjob/datahubdq.zip
diff --git a/Makefile b/Makefile
index 4c695ed4f..fed73e18c 100644
--- a/Makefile
+++ b/Makefile
@@ -28,7 +28,7 @@ install-backend:
 	pip install -r backend/requirements.txt

 install-cdkproxy:
-	pip install -r backend/dataall/cdkproxy/requirements.txt
+	pip install -r backend/dataall/base/cdkproxy/requirements.txt

 install-tests:
 	pip install -r tests/requirements.txt
diff --git a/backend/api_handler.py b/backend/api_handler.py
index 46d902f6d..1e3a8597a 100644
--- a/backend/api_handler.py
+++ b/backend/api_handler.py
@@ -9,11 +9,15 @@
     graphql_sync,
 )

-from dataall.api.Objects import bootstrap as bootstrap_schema, get_executable_schema
-from dataall.aws.handlers.service_handlers import Worker
-from dataall.aws.handlers.sqs import SqsQueue
-from dataall.db import init_permissions, get_engine, api, permissions
-from dataall.searchproxy import connect
+from dataall.base.api import bootstrap as bootstrap_schema, get_executable_schema
+from dataall.core.tasks.service_handlers import Worker
+from dataall.base.aws.sqs import SqsQueue
+from dataall.base.context import set_context, dispose_context, RequestContext
+from dataall.core.permissions.db import save_permissions_with_tenant
+from dataall.core.permissions.db.tenant_policy_repositories
import TenantPolicy +from dataall.base.db import get_engine +from dataall.core.permissions import permissions +from dataall.base.loader import load_modules, ImportMode logger = logging.getLogger() logger.setLevel(os.environ.get('LOG_LEVEL', 'INFO')) @@ -23,14 +27,14 @@ for name in ['boto3', 's3transfer', 'botocore', 'boto']: logging.getLogger(name).setLevel(logging.ERROR) +load_modules(modes={ImportMode.API}) SCHEMA = bootstrap_schema() TYPE_DEFS = gql(SCHEMA.gql(with_directives=False)) ENVNAME = os.getenv('envname', 'local') ENGINE = get_engine(envname=ENVNAME) -ES = connect(envname=ENVNAME) Worker.queue = SqsQueue.send -init_permissions(ENGINE) +save_permissions_with_tenant(ENGINE) def resolver_adapter(resolver): @@ -42,7 +46,6 @@ def adapted(obj, info, **kwargs): username=info.context['username'], groups=info.context['groups'], schema=info.context['schema'], - cdkproxyurl=info.context['cdkproxyurl'], ), source=obj or None, **kwargs, @@ -97,7 +100,6 @@ def handler(event, context): log.info('Lambda Event %s', event) log.debug('Env name %s', ENVNAME) - log.debug('ElasticSearch %s', ES) log.debug('Engine %s', ENGINE.engine.url) if event['httpMethod'] == 'OPTIONS': @@ -117,14 +119,14 @@ def handler(event, context): groups = get_groups(event['requestContext']['authorizer']['claims']) with ENGINE.scoped_session() as session: for group in groups: - policy = api.TenantPolicy.find_tenant_policy( + policy = TenantPolicy.find_tenant_policy( session, group, 'dataall' ) if not policy: print( f'No policy found for Team {group}. Attaching TENANT_ALL permissions' ) - api.TenantPolicy.attach_group_tenant_policy( + TenantPolicy.attach_group_tenant_policy( session=session, group=group, permissions=permissions.TENANT_ALL, @@ -135,14 +137,15 @@ def handler(event, context): print(f'Error managing groups due to: {e}') groups = [] + set_context(RequestContext(ENGINE, username, groups)) + app_context = { 'engine': ENGINE, - 'es': ES, 'username': username, 'groups': groups, 'schema': SCHEMA, - 'cdkproxyurl': None, } + else: raise Exception(f'Could not initialize user context from event {event}') @@ -150,6 +153,8 @@ def handler(event, context): success, response = graphql_sync( schema=executable_schema, data=query, context_value=app_context ) + + dispose_context() response = json.dumps(response) log.info('Lambda Response %s', response) diff --git a/backend/aws_handler.py b/backend/aws_handler.py index 56089ab34..8ad2b2157 100644 --- a/backend/aws_handler.py +++ b/backend/aws_handler.py @@ -2,8 +2,9 @@ import logging import os -from dataall.aws.handlers.service_handlers import Worker -from dataall.db import get_engine +from dataall.core.tasks.service_handlers import Worker +from dataall.base.db import get_engine +from dataall.base.loader import load_modules, ImportMode logger = logging.getLogger() logger.setLevel(os.environ.get('LOG_LEVEL')) @@ -13,6 +14,8 @@ engine = get_engine(envname=ENVNAME) +load_modules(modes={ImportMode.HANDLERS}) + def handler(event, context=None): """Processes messages received from sqs""" diff --git a/backend/cdkproxymain.py b/backend/cdkproxymain.py index 343d5692b..f30ed3b82 100644 --- a/backend/cdkproxymain.py +++ b/backend/cdkproxymain.py @@ -7,9 +7,12 @@ from botocore.exceptions import ClientError from fastapi import FastAPI, BackgroundTasks, status, Response -import dataall.cdkproxy.cdk_cli_wrapper as wrapper -from dataall.cdkproxy.stacks import StackManager -from dataall import db +import dataall.base.cdkproxy.cdk_cli_wrapper as wrapper +from dataall.base import db +from 
dataall.base.loader import load_modules, ImportMode +from dataall.base.cdkproxy.stacks import StackManager +from dataall.core.organizations.db.organization_models import Organization +from dataall.core.stacks.db.stack_models import Stack print('\n'.join(sys.path)) @@ -20,7 +23,7 @@ f"Application started for envname= `{ENVNAME}` DH_DOCKER_VERSION:{os.environ.get('DH_DOCKER_VERSION')}" ) - +load_modules(modes={ImportMode.CDK}) StackManager.registered_stacks() @@ -29,7 +32,7 @@ def connect(): try: engine = db.get_engine(envname=ENVNAME) with engine.scoped_session() as session: - orgs = session.query(db.models.Organization).all() + orgs = session.query(Organization).all() return engine except Exception as e: raise Exception('Connection Error') @@ -151,7 +154,7 @@ async def create_stack( for stackid in stack_ids: with engine.scoped_session() as session: - stack: db.models.Stack = session.query(db.models.Stack).get(stackid) + stack: Stack = session.query(Stack).get(stackid) if not stack: logger.warning(f'Could not find stack with stackUri `{stackid}`') response.status_code = status.HTTP_302_FOUND @@ -194,7 +197,7 @@ async def delete_stack( 'message': f'Failed to connect to database for environment `{ENVNAME}`', } with engine.scoped_session() as session: - stack: db.models.Stack = session.query(db.models.Stack).get(stackid) + stack: Stack = session.query(Stack).get(stackid) if not stack: logger.warning(f'Could not find stack with stackUri `{stackid}`') response.status_code = status.HTTP_302_FOUND @@ -232,7 +235,7 @@ def get_stack(stackid: str, response: Response): 'message': f'Failed to connect to database for environment `{ENVNAME}`', } with engine.scoped_session() as session: - stack: db.models.Stack = session.query(db.models.Stack).get(stackid) + stack: Stack = session.query(Stack).get(stackid) if not stack: logger.warning(f'Could not find stack with stackUri `{stackid}`') response.status_code = status.HTTP_404_NOT_FOUND diff --git a/backend/dataall/__init__.py b/backend/dataall/__init__.py index dad3c2676..a6387d880 100644 --- a/backend/dataall/__init__.py +++ b/backend/dataall/__init__.py @@ -1 +1,2 @@ -from . import utils, db, aws, api, searchproxy, tasks, version +from . import core, version +from .base import utils, db, api diff --git a/backend/dataall/api/Objects/Activity/__init__.py b/backend/dataall/api/Objects/Activity/__init__.py deleted file mode 100644 index 7a595b458..000000000 --- a/backend/dataall/api/Objects/Activity/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - queries, - resolvers, - schema, - mutations, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/Activity/input_types.py b/backend/dataall/api/Objects/Activity/input_types.py deleted file mode 100644 index e8c81dc89..000000000 --- a/backend/dataall/api/Objects/Activity/input_types.py +++ /dev/null @@ -1,9 +0,0 @@ -from ... import gql - -ActivityFilter = gql.InputType( - name='ActivityFilter', - arguments=[ - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) diff --git a/backend/dataall/api/Objects/Activity/queries.py b/backend/dataall/api/Objects/Activity/queries.py deleted file mode 100644 index 9133c3d94..000000000 --- a/backend/dataall/api/Objects/Activity/queries.py +++ /dev/null @@ -1,10 +0,0 @@ -from ... 
import gql -from .resolvers import * - - -listUserActivities = gql.QueryField( - name='listUserActivities', - type=gql.Ref('ActivitySearchResult'), - args=[gql.Argument(name='filter', type=gql.Ref('ActivityFilter'))], - resolver=list_user_activities, -) diff --git a/backend/dataall/api/Objects/Activity/resolvers.py b/backend/dataall/api/Objects/Activity/resolvers.py deleted file mode 100644 index 993e55db7..000000000 --- a/backend/dataall/api/Objects/Activity/resolvers.py +++ /dev/null @@ -1,17 +0,0 @@ -from ....api.context import Context -from ....db import paginate, models - - -def list_user_activities(context: Context, source, filter: dict = None): - if not filter: - filter = {} - print('filter = ', filter) - with context.engine.scoped_session() as session: - q = ( - session.query(models.Activity) - .filter(models.Activity.owner == context.username) - .order_by(models.Activity.created.desc()) - ) - return paginate( - q, page=filter.get('page', 1), page_size=filter.get('pageSize', 10) - ).to_dict() diff --git a/backend/dataall/api/Objects/Activity/schema.py b/backend/dataall/api/Objects/Activity/schema.py deleted file mode 100644 index 5e6734680..000000000 --- a/backend/dataall/api/Objects/Activity/schema.py +++ /dev/null @@ -1,28 +0,0 @@ -from ... import gql - -Activity = gql.ObjectType( - name='Activity', - fields=[ - gql.Field(name='activityUri', type=gql.ID), - gql.Field(name='owner', type=gql.NonNullableType(gql.String)), - gql.Field(name='target', type=gql.String), - gql.Field(name='targetType', type=gql.String), - gql.Field(name='targetUri', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='action', type=gql.String), - gql.Field(name='summary', type=gql.String), - ], -) - - -ActivitySearchResult = gql.ObjectType( - name='ActivitySearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(Activity)), - ], -) diff --git a/backend/dataall/api/Objects/AthenaQueryResult/__init__.py b/backend/dataall/api/Objects/AthenaQueryResult/__init__.py deleted file mode 100644 index d130be5ef..000000000 --- a/backend/dataall/api/Objects/AthenaQueryResult/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from . 
import schema, helpers -from .wrapper import ( - AthenaQueryResult, - AthenaQueryResultStatus, -) - -__all__ = ['schema', 'helpers', 'AthenaQueryResult', 'AthenaQueryResultStatus'] diff --git a/backend/dataall/api/Objects/AthenaQueryResult/helpers.py b/backend/dataall/api/Objects/AthenaQueryResult/helpers.py deleted file mode 100644 index e90f05e71..000000000 --- a/backend/dataall/api/Objects/AthenaQueryResult/helpers.py +++ /dev/null @@ -1,83 +0,0 @@ -import nanoid -from pyathena import connect - -from ....db import models -from ....aws.handlers.sts import SessionHelper - - -def random_key(): - return nanoid.generate() - - -def run_query(environment: models.Environment, sql=None): - - boto3_session = SessionHelper.remote_session(accountid=environment.AwsAccountId) - creds = boto3_session.get_credentials() - connection = connect( - aws_access_key_id=creds.access_key, - aws_secret_access_key=creds.secret_key, - aws_session_token=creds.token, - work_group='primary', - s3_staging_dir=f's3://{environment.EnvironmentDefaultBucketName}/preview/', - region_name=environment.region, - ) - cursor = connection.cursor() - cursor.execute(sql) - columns = [] - for f in cursor.description: - columns.append({'columnName': f[0], 'typeName': 'String'}) - - rows = [] - for row in cursor: - record = {'cells': []} - for col_position, column in enumerate(columns): - cell = {} - cell['columnName'] = column['columnName'] - cell['typeName'] = column['typeName'] - cell['value'] = str(row[col_position]) - record['cells'].append(cell) - rows.append(record) - return { - 'error': None, - 'AthenaQueryId': cursor.query_id, - 'ElapsedTime': cursor.total_execution_time_in_millis, - 'rows': rows, - 'columns': columns, - } - - -def run_query_with_role(environment: models.Environment, environment_group: models.EnvironmentGroup, sql=None): - base_session = SessionHelper.remote_session(accountid=environment.AwsAccountId) - boto3_session = SessionHelper.get_session(base_session=base_session, role_arn=environment_group.environmentIAMRoleArn) - creds = boto3_session.get_credentials() - connection = connect( - aws_access_key_id=creds.access_key, - aws_secret_access_key=creds.secret_key, - aws_session_token=creds.token, - work_group=environment_group.environmentAthenaWorkGroup, - s3_staging_dir=f's3://{environment.EnvironmentDefaultBucketName}/athenaqueries/{environment_group.environmentAthenaWorkGroup}/', - region_name=environment.region, - ) - cursor = connection.cursor() - cursor.execute(sql) - columns = [] - for f in cursor.description: - columns.append({'columnName': f[0], 'typeName': 'String'}) - - rows = [] - for row in cursor: - record = {'cells': []} - for col_position, column in enumerate(columns): - cell = {} - cell['columnName'] = column['columnName'] - cell['typeName'] = column['typeName'] - cell['value'] = str(row[col_position]) - record['cells'].append(cell) - rows.append(record) - return { - 'error': None, - 'AthenaQueryId': cursor.query_id, - 'ElapsedTime': cursor.total_execution_time_in_millis, - 'rows': rows, - 'columns': columns, - } diff --git a/backend/dataall/api/Objects/AthenaQueryResult/schema.py b/backend/dataall/api/Objects/AthenaQueryResult/schema.py deleted file mode 100644 index 34ff2fb3e..000000000 --- a/backend/dataall/api/Objects/AthenaQueryResult/schema.py +++ /dev/null @@ -1,45 +0,0 @@ -from ... 
import gql - -AthenaResultColumnDescriptor = gql.ObjectType( - name='AthenaResultColumnDescriptor', - fields=[ - gql.Field(name='columnName', type=gql.NonNullableType(gql.String)), - gql.Field(name='typeName', type=gql.NonNullableType(gql.String)), - ], -) - - -AthenaResultRecordCell = gql.ObjectType( - name='AthenaResultRecordCell', - fields=[ - gql.Field(name='value', type=gql.String), - gql.Field(name='typeName', type=gql.NonNullableType(gql.String)), - gql.Field(name='columnName', type=gql.NonNullableType(gql.String)), - ], -) - -AthenaResultRecord = gql.ObjectType( - name='AthenaResultRecord', - fields=[ - gql.Field(name='cells', type=gql.ArrayType(gql.Ref('AthenaResultRecordCell'))) - ], -) - - -AthenaQueryResult = gql.ObjectType( - name='AthenaQueryResult', - fields=[ - gql.Field(name='Error', type=gql.String), - gql.Field(name='OutputLocation', type=gql.String), - gql.Field(name='AthenaQueryId', type=gql.String), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='region', type=gql.String), - gql.Field(name='ElapsedTimeInMs', type=gql.Integer), - gql.Field(name='DataScannedInBytes', type=gql.Integer), - gql.Field(name='Status', type=gql.String), - gql.Field( - name='columns', type=gql.ArrayType(gql.Ref('AthenaResultColumnDescriptor')) - ), - gql.Field(name='rows', type=gql.ArrayType(gql.Ref('AthenaResultRecord'))), - ], -) diff --git a/backend/dataall/api/Objects/AthenaQueryResult/wrapper.py b/backend/dataall/api/Objects/AthenaQueryResult/wrapper.py deleted file mode 100644 index 394ec44ae..000000000 --- a/backend/dataall/api/Objects/AthenaQueryResult/wrapper.py +++ /dev/null @@ -1,79 +0,0 @@ -from enum import Enum -from typing import List - - -class AthenaQueryResultStatus(Enum): - CANCELLED = 'CANCELLED' - FAILED = 'FAILED' - QUEUED = 'QUEUED' - RUNNING = 'RUNNING' - SUCCEEDED = 'SUCCEEDED' - - -class AthenaQueryResult: - props = [ - 'Status', - 'Error', - 'AthenaQueryId', - 'ElapsedTimeInMs', - 'DataScannedInBytes', - 'OutputLocation', - 'rows', - 'columns', - ] - - def __init__( - self, - Error: str = None, - Status: str = None, - AthenaQueryId: str = None, - ElapsedTimeInMs: int = None, - DataScannedInBytes: int = None, - OutputLocation: str = None, - rows: List = None, - columns: List = None, - **kwargs - ): - self._error = Error - self._status = Status - self._query_id = AthenaQueryId - self._elapsed_time = ElapsedTimeInMs - self._data_scanned = DataScannedInBytes - self._loc = OutputLocation - self._rows = rows - self._columns = columns - - def to_dict(self): - return {k: getattr(self, k) for k in AthenaQueryResult.props} - - @property - def Status(self) -> AthenaQueryResultStatus: - return self._status - - @property - def Error(self) -> str: - return self._error - - @property - def AthenaQueryId(self): - return self._query_id - - @property - def ElapsedTimeInMs(self): - return self._elapsed_time - - @property - def DataScannedInBytes(self): - return self._data_scanned - - @property - def OutputLocation(self): - return self._loc - - @property - def rows(self): - return self._rows - - @property - def columns(self): - return self._columns diff --git a/backend/dataall/api/Objects/Dashboard/__init__.py b/backend/dataall/api/Objects/Dashboard/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/Dashboard/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . 
import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/Dashboard/input_types.py b/backend/dataall/api/Objects/Dashboard/input_types.py deleted file mode 100644 index 1686c31e3..000000000 --- a/backend/dataall/api/Objects/Dashboard/input_types.py +++ /dev/null @@ -1,43 +0,0 @@ -from ... import gql - -ImportDashboardInput = gql.InputType( - name='ImportDashboardInput', - arguments=[ - gql.Argument(name='label', type=gql.NonNullableType(gql.String)), - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='description', type=gql.String), - gql.Argument(name='SamlGroupName', type=gql.String), - gql.Argument(name='tags', type=gql.ArrayType(gql.String)), - gql.Argument(name='dashboardId', type=gql.NonNullableType(gql.String)), - gql.Argument(name='terms', type=gql.ArrayType(gql.String)), - ], -) - -UpdateDashboardInput = gql.InputType( - name='UpdateDashboardInput', - arguments=[ - gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='label', type=gql.String), - gql.Argument(name='description', type=gql.String), - gql.Argument(name='tags', type=gql.ArrayType(gql.String)), - gql.Argument(name='terms', type=gql.ArrayType(gql.String)), - ], -) - -DashboardFilter = gql.InputType( - name='DashboardFilter', - arguments=[ - gql.Argument(name='term', type=gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) - -DashboardShareFilter = gql.InputType( - name='DashboardShareFilter', - arguments=[ - gql.Argument(name='term', type=gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) diff --git a/backend/dataall/api/Objects/Dashboard/mutations.py b/backend/dataall/api/Objects/Dashboard/mutations.py deleted file mode 100644 index 7af472838..000000000 --- a/backend/dataall/api/Objects/Dashboard/mutations.py +++ /dev/null @@ -1,72 +0,0 @@ -from ... 
import gql -from .resolvers import * - - -importDashboard = gql.MutationField( - name='importDashboard', - type=gql.Ref('Dashboard'), - args=[ - gql.Argument( - name='input', type=gql.NonNullableType(gql.Ref('ImportDashboardInput')) - ) - ], - resolver=import_dashboard, -) - -updateDashboard = gql.MutationField( - name='updateDashboard', - args=[ - gql.Argument( - name='input', type=gql.NonNullableType(gql.Ref('UpdateDashboardInput')) - ), - ], - type=gql.Ref('Dashboard'), - resolver=update_dashboard, -) - - -deleteDashboard = gql.MutationField( - name='deleteDashboard', - type=gql.Boolean, - args=[gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String))], - resolver=delete_dashboard, -) - - -shareDashboard = gql.MutationField( - name='shareDashboard', - type=gql.Ref('DashboardShare'), - args=[ - gql.Argument(name='principalId', type=gql.NonNullableType(gql.String)), - gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String)), - ], - resolver=share_dashboard, -) - -requestDashboardShare = gql.MutationField( - name='requestDashboardShare', - type=gql.Ref('DashboardShare'), - args=[ - gql.Argument(name='principalId', type=gql.NonNullableType(gql.String)), - gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String)), - ], - resolver=request_dashboard_share, -) - -approveDashboardShare = gql.MutationField( - name='approveDashboardShare', - type=gql.Ref('DashboardShare'), - args=[ - gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String)), - ], - resolver=approve_dashboard_share, -) - -rejectDashboardShare = gql.MutationField( - name='rejectDashboardShare', - type=gql.Ref('DashboardShare'), - args=[ - gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String)), - ], - resolver=reject_dashboard_share, -) diff --git a/backend/dataall/api/Objects/Dashboard/queries.py b/backend/dataall/api/Objects/Dashboard/queries.py deleted file mode 100644 index d8d3b9982..000000000 --- a/backend/dataall/api/Objects/Dashboard/queries.py +++ /dev/null @@ -1,45 +0,0 @@ -from ... 
import gql -from .resolvers import * - -searchDashboards = gql.QueryField( - name='searchDashboards', - args=[gql.Argument(name='filter', type=gql.Ref('DashboardFilter'))], - resolver=list_dashboards, - type=gql.Ref('DashboardSearchResults'), -) - -getDashboard = gql.QueryField( - name='getDashboard', - args=[gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('Dashboard'), - resolver=get_dashboard, -) - - -getAuthorSession = gql.QueryField( - name='getAuthorSession', - args=[ - gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - ], - type=gql.String, - resolver=get_quicksight_designer_url, -) - - -getReaderSession = gql.QueryField( - name='getReaderSession', - args=[gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String))], - type=gql.String, - resolver=get_quicksight_reader_url, -) - -listDashboardShares = gql.QueryField( - name='listDashboardShares', - args=[ - gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('DashboardShareFilter')), - ], - resolver=list_dashboard_shares, - type=gql.Ref('DashboardShareSearchResults'), -) diff --git a/backend/dataall/api/Objects/Dashboard/resolvers.py b/backend/dataall/api/Objects/Dashboard/resolvers.py deleted file mode 100644 index 799354207..000000000 --- a/backend/dataall/api/Objects/Dashboard/resolvers.py +++ /dev/null @@ -1,327 +0,0 @@ -import os -from .... import db -from ....api.constants import DashboardRole -from ....api.context import Context -from ....aws.handlers.quicksight import Quicksight -from ....aws.handlers.parameter_store import ParameterStoreManager -from ....db import permissions, models -from ....db.api import ResourcePolicy, Glossary, Vote -from ....searchproxy import indexers - - -def get_quicksight_reader_url(context, source, dashboardUri: str = None): - with context.engine.scoped_session() as session: - dash: models.Dashboard = session.query(models.Dashboard).get(dashboardUri) - env: models.Environment = session.query(models.Environment).get( - dash.environmentUri - ) - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=dash.dashboardUri, - permission_name=permissions.GET_DASHBOARD, - ) - if not env.dashboardsEnabled: - raise db.exceptions.UnauthorizedOperation( - action=permissions.GET_DASHBOARD, - message=f'Dashboards feature is disabled for the environment {env.label}', - ) - if dash.SamlGroupName in context.groups: - url = Quicksight.get_reader_session( - AwsAccountId=env.AwsAccountId, - region=env.region, - UserName=context.username, - DashboardId=dash.DashboardId, - ) - else: - shared_groups = db.api.Dashboard.query_all_user_groups_shareddashboard( - session=session, - username=context.username, - groups=context.groups, - uri=dashboardUri - ) - if not shared_groups: - raise db.exceptions.UnauthorizedOperation( - action=permissions.GET_DASHBOARD, - message='Dashboard has not been shared with your Teams', - ) - - session_type = ParameterStoreManager.get_parameter_value( - parameter_path=f"/dataall/{os.getenv('envname', 'local')}/quicksight/sharedDashboardsSessions" - ) - - if session_type == 'reader': - url = Quicksight.get_shared_reader_session( - AwsAccountId=env.AwsAccountId, - region=env.region, - UserName=context.username, - GroupName=shared_groups[0], - DashboardId=dash.DashboardId, - ) - else: - url = 
Quicksight.get_anonymous_session( - AwsAccountId=env.AwsAccountId, - region=env.region, - UserName=context.username, - DashboardId=dash.DashboardId, - ) - return url - - -def get_quicksight_designer_url( - context, source, environmentUri: str = None, dashboardUri: str = None -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=environmentUri, - permission_name=permissions.CREATE_DASHBOARD, - ) - env: models.Environment = session.query(models.Environment).get(environmentUri) - if not env.dashboardsEnabled: - raise db.exceptions.UnauthorizedOperation( - action=permissions.CREATE_DASHBOARD, - message=f'Dashboards feature is disabled for the environment {env.label}', - ) - - url = Quicksight.get_author_session( - AwsAccountId=env.AwsAccountId, - region=env.region, - UserName=context.username, - UserRole='AUTHOR', - ) - - return url - - -def import_dashboard(context: Context, source, input: dict = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=input['environmentUri'], - permission_name=permissions.CREATE_DASHBOARD, - ) - env: models.Environment = db.api.Environment.get_environment_by_uri( - session, input['environmentUri'] - ) - - if not env.dashboardsEnabled: - raise db.exceptions.UnauthorizedOperation( - action=permissions.CREATE_DASHBOARD, - message=f'Dashboards feature is disabled for the environment {env.label}', - ) - - can_import = Quicksight.can_import_dashboard( - AwsAccountId=env.AwsAccountId, - region=env.region, - UserName=context.username, - DashboardId=input.get('dashboardId'), - ) - - if not can_import: - raise db.exceptions.UnauthorizedOperation( - action=permissions.CREATE_DASHBOARD, - message=f'User: {context.username} has not AUTHOR rights on quicksight for the environment {env.label}', - ) - - input['environment'] = env - dashboard = db.api.Dashboard.import_dashboard( - session=session, - username=context.username, - groups=context.groups, - uri=env.environmentUri, - data=input, - check_perm=True, - ) - - indexers.upsert_dashboard(session, context.es, dashboard.dashboardUri) - - return dashboard - - -def update_dashboard(context, source, input: dict = None): - with context.engine.scoped_session() as session: - dashboard = db.api.Dashboard.get_dashboard_by_uri( - session, input['dashboardUri'] - ) - input['dashboard'] = dashboard - db.api.Dashboard.update_dashboard( - session=session, - username=context.username, - groups=context.groups, - uri=dashboard.dashboardUri, - data=input, - check_perm=True, - ) - - indexers.upsert_dashboard(session, context.es, dashboard.dashboardUri) - - return dashboard - - -def list_dashboards(context: Context, source, filter: dict = None): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Dashboard.paginated_user_dashboards( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=filter, - check_perm=True, - ) - - -def get_dashboard(context: Context, source, dashboardUri: str = None): - with context.engine.scoped_session() as session: - return db.api.Dashboard.get_dashboard( - session=session, - username=context.username, - groups=context.groups, - uri=dashboardUri, - data=None, - check_perm=True, - ) - - -def resolve_user_role(context: Context, source: models.Dashboard): - if 
context.username and source.owner == context.username: - return DashboardRole.Creator.value - elif context.groups and source.SamlGroupName in context.groups: - return DashboardRole.Admin.value - return DashboardRole.Shared.value - - -def get_dashboard_organization(context: Context, source: models.Dashboard, **kwargs): - with context.engine.scoped_session() as session: - org = session.query(models.Organization).get(source.organizationUri) - return org - - -def get_dashboard_environment(context: Context, source: models.Dashboard, **kwargs): - with context.engine.scoped_session() as session: - env = session.query(models.Environment).get(source.environmentUri) - return env - - -def request_dashboard_share( - context: Context, - source: models.Dashboard, - principalId: str = None, - dashboardUri: str = None, -): - with context.engine.scoped_session() as session: - return db.api.Dashboard.request_dashboard_share( - session=session, - username=context.username, - groups=context.groups, - uri=dashboardUri, - data={'principalId': principalId}, - check_perm=True, - ) - - -def approve_dashboard_share( - context: Context, - source: models.Dashboard, - shareUri: str = None, -): - with context.engine.scoped_session() as session: - share = db.api.Dashboard.get_dashboard_share_by_uri(session, shareUri) - dashboard = db.api.Dashboard.get_dashboard_by_uri(session, share.dashboardUri) - return db.api.Dashboard.approve_dashboard_share( - session=session, - username=context.username, - groups=context.groups, - uri=dashboard.dashboardUri, - data={'share': share, 'shareUri': shareUri}, - check_perm=True, - ) - - -def reject_dashboard_share( - context: Context, - source: models.Dashboard, - shareUri: str = None, -): - with context.engine.scoped_session() as session: - share = db.api.Dashboard.get_dashboard_share_by_uri(session, shareUri) - dashboard = db.api.Dashboard.get_dashboard_by_uri(session, share.dashboardUri) - return db.api.Dashboard.reject_dashboard_share( - session=session, - username=context.username, - groups=context.groups, - uri=dashboard.dashboardUri, - data={'share': share, 'shareUri': shareUri}, - check_perm=True, - ) - - -def list_dashboard_shares( - context: Context, - source: models.Dashboard, - dashboardUri: str = None, - filter: dict = None, -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Dashboard.paginated_dashboard_shares( - session=session, - username=context.username, - groups=context.groups, - uri=dashboardUri, - data=filter, - check_perm=True, - ) - - -def share_dashboard( - context: Context, - source: models.Dashboard, - principalId: str = None, - dashboardUri: str = None, -): - with context.engine.scoped_session() as session: - return db.api.Dashboard.share_dashboard( - session=session, - username=context.username, - groups=context.groups, - uri=dashboardUri, - data={'principalId': principalId}, - check_perm=True, - ) - - -def delete_dashboard(context: Context, source, dashboardUri: str = None): - with context.engine.scoped_session() as session: - db.api.Dashboard.delete_dashboard( - session=session, - username=context.username, - groups=context.groups, - uri=dashboardUri, - data=None, - check_perm=True, - ) - indexers.delete_doc(es=context.es, doc_id=dashboardUri) - return True - - -def resolve_glossary_terms(context: Context, source: models.Dashboard, **kwargs): - with context.engine.scoped_session() as session: - return Glossary.get_glossary_terms_links( - session, source.dashboardUri, 'Dashboard' - ) - - -def 
resolve_upvotes(context: Context, source: models.Dashboard, **kwargs): - with context.engine.scoped_session() as session: - return Vote.count_upvotes( - session, None, None, source.dashboardUri, data={'targetType': 'dashboard'} - ) diff --git a/backend/dataall/api/Objects/Dashboard/schema.py b/backend/dataall/api/Objects/Dashboard/schema.py deleted file mode 100644 index a8db3f3bf..000000000 --- a/backend/dataall/api/Objects/Dashboard/schema.py +++ /dev/null @@ -1,84 +0,0 @@ -from ... import gql -from .resolvers import * -from ...constants import DashboardRole - -Dashboard = gql.ObjectType( - name='Dashboard', - fields=[ - gql.Field('dashboardUri', type=gql.ID), - gql.Field('name', type=gql.String), - gql.Field('label', type=gql.String), - gql.Field('description', type=gql.String), - gql.Field('DashboardId', type=gql.String), - gql.Field('tags', type=gql.ArrayType(gql.String)), - gql.Field('created', type=gql.String), - gql.Field('updated', type=gql.String), - gql.Field('owner', type=gql.String), - gql.Field('SamlGroupName', type=gql.String), - gql.Field( - 'organization', - type=gql.Ref('Organization'), - resolver=get_dashboard_organization, - ), - gql.Field( - 'environment', - type=gql.Ref('Environment'), - resolver=get_dashboard_environment, - ), - gql.Field( - 'userRoleForDashboard', - type=DashboardRole.toGraphQLEnum(), - resolver=resolve_user_role, - ), - gql.Field( - name='terms', - type=gql.Ref('TermSearchResult'), - resolver=resolve_glossary_terms, - ), - gql.Field( - 'upvotes', - type=gql.Integer, - resolver=resolve_upvotes, - ), - ], -) - -DashboardShare = gql.ObjectType( - name='DashboardShare', - fields=[ - gql.Field('shareUri', type=gql.ID), - gql.Field('dashboardUri', type=gql.ID), - gql.Field('name', type=gql.String), - gql.Field('label', type=gql.String), - gql.Field('SamlGroupName', type=gql.String), - gql.Field('status', type=gql.String), - gql.Field('owner', type=gql.String), - gql.Field('tags', type=gql.ArrayType(gql.String)), - gql.Field('created', type=gql.String), - gql.Field('updated', type=gql.String), - ], -) - -DashboardSearchResults = gql.ObjectType( - name='DashboardSearchResults', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(Dashboard)), - ], -) - -DashboardShareSearchResults = gql.ObjectType( - name='DashboardShareSearchResults', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(DashboardShare)), - ], -) diff --git a/backend/dataall/api/Objects/DataPipeline/__init__.py b/backend/dataall/api/Objects/DataPipeline/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/DataPipeline/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . 
import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/DataPipeline/input_types.py b/backend/dataall/api/Objects/DataPipeline/input_types.py deleted file mode 100644 index 98ccf23b3..000000000 --- a/backend/dataall/api/Objects/DataPipeline/input_types.py +++ /dev/null @@ -1,75 +0,0 @@ -from ... import gql - -NewDataPipelineInput = gql.InputType( - name='NewDataPipelineInput', - arguments=[ - gql.Argument(name='label', type=gql.NonNullableType(gql.String)), - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='description', type=gql.String), - gql.Argument(name='SamlGroupName', type=gql.NonNullableType(gql.String)), - gql.Argument(name='tags', type=gql.ArrayType(gql.String)), - gql.Argument(name='devStrategy', type=gql.NonNullableType(gql.String)), - ], -) - -NewDataPipelineEnvironmentInput = gql.InputType( - name='NewDataPipelineEnvironmentInput', - arguments=[ - gql.Argument(name='stage', type=gql.NonNullableType(gql.String)), - gql.Argument(name='order', type=gql.NonNullableType(gql.Integer)), - gql.Argument(name='pipelineUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='environmentLabel', type=gql.NonNullableType(gql.String)), - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='samlGroupName', type=gql.NonNullableType(gql.String)), - ], -) - -UpdateDataPipelineInput = gql.InputType( - name='UpdateDataPipelineInput', - arguments=[ - gql.Argument(name='label', type=gql.String), - gql.Argument(name='description', type=gql.String), - gql.Argument(name='tags', type=gql.ArrayType(gql.String)), - ], -) - -DataPipelineFilter = gql.InputType( - name='DataPipelineFilter', - arguments=[ - gql.Argument(name='term', type=gql.String), - gql.Argument(name='region', type=gql.ArrayType(gql.String)), - gql.Argument(name='tags', type=gql.ArrayType(gql.String)), - gql.Argument(name='type', type=gql.ArrayType(gql.String)), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) - -DataPipelineEnvironmentFilter = gql.InputType( - name='DataPipelineEnvironmentFilter', - arguments=[ - gql.Argument(name='term', type=gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - gql.Argument(name='pipelineUri', type=gql.String), - ], -) - -DataPipelineBrowseInput = gql.InputType( - name='DataPipelineBrowseInput', - arguments=[ - gql.Argument(name='DataPipelineUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='branch', type=gql.NonNullableType(gql.String)), - gql.Argument(name='folderPath', type=gql.String), - ], -) - - -DataPipelineFileContentInput = gql.InputType( - name='DataPipelineFileContentInput', - arguments=[ - gql.Argument(name='DataPipelineUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='branch', type=gql.NonNullableType(gql.String)), - gql.Argument(name='absolutePath', type=gql.NonNullableType(gql.String)), - ], -) diff --git a/backend/dataall/api/Objects/DataPipeline/mutations.py b/backend/dataall/api/Objects/DataPipeline/mutations.py deleted file mode 100644 index 9273e43ff..000000000 --- a/backend/dataall/api/Objects/DataPipeline/mutations.py +++ /dev/null @@ -1,64 +0,0 @@ -from ... 
import gql -from .resolvers import * - -createDataPipeline = gql.MutationField( - name='createDataPipeline', - type=gql.Ref('DataPipeline'), - args=[ - gql.Argument( - name='input', type=gql.NonNullableType(gql.Ref('NewDataPipelineInput')) - ) - ], - resolver=create_pipeline, -) - -updateDataPipeline = gql.MutationField( - name='updateDataPipeline', - type=gql.Ref('DataPipeline'), - args=[ - gql.Argument(name='DataPipelineUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=gql.Ref('UpdateDataPipelineInput')), - ], - resolver=update_pipeline, -) - -deleteDataPipeline = gql.MutationField( - name='deleteDataPipeline', - type=gql.Boolean, - args=[ - gql.Argument(name='DataPipelineUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='deleteFromAWS', type=gql.Boolean), - ], - resolver=delete_pipeline, -) - -createDataPipelineEnvironment = gql.MutationField( - name='createDataPipelineEnvironment', - type=gql.Ref('DataPipelineEnvironment'), - args=[ - gql.Argument( - name='input', type=gql.NonNullableType(gql.Ref('NewDataPipelineEnvironmentInput')) - ) - ], - resolver=create_pipeline_environment, -) - -deleteDataPipelineEnvironment = gql.MutationField( - name='deleteDataPipelineEnvironment', - type=gql.Boolean, - args=[ - gql.Argument(name='envPipelineUri', type=gql.NonNullableType(gql.String)) - ], - resolver=delete_pipeline_environment, -) - -updateDataPipelineEnvironment = gql.MutationField( - name='updateDataPipelineEnvironment', - type=gql.Ref('DataPipelineEnvironment'), - args=[ - gql.Argument( - name='input', type=gql.NonNullableType(gql.Ref('NewDataPipelineEnvironmentInput')) - ) - ], - resolver=update_pipeline_environment, -) diff --git a/backend/dataall/api/Objects/DataPipeline/queries.py b/backend/dataall/api/Objects/DataPipeline/queries.py deleted file mode 100644 index f449f9d63..000000000 --- a/backend/dataall/api/Objects/DataPipeline/queries.py +++ /dev/null @@ -1,64 +0,0 @@ -from ... 
import gql -from .resolvers import * - -listDataPipelines = gql.QueryField( - name='listDataPipelines', - args=[gql.Argument(name='filter', type=gql.Ref('DataPipelineFilter'))], - resolver=list_pipelines, - type=gql.Ref('DataPipelineSearchResults'), -) - -getDataPipeline = gql.QueryField( - name='getDataPipeline', - args=[gql.Argument(name='DataPipelineUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('DataPipeline'), - resolver=get_pipeline, -) - - -browseDataPipelineRepository = gql.QueryField( - name='browseDataPipelineRepository', - args=[ - gql.Argument( - name='input', type=gql.NonNullableType(gql.Ref('DataPipelineBrowseInput')) - ) - ], - resolver=ls, - type=gql.String, -) - -listDataPipelineBranches = gql.QueryField( - name='listDataPipelineBranches', - args=[gql.Argument(name='DataPipelineUri', type=gql.NonNullableType(gql.String))], - resolver=list_branches, - type=gql.ArrayType(gql.String), -) - - -getDataPipelineFileContent = gql.QueryField( - name='getDataPipelineFileContent', - args=[gql.Argument(name='input', type=gql.Ref('DataPipelineFileContentInput'))], - resolver=cat, - type=gql.String, -) - -getDataPipelineCredsLinux = gql.QueryField( - name='getDataPipelineCredsLinux', - args=[gql.Argument(name='DataPipelineUri', type=gql.NonNullableType(gql.String))], - type=gql.String, - resolver=get_creds, -) - -listDataPipelineEnvironments = gql.QueryField( - name='listDataPipelineEnvironments', - args=[gql.Argument(name='filter', type=gql.Ref('DataPipelineEnvironmentFilter'))], - resolver=list_pipeline_environments, - type=gql.Ref('DataPipelineEnvironmentSearchResults'), -) - -getDataPipelineEnvironment = gql.QueryField( - name='getDataPipelineEnvironment', - args=[gql.Argument(name='envPipelineUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('DataPipelineEnvironment'), - resolver=get_pipeline_environment, -) diff --git a/backend/dataall/api/Objects/DataPipeline/resolvers.py b/backend/dataall/api/Objects/DataPipeline/resolvers.py deleted file mode 100644 index e431a0cbe..000000000 --- a/backend/dataall/api/Objects/DataPipeline/resolvers.py +++ /dev/null @@ -1,443 +0,0 @@ -import json -import logging - -from ....aws.handlers import stepfunction as helpers -from ...Objects.Stack import stack_helper -from ...constants import DataPipelineRole -from ...context import Context -from ....aws.handlers.service_handlers import Worker -from ....aws.handlers.sts import SessionHelper -from ....db import permissions, models, exceptions -from ....db.api import Pipeline, Environment, ResourcePolicy, Stack, KeyValueTag - -log = logging.getLogger(__name__) - - -def create_pipeline(context: Context, source, input=None): - with context.engine.scoped_session() as session: - pipeline = Pipeline.create_pipeline( - session=session, - username=context.username, - groups=context.groups, - uri=input['environmentUri'], - data=input, - check_perm=True, - ) - if input['devStrategy'] == 'cdk-trunk': - Stack.create_stack( - session=session, - environment_uri=pipeline.environmentUri, - target_type='cdkpipeline', - target_uri=pipeline.DataPipelineUri, - target_label=pipeline.label, - payload={'account': pipeline.AwsAccountId, 'region': pipeline.region}, - ) - else: - Stack.create_stack( - session=session, - environment_uri=pipeline.environmentUri, - target_type='pipeline', - target_uri=pipeline.DataPipelineUri, - target_label=pipeline.label, - payload={'account': pipeline.AwsAccountId, 'region': pipeline.region}, - ) - - stack_helper.deploy_stack(context, pipeline.DataPipelineUri) - - return 
pipeline - - -def create_pipeline_environment(context: Context, source, input=None): - with context.engine.scoped_session() as session: - pipeline_env = Pipeline.create_pipeline_environment( - session=session, - username=context.username, - groups=context.groups, - data=input, - check_perm=True, - ) - return pipeline_env - - -def update_pipeline(context: Context, source, DataPipelineUri: str, input: dict = None): - with context.engine.scoped_session() as session: - pipeline = Pipeline.update_pipeline( - session=session, - username=context.username, - groups=context.groups, - uri=DataPipelineUri, - data=input, - check_perm=True, - ) - if (pipeline.template == ""): - stack_helper.deploy_stack(context, pipeline.DataPipelineUri) - - return pipeline - - -def list_pipelines(context: Context, source, filter: dict = None): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return Pipeline.paginated_user_pipelines( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=filter, - check_perm=None, - ) - - -def get_pipeline(context: Context, source, DataPipelineUri: str = None): - with context.engine.scoped_session() as session: - return Pipeline.get_pipeline( - session=session, - username=context.username, - groups=context.groups, - uri=DataPipelineUri, - data=None, - check_perm=True, - ) - - -def get_pipeline_env(context: Context, source: models.DataPipeline, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - env = session.query(models.Environment).get(source.environmentUri) - return env - - -def resolve_user_role(context: Context, source: models.DataPipeline): - if not source: - return None - if context.username and source.owner == context.username: - return DataPipelineRole.Creator.value - elif context.groups and source.SamlGroupName in context.groups: - return DataPipelineRole.Admin.value - return DataPipelineRole.NoPermission.value - - -def get_pipeline_environment(context: Context, source: models.DataPipelineEnvironment, **kwargs): - with context.engine.scoped_session() as session: - return Pipeline.get_pipeline_environment( - session=session, - username=context.username, - groups=context.groups, - uri=source.envPipelineUri, - data=None, - check_perm=True, - ) - - -def list_pipeline_environments(context: Context, source: models.DataPipeline, filter: dict = None): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return Pipeline.paginated_pipeline_environments( - session=session, - username=context.username, - groups=context.groups, - uri=source.DataPipelineUri, - data=filter, - check_perm=None, - ) - - -def get_pipeline_org(context: Context, source: models.DataPipeline, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - env = session.query(models.Environment).get(source.environmentUri) - org = session.query(models.Organization).get(env.organizationUri) - return org - - -def get_clone_url_http(context: Context, source: models.DataPipeline, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - env: models.Environment = session.query(models.Environment).get( - source.environmentUri - ) - return f'codecommit::{env.region}://{source.repo}' - - -def cat(context: Context, source, input: dict = None): - with context.engine.scoped_session() as session: - Pipeline.get_pipeline( - session=session, - username=context.username, - groups=context.groups, - 
uri=input['DataPipelineUri'], - data=None, - check_perm=True, - ) - task = models.Task( - action='repo.datapipeline.cat', - targetUri=input.get('DataPipelineUri'), - payload={ - 'absolutePath': input.get('absolutePath'), - 'branch': input.get('branch', 'master'), - }, - ) - session.add(task) - - response = Worker.process( - engine=context.engine, task_ids=[task.taskUri], save_response=False - ) - return response[0]['response'].decode('ascii') - - -def ls(context: Context, source, input: dict = None): - with context.engine.scoped_session() as session: - Pipeline.get_pipeline( - session=session, - username=context.username, - groups=context.groups, - uri=input['DataPipelineUri'], - data=None, - check_perm=True, - ) - task = models.Task( - action='repo.datapipeline.ls', - targetUri=input.get('DataPipelineUri'), - payload={ - 'folderPath': input.get('folderPath', '/'), - 'branch': input.get('branch', 'master'), - }, - ) - session.add(task) - - response = Worker.process( - engine=context.engine, task_ids=[task.taskUri], save_response=False - ) - return json.dumps(response[0]['response']) - - -def list_branches(context: Context, source, DataPipelineUri: str = None): - with context.engine.scoped_session() as session: - Pipeline.get_pipeline( - session=session, - username=context.username, - groups=context.groups, - uri=DataPipelineUri, - data=None, - check_perm=True, - ) - task = models.Task(action='repo.datapipeline.branches', targetUri=DataPipelineUri) - session.add(task) - - response = Worker.process( - engine=context.engine, task_ids=[task.taskUri], save_response=False - ) - return response[0]['response'] - - -def get_stack(context, source: models.DataPipeline, **kwargs): - if not source: - return None - return stack_helper.get_stack_with_cfn_resources( - context=context, - targetUri=source.DataPipelineUri, - environmentUri=source.environmentUri, - ) - - -def get_job_runs(context, source: models.DataPipeline, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - task = models.Task(targetUri=source.DataPipelineUri, action='glue.job.runs') - session.add(task) - - response = Worker.process( - engine=context.engine, task_ids=[task.taskUri], save_response=False - )[0] - return response['response'] - - -def get_pipeline_executions(context: Context, source: models.DataPipeline, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - task = models.Task( - targetUri=source.DataPipelineUri, action='datapipeline.pipeline.executions' - ) - session.add(task) - - response = Worker.process( - engine=context.engine, task_ids=[task.taskUri], save_response=False - )[0] - return response['response'] - - -def get_creds(context: Context, source, DataPipelineUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=DataPipelineUri, - permission_name=permissions.CREDENTIALS_PIPELINE, - ) - pipeline = Pipeline.get_pipeline_by_uri(session, DataPipelineUri) - env = Environment.get_environment_by_uri(session, pipeline.environmentUri) - - env_role_arn = env.EnvironmentDefaultIAMRoleArn - - body = _get_creds_from_aws(pipeline, env_role_arn) - - return body - - -def _get_creds_from_aws(pipeline, env_role_arn): - aws_account_id = pipeline.AwsAccountId - aws_session = SessionHelper.remote_session(aws_account_id) - env_session = SessionHelper.get_session(aws_session, role_arn=env_role_arn) - c = 
env_session.get_credentials() - body = json.dumps( - { - 'AWS_ACCESS_KEY_ID': c.access_key, - 'AWS_SECRET_ACCESS_KEY': c.secret_key, - 'AWS_SESSION_TOKEN': c.token, - } - ) - return body - - -def list_pipeline_state_machine_executions( - context: Context, source, DataPipelineUri: str = None, stage: str = None -): - with context.engine.scoped_session() as session: - pipeline = Pipeline.get_pipeline( - session=session, - username=context.username, - groups=context.groups, - uri=DataPipelineUri, - data=None, - check_perm=True, - ) - - env = Environment.get_environment_by_uri(session, pipeline.environmentUri) - - executions = helpers.list_executions( - state_machine_name=pipeline.name, env=env, stage='Prod' - ) - - return { - 'count': len(executions), - 'page': 1, - 'pages': 4, - 'hasNext': False, - 'hasPrevious': False, - 'nodes': executions, - } - - -def start_pipeline(context: Context, source, DataPipelineUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=DataPipelineUri, - permission_name=permissions.START_PIPELINE, - ) - - pipeline = Pipeline.get_pipeline_by_uri(session, DataPipelineUri) - - env = Environment.get_environment_by_uri(session, pipeline.environmentUri) - - execution_arn = helpers.run_pipeline(state_machine_name=pipeline.name, env=env) - - return execution_arn - - -def delete_pipeline( - context: Context, source, DataPipelineUri: str = None, deleteFromAWS: bool = None -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=DataPipelineUri, - permission_name=permissions.DELETE_PIPELINE, - ) - - pipeline: models.DataPipeline = Pipeline.get_pipeline_by_uri( - session, DataPipelineUri - ) - - env: models.Environment = Environment.get_environment_by_uri( - session, pipeline.environmentUri - ) - - Pipeline.delete_pipeline_environments(session, DataPipelineUri) - - KeyValueTag.delete_key_value_tags(session, pipeline.DataPipelineUri, 'pipeline') - - session.delete(pipeline) - - ResourcePolicy.delete_resource_policy( - session=session, - resource_uri=pipeline.DataPipelineUri, - group=pipeline.SamlGroupName, - ) - - if deleteFromAWS: - stack_helper.delete_repository( - context=context, - target_uri=DataPipelineUri, - accountid=env.AwsAccountId, - cdk_role_arn=env.CDKRoleArn, - region=env.region, - repo_name=pipeline.repo, - ) - if pipeline.devStrategy == "cdk-trunk": - stack_helper.delete_stack( - context=context, - target_uri=DataPipelineUri, - accountid=env.AwsAccountId, - cdk_role_arn=env.CDKRoleArn, - region=env.region, - target_type='cdkpipeline', - ) - else: - stack_helper.delete_stack( - context=context, - target_uri=DataPipelineUri, - accountid=env.AwsAccountId, - cdk_role_arn=env.CDKRoleArn, - region=env.region, - target_type='pipeline', - ) - - return True - - -def delete_pipeline_environment(context: Context, source, envPipelineUri: str = None): - with context.engine.scoped_session() as session: - Pipeline.delete_pipeline_environment( - session=session, - username=context.username, - groups=context.groups, - envPipelineUri=envPipelineUri, - check_perm=True, - ) - return True - - -def update_pipeline_environment(context: Context, source, input=None): - with context.engine.scoped_session() as session: - pipeline_env = Pipeline.update_pipeline_environment( - session=session, - 
username=context.username,
-            groups=context.groups,
-            data=input,
-            uri=input['pipelineUri'],
-            check_perm=True,
-        )
-        return pipeline_env
diff --git a/backend/dataall/api/Objects/DataPipeline/schema.py b/backend/dataall/api/Objects/DataPipeline/schema.py
deleted file mode 100644
index 72f00cac2..000000000
--- a/backend/dataall/api/Objects/DataPipeline/schema.py
+++ /dev/null
@@ -1,82 +0,0 @@
-from ... import gql
-from .resolvers import *
-from ...constants import DataPipelineRole
-
-DataPipeline = gql.ObjectType(
-    name='DataPipeline',
-    fields=[
-        gql.Field('DataPipelineUri', type=gql.ID),
-        gql.Field('name', type=gql.String),
-        gql.Field('label', type=gql.String),
-        gql.Field('description', type=gql.String),
-        gql.Field('tags', type=gql.ArrayType(gql.String)),
-        gql.Field('created', type=gql.String),
-        gql.Field('updated', type=gql.String),
-        gql.Field('owner', type=gql.String),
-        gql.Field('repo', type=gql.String),
-        gql.Field('SamlGroupName', type=gql.String),
-        gql.Field(
-            'organization', type=gql.Ref('Organization'), resolver=get_pipeline_org
-        ),
-        gql.Field(
-            'environment', type=gql.Ref('Environment'), resolver=get_pipeline_env
-        ),
-        gql.Field(
-            'developmentEnvironments',
-            type=gql.Ref('DataPipelineEnvironmentSearchResults'),
-            resolver=list_pipeline_environments,
-        ),
-        gql.Field('template', type=gql.String),
-        gql.Field('devStrategy', type=gql.String),
-        gql.Field('cloneUrlHttp', gql.String, resolver=get_clone_url_http),
-        gql.Field('stack', gql.Ref('Stack'), resolver=get_stack),
-        # gql.Field('cicdStack', gql.Ref('Stack'), resolver=get_cicd_stack),
-        gql.Field(
-            'userRoleForPipeline',
-            type=DataPipelineRole.toGraphQLEnum(),
-            resolver=resolve_user_role,
-        ),
-    ],
-)
-
-DataPipelineSearchResults = gql.ObjectType(
-    name='DataPipelineSearchResults',
-    fields=[
-        gql.Field(name='count', type=gql.Integer),
-        gql.Field(name='page', type=gql.Integer),
-        gql.Field(name='pages', type=gql.Integer),
-        gql.Field(name='hasNext', type=gql.Boolean),
-        gql.Field(name='hasPrevious', type=gql.Boolean),
-        gql.Field(name='nodes', type=gql.ArrayType(DataPipeline)),
-    ],
-)
-
-
-DataPipelineEnvironment = gql.ObjectType(
-    name='DataPipelineEnvironment',
-    fields=[
-        gql.Field(name='envPipelineUri', type=gql.String),
-        gql.Field(name='environmentUri', type=gql.String),
-        gql.Field(name='environmentLabel', type=gql.String),
-        gql.Field(name='pipelineUri', type=gql.String),
-        gql.Field(name='pipelineLabel', type=gql.String),
-        gql.Field(name='stage', type=gql.String),
-        gql.Field(name='order', type=gql.Integer),
-        gql.Field(name='region', type=gql.String),
-        gql.Field(name='AwsAccountId', type=gql.String),
-        gql.Field(name='samlGroupName', type=gql.String),
-    ],
-)
-
-
-DataPipelineEnvironmentSearchResults = gql.ObjectType(
-    name='DataPipelineEnvironmentSearchResults',
-    fields=[
-        gql.Field(name='count', type=gql.Integer),
-        gql.Field(name='page', type=gql.Integer),
-        gql.Field(name='pages', type=gql.Integer),
-        gql.Field(name='hasNext', type=gql.Boolean),
-        gql.Field(name='hasPrevious', type=gql.Boolean),
-        gql.Field(name='nodes', type=gql.ArrayType(DataPipelineEnvironment)),
-    ],
-)
diff --git a/backend/dataall/api/Objects/Dataset/__init__.py b/backend/dataall/api/Objects/Dataset/__init__.py
deleted file mode 100644
index dfa46b264..000000000
--- a/backend/dataall/api/Objects/Dataset/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from .
import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/Dataset/input_types.py b/backend/dataall/api/Objects/Dataset/input_types.py deleted file mode 100644 index 79d140bda..000000000 --- a/backend/dataall/api/Objects/Dataset/input_types.py +++ /dev/null @@ -1,112 +0,0 @@ -from ... import gql -from ....api.constants import GraphQLEnumMapper, SortDirection - - -class DatasetSortField(GraphQLEnumMapper): - label = 'label' - created = 'created' - updated = 'updated' - - -NewDatasetInput = gql.InputType( - name='NewDatasetInput', - arguments=[ - gql.Argument('label', gql.NonNullableType(gql.String)), - gql.Argument('organizationUri', gql.NonNullableType(gql.String)), - gql.Argument('environmentUri', gql.NonNullableType(gql.String)), - gql.Argument('description', gql.String), - gql.Argument('tags', gql.ArrayType(gql.String)), - gql.Argument('owner', gql.String), - gql.Argument('language', gql.Ref('Language')), - gql.Argument('topics', gql.ArrayType(gql.Ref('Topic'))), - gql.Argument(name='SamlAdminGroupName', type=gql.NonNullableType(gql.String)), - gql.Argument(name='businessOwnerEmail', type=gql.String), - gql.Argument( - name='businessOwnerDelegationEmails', type=gql.ArrayType(gql.String) - ), - gql.Argument('confidentiality', gql.Ref('ConfidentialityClassification')), - gql.Argument(name='stewards', type=gql.String), - ], -) - -ModifyDatasetInput = gql.InputType( - name='ModifyDatasetInput', - arguments=[ - gql.Argument('label', gql.String), - gql.Argument('description', gql.String), - gql.Argument('tags', gql.ArrayType(gql.String)), - gql.Argument('topics', gql.ArrayType(gql.Ref('Topic'))), - gql.Argument('terms', gql.ArrayType(gql.String)), - gql.Argument('businessOwnerDelegationEmails', gql.ArrayType(gql.String)), - gql.Argument('businessOwnerEmail', gql.String), - gql.Argument('language', gql.Ref('Language')), - gql.Argument('confidentiality', gql.Ref('ConfidentialityClassification')), - gql.Argument(name='stewards', type=gql.String), - gql.Argument('KmsAlias', gql.NonNullableType(gql.String)), - ], -) - -DatasetSortCriteria = gql.InputType( - name='DatasetSortCriteria', - arguments=[ - gql.Argument( - name='field', type=gql.NonNullableType(DatasetSortField.toGraphQLEnum()) - ), - gql.Argument(name='direction', type=SortDirection.toGraphQLEnum()), - ], -) - - -DatasetFilter = gql.InputType( - name='DatasetFilter', - arguments=[ - gql.Argument('term', gql.String), - gql.Argument('roles', gql.ArrayType(gql.Ref('DatasetRole'))), - gql.Argument('InProject', gql.String), - gql.Argument('notInProject', gql.String), - gql.Argument('displayArchived', gql.Boolean), - # gql.Argument("organization", gql.String), - # gql.Argument("environment", gql.String), - gql.Argument('sort', gql.ArrayType(DatasetSortCriteria)), - gql.Argument('page', gql.Integer), - gql.Argument('pageSize', gql.Integer), - ], -) - -DatasetPresignedUrlInput = gql.InputType( - name='DatasetPresignedUrlInput', - arguments=[ - gql.Field(name='fileName', type=gql.String), - gql.Field(name='prefix', type=gql.String), - ], -) - - -CrawlerInput = gql.InputType( - name='CrawlerInput', arguments=[gql.Argument(name='prefix', type=gql.String)] -) - -ImportDatasetInput = gql.InputType( - name='ImportDatasetInput', - arguments=[ - gql.Argument('label', gql.NonNullableType(gql.String)), - gql.Argument('organizationUri', gql.NonNullableType(gql.String)), - gql.Argument('environmentUri', 
gql.NonNullableType(gql.String)), - gql.Argument('description', gql.String), - gql.Argument('bucketName', gql.NonNullableType(gql.String)), - gql.Argument('glueDatabaseName', gql.String), - gql.Argument('KmsKeyAlias', gql.NonNullableType(gql.String)), - gql.Argument('adminRoleName', gql.String), - gql.Argument('tags', gql.ArrayType(gql.String)), - gql.Argument('owner', gql.NonNullableType(gql.String)), - gql.Argument('language', gql.Ref('Language')), - gql.Argument('topics', gql.ArrayType(gql.Ref('Topic'))), - gql.Argument(name='SamlAdminGroupName', type=gql.NonNullableType(gql.String)), - gql.Argument(name='businessOwnerEmail', type=gql.String), - gql.Argument( - name='businessOwnerDelegationEmails', type=gql.ArrayType(gql.String) - ), - gql.Argument('confidentiality', gql.Ref('ConfidentialityClassification')), - gql.Argument(name='stewards', type=gql.String), - ], -) diff --git a/backend/dataall/api/Objects/Dataset/mutations.py b/backend/dataall/api/Objects/Dataset/mutations.py deleted file mode 100644 index cc26c219c..000000000 --- a/backend/dataall/api/Objects/Dataset/mutations.py +++ /dev/null @@ -1,92 +0,0 @@ -from ... import gql -from .input_types import ( - ModifyDatasetInput, - NewDatasetInput, - ImportDatasetInput, -) -from .resolvers import * - -createDataset = gql.MutationField( - name='createDataset', - args=[gql.Argument(name='input', type=gql.NonNullableType(NewDatasetInput))], - type=gql.Ref('Dataset'), - resolver=create_dataset, - test_scope='Dataset', -) - -updateDataset = gql.MutationField( - name='updateDataset', - args=[ - gql.Argument(name='datasetUri', type=gql.String), - gql.Argument(name='input', type=ModifyDatasetInput), - ], - type=gql.Ref('Dataset'), - resolver=update_dataset, - test_scope='Dataset', -) - -syncTables = gql.MutationField( - name='syncTables', - args=[gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('DatasetTableSearchResult'), - resolver=sync_tables, -) - - -generateDatasetAccessToken = gql.MutationField( - name='generateDatasetAccessToken', - args=[gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String))], - type=gql.String, - resolver=generate_dataset_access_token, -) - - -saveDatasetSummary = gql.MutationField( - name='saveDatasetSummary', - args=[ - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='content', type=gql.String), - ], - type=gql.Boolean, - resolver=save_dataset_summary, -) - - -deleteDataset = gql.MutationField( - name='deleteDataset', - args=[ - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='deleteFromAWS', type=gql.Boolean), - ], - resolver=delete_dataset, - type=gql.Boolean, -) - - -importDataset = gql.MutationField( - name='importDataset', - args=[gql.Argument(name='input', type=ImportDatasetInput)], - type=gql.Ref('Dataset'), - resolver=import_dataset, - test_scope='Dataset', -) - -publishDatasetUpdate = gql.MutationField( - name='publishDatasetUpdate', - args=[ - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='s3Prefix', type=gql.NonNullableType(gql.String)), - ], - resolver=publish_dataset_update, - type=gql.Boolean, -) - -StartGlueCrawler = gql.MutationField( - name='startGlueCrawler', - args=[ - gql.Argument(name='datasetUri', type=gql.String), - gql.Argument(name='input', type=gql.Ref('CrawlerInput')), - ], - resolver=start_crawler, - type=gql.Ref('GlueCrawler'), -) diff --git a/backend/dataall/api/Objects/Dataset/queries.py 
b/backend/dataall/api/Objects/Dataset/queries.py deleted file mode 100644 index c71408a1c..000000000 --- a/backend/dataall/api/Objects/Dataset/queries.py +++ /dev/null @@ -1,82 +0,0 @@ -from ... import gql -from .input_types import DatasetFilter -from .resolvers import * -from .schema import DatasetSearchResult - -getDataset = gql.QueryField( - name='getDataset', - args=[gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('Dataset'), - resolver=get_dataset, - test_scope='Dataset', -) - - -listDatasets = gql.QueryField( - name='listDatasets', - args=[gql.Argument('filter', DatasetFilter)], - type=DatasetSearchResult, - resolver=list_datasets, - test_scope='Dataset', -) - - -getDatasetAssumeRoleUrl = gql.QueryField( - name='getDatasetAssumeRoleUrl', - args=[gql.Argument(name='datasetUri', type=gql.String)], - type=gql.String, - resolver=get_dataset_assume_role_url, - test_scope='Dataset', -) - - -getDatasetETLCredentials = gql.QueryField( - name='getDatasetETLCredentials', - args=[gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String))], - type=gql.String, - resolver=get_dataset_etl_credentials, - test_scope='Dataset', -) - - -getDatasetSummary = gql.QueryField( - name='getDatasetSummary', - args=[gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String))], - type=gql.String, - resolver=get_dataset_summary, - test_scope='Dataset', -) - - -getDatasetPresignedUrl = gql.QueryField( - name='getDatasetPresignedUrl', - args=[ - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=gql.Ref('DatasetPresignedUrlInput')), - ], - type=gql.String, - resolver=get_file_upload_presigned_url, -) - - -getGlueCrawlerStatus = gql.MutationField( - name='getGlueCrawlerStatus', - args=[ - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='name', type=gql.NonNullableType(gql.String)), - ], - resolver=lambda *_, **__: None, - type=gql.Ref('GlueCrawler'), -) - - -listShareObjects = gql.QueryField( - name='listDatasetShareObjects', - resolver=list_dataset_share_objects, - args=[ - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='environmentUri', type=gql.String), - gql.Argument(name='page', type=gql.Integer), - ], - type=gql.Ref('ShareSearchResult'), -) diff --git a/backend/dataall/api/Objects/Dataset/resolvers.py b/backend/dataall/api/Objects/Dataset/resolvers.py deleted file mode 100644 index 39758bbd1..000000000 --- a/backend/dataall/api/Objects/Dataset/resolvers.py +++ /dev/null @@ -1,680 +0,0 @@ -import json -import logging - -from botocore.config import Config -from botocore.exceptions import ClientError - -from ..Stack import stack_helper -from .... 
import db -from ....api.constants import ( - DatasetRole, -) -from ....api.context import Context -from ....aws.handlers.glue import Glue -from ....aws.handlers.service_handlers import Worker -from ....aws.handlers.sts import SessionHelper -from ....aws.handlers.kms import KMS - -from ....aws.handlers.quicksight import Quicksight -from ....db import paginate, exceptions, permissions, models -from ....db.api import Dataset, Environment, ShareObject, ResourcePolicy -from ....db.api.organization import Organization -from ....searchproxy import indexers - -log = logging.getLogger(__name__) - - -def check_dataset_account(environment): - if environment.dashboardsEnabled: - quicksight_subscription = Quicksight.check_quicksight_enterprise_subscription(AwsAccountId=environment.AwsAccountId) - if quicksight_subscription: - group = Quicksight.create_quicksight_group(AwsAccountId=environment.AwsAccountId) - return True if group else False - return True - - -def check_imported_resources(environment, kmsAlias): - if kmsAlias not in ["Undefined", "", "SSE-S3"]: - key_id = KMS.get_key_id( - account_id=environment.AwsAccountId, - region=environment.region, - key_alias=f"alias/{kmsAlias}" - ) - if not key_id: - raise exceptions.AWSResourceNotFound( - action=permissions.IMPORT_DATASET, - message=f'KMS key with alias={kmsAlias} cannot be found', - ) - return True - - -def create_dataset(context: Context, source, input=None): - with context.engine.scoped_session() as session: - environment = Environment.get_environment_by_uri(session, input.get('environmentUri')) - check_dataset_account(environment=environment) - - dataset = Dataset.create_dataset( - session=session, - username=context.username, - groups=context.groups, - uri=input.get('environmentUri'), - data=input, - check_perm=True, - ) - Dataset.create_dataset_stack(session, dataset) - - indexers.upsert_dataset( - session=session, es=context.es, datasetUri=dataset.datasetUri - ) - - stack_helper.deploy_dataset_stack(context, dataset) - - dataset.userRoleForDataset = DatasetRole.Creator.value - - return dataset - - -def import_dataset(context: Context, source, input=None): - if not input: - raise exceptions.RequiredParameter(input) - if not input.get('environmentUri'): - raise exceptions.RequiredParameter('environmentUri') - if not input.get('bucketName'): - raise exceptions.RequiredParameter('bucketName') - if not input.get('SamlAdminGroupName'): - raise exceptions.RequiredParameter('group') - - with context.engine.scoped_session() as session: - environment = Environment.get_environment_by_uri(session, input.get('environmentUri')) - check_dataset_account(environment=environment) - check_imported_resources(environment=environment, kmsAlias=input.get('KmsKeyAlias', "")) - - dataset = Dataset.create_dataset( - session=session, - username=context.username, - groups=context.groups, - uri=input.get('environmentUri'), - data=input, - check_perm=True, - ) - dataset.imported = True - dataset.importedS3Bucket = True if input['bucketName'] else False - dataset.importedGlueDatabase = True if input.get('glueDatabaseName') else False - dataset.importedKmsKey = True if input.get('KmsKeyAlias') else False - dataset.importedAdminRole = True if input.get('adminRoleName') else False - dataset.KmsAlias = "SSE-S3" if input.get('KmsKeyAlias') == "" else input.get('KmsKeyAlias') - Dataset.create_dataset_stack(session, dataset) - - indexers.upsert_dataset( - session=session, es=context.es, datasetUri=dataset.datasetUri - ) - - stack_helper.deploy_dataset_stack(context, dataset) 
- - dataset.userRoleForDataset = DatasetRole.Creator.value - - return dataset - - -def get_dataset(context, source, datasetUri=None): - with context.engine.scoped_session() as session: - dataset = Dataset.get_dataset( - session=session, - username=context.username, - groups=context.groups, - uri=datasetUri, - ) - if dataset.SamlAdminGroupName in context.groups: - dataset.userRoleForDataset = DatasetRole.Admin.value - return dataset - - -def resolve_user_role(context: Context, source: models.Dataset, **kwargs): - if not source: - return None - if source.owner == context.username: - return DatasetRole.Creator.value - elif source.SamlAdminGroupName in context.groups: - return DatasetRole.Admin.value - elif source.stewards in context.groups: - return DatasetRole.DataSteward.value - else: - with context.engine.scoped_session() as session: - share = ( - session.query(models.ShareObject) - .filter(models.ShareObject.datasetUri == source.datasetUri) - .first() - ) - if share and ( - share.owner == context.username or share.principalId in context.groups - ): - return DatasetRole.Shared.value - return DatasetRole.NoPermission.value - - -def get_file_upload_presigned_url( - context, source, datasetUri: str = None, input: dict = None -): - with context.engine.scoped_session() as session: - dataset = Dataset.get_dataset_by_uri(session, datasetUri) - - s3_client = SessionHelper.remote_session(dataset.AwsAccountId).client( - 's3', - region_name=dataset.region, - config=Config(signature_version='s3v4', s3={'addressing_style': 'virtual'}), - ) - try: - s3_client.get_bucket_acl( - Bucket=dataset.S3BucketName, ExpectedBucketOwner=dataset.AwsAccountId - ) - response = s3_client.generate_presigned_post( - Bucket=dataset.S3BucketName, - Key=input.get('prefix', 'uploads') + '/' + input.get('fileName'), - ExpiresIn=15 * 60, - ) - - return json.dumps(response) - except ClientError as e: - raise e - - -def list_datasets(context: Context, source, filter: dict = None): - if not filter: - filter = {'page': 1, 'pageSize': 5} - with context.engine.scoped_session() as session: - return Dataset.paginated_user_datasets( - session, context.username, context.groups, uri=None, data=filter - ) - - -def list_locations(context, source: models.Dataset, filter: dict = None): - if not source: - return None - if not filter: - filter = {'page': 1, 'pageSize': 5} - with context.engine.scoped_session() as session: - return Dataset.paginated_dataset_locations( - session=session, - username=context.username, - groups=context.groups, - uri=source.datasetUri, - data=filter, - ) - - -def list_tables(context, source: models.Dataset, filter: dict = None): - if not source: - return None - if not filter: - filter = {'page': 1, 'pageSize': 5} - with context.engine.scoped_session() as session: - return Dataset.paginated_dataset_tables( - session=session, - username=context.username, - groups=context.groups, - uri=source.datasetUri, - data=filter, - ) - - -def get_dataset_organization(context, source: models.Dataset, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - return Organization.get_organization_by_uri(session, source.organizationUri) - - -def get_dataset_environment(context, source: models.Dataset, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - return Environment.get_environment_by_uri(session, source.environmentUri) - - -def get_dataset_owners_group(context, source: models.Dataset, **kwargs): - if not source: - return None - return 
source.SamlAdminGroupName - - -def get_dataset_stewards_group(context, source: models.Dataset, **kwargs): - if not source: - return None - return source.stewards - - -def update_dataset(context, source, datasetUri: str = None, input: dict = None): - with context.engine.scoped_session() as session: - dataset = Dataset.get_dataset_by_uri(session, datasetUri) - environment = Environment.get_environment_by_uri(session, dataset.environmentUri) - check_dataset_account(environment=environment) - check_imported_resources(environment=environment, kmsAlias=input.get('KmsAlias', "")) - updated_dataset = Dataset.update_dataset( - session=session, - username=context.username, - groups=context.groups, - uri=datasetUri, - data=input, - check_perm=True, - ) - indexers.upsert_dataset(session, context.es, datasetUri) - - stack_helper.deploy_dataset_stack(context, updated_dataset) - - return updated_dataset - - -def get_dataset_statistics(context: Context, source: models.Dataset, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - count_tables = db.api.Dataset.count_dataset_tables(session, source.datasetUri) - count_locations = db.api.Dataset.count_dataset_locations( - session, source.datasetUri - ) - count_upvotes = db.api.Vote.count_upvotes( - session, None, None, source.datasetUri, {'targetType': 'dataset'} - ) - return { - 'tables': count_tables or 0, - 'locations': count_locations or 0, - 'upvotes': count_upvotes or 0, - } - - -def get_dataset_etl_credentials(context: Context, source, datasetUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=permissions.CREDENTIALS_DATASET, - ) - task = models.Task(targetUri=datasetUri, action='iam.dataset.user.credentials') - session.add(task) - response = Worker.process( - engine=context.engine, task_ids=[task.taskUri], save_response=False - )[0] - return json.dumps(response['response']) - - -def get_dataset_assume_role_url(context: Context, source, datasetUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=permissions.CREDENTIALS_DATASET, - ) - dataset = Dataset.get_dataset_by_uri(session, datasetUri) - if dataset.SamlAdminGroupName not in context.groups: - share = ShareObject.get_share_by_dataset_attributes( - session=session, - dataset_uri=datasetUri, - dataset_owner=context.username - ) - shared_environment = Environment.get_environment_by_uri( - session=session, - uri=share.environmentUri - ) - env_group = Environment.get_environment_group( - session=session, - group_uri=share.principalId, - environment_uri=share.environmentUri - ) - role_arn = env_group.environmentIAMRoleArn - account_id = shared_environment.AwsAccountId - else: - role_arn = dataset.IAMDatasetAdminRoleArn - account_id = dataset.AwsAccountId - - pivot_session = SessionHelper.remote_session(account_id) - aws_session = SessionHelper.get_session( - base_session=pivot_session, role_arn=role_arn - ) - url = SessionHelper.get_console_access_url( - aws_session, - region=dataset.region, - bucket=dataset.S3BucketName, - ) - return url - - -def sync_tables(context: Context, source, datasetUri: str = None): - with context.engine.scoped_session() as session: - 
ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=permissions.SYNC_DATASET, - ) - dataset = Dataset.get_dataset_by_uri(session, datasetUri) - - task = models.Task( - action='glue.dataset.database.tables', - targetUri=dataset.datasetUri, - ) - session.add(task) - Worker.process(engine=context.engine, task_ids=[task.taskUri], save_response=False) - with context.engine.scoped_session() as session: - indexers.upsert_dataset_tables( - session=session, es=context.es, datasetUri=dataset.datasetUri - ) - indexers.remove_deleted_tables( - session=session, es=context.es, datasetUri=dataset.datasetUri - ) - return Dataset.paginated_dataset_tables( - session=session, - username=context.username, - groups=context.groups, - uri=datasetUri, - data={'page': 1, 'pageSize': 10}, - check_perm=None, - ) - - -def start_crawler(context: Context, source, datasetUri: str, input: dict = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=permissions.CRAWL_DATASET, - ) - - dataset = Dataset.get_dataset_by_uri(session, datasetUri) - - location = ( - f's3://{dataset.S3BucketName}/{input.get("prefix")}' - if input.get('prefix') - else f's3://{dataset.S3BucketName}' - ) - - crawler = Glue.get_glue_crawler( - { - 'crawler_name': dataset.GlueCrawlerName, - 'region': dataset.region, - 'accountid': dataset.AwsAccountId, - } - ) - if not crawler: - raise exceptions.AWSResourceNotFound( - action=permissions.CRAWL_DATASET, - message=f'Crawler {dataset.GlueCrawlerName} can not be found', - ) - - task = models.Task( - targetUri=datasetUri, - action='glue.crawler.start', - payload={'location': location}, - ) - session.add(task) - session.commit() - - Worker.queue(engine=context.engine, task_ids=[task.taskUri]) - - return { - 'Name': dataset.GlueCrawlerName, - 'AwsAccountId': dataset.AwsAccountId, - 'region': dataset.region, - 'status': crawler.get('LastCrawl', {}).get('Status', 'N/A'), - } - - -def list_dataset_share_objects(context, source, filter: dict = None): - if not source: - return None - if not filter: - filter = {'page': 1, 'pageSize': 5} - with context.engine.scoped_session() as session: - return Dataset.paginated_dataset_shares( - session=session, - username=context.username, - groups=context.groups, - uri=source.datasetUri, - data=filter, - check_perm=True, - ) - - -def generate_dataset_access_token(context, source, datasetUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=permissions.CREDENTIALS_DATASET, - ) - dataset = Dataset.get_dataset_by_uri(session, datasetUri) - - pivot_session = SessionHelper.remote_session(dataset.AwsAccountId) - aws_session = SessionHelper.get_session( - base_session=pivot_session, role_arn=dataset.IAMDatasetAdminRoleArn - ) - c = aws_session.get_credentials() - credentials = { - 'AccessKey': c.access_key, - 'SessionKey': c.secret_key, - 'sessionToken': c.token, - } - - return json.dumps(credentials) - - -def get_dataset_summary(context, source, datasetUri: str = None): - with context.engine.scoped_session() as session: - dataset = Dataset.get_dataset_by_uri(session, datasetUri) - environment = 
Environment.get_environment_by_uri( - session, dataset.environmentUri - ) - - pivot_session = SessionHelper.remote_session(dataset.AwsAccountId) - env_admin_session = SessionHelper.get_session( - base_session=pivot_session, - role_arn=environment.EnvironmentDefaultIAMRoleArn, - ) - s3 = env_admin_session.client('s3', region_name=dataset.region) - - try: - s3.head_object( - Bucket=environment.EnvironmentDefaultBucketName, - Key=f'summary/{datasetUri}/summary.md', - ) - response = s3.get_object( - Bucket=environment.EnvironmentDefaultBucketName, - Key=f'summary/{datasetUri}/summary.md', - ) - content = str(response['Body'].read().decode('utf-8')) - return content - except Exception as e: - raise e - - -def save_dataset_summary( - context: Context, source, datasetUri: str = None, content: str = None -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=permissions.SUMMARY_DATASET, - ) - dataset = Dataset.get_dataset_by_uri(session, datasetUri) - environment = Environment.get_environment_by_uri( - session, dataset.environmentUri - ) - - pivot_session = SessionHelper.remote_session(dataset.AwsAccountId) - env_admin_session = SessionHelper.get_session( - base_session=pivot_session, - role_arn=environment.EnvironmentDefaultIAMRoleArn, - ) - s3 = env_admin_session.client('s3', region_name=dataset.region) - - s3.put_object( - Bucket=environment.EnvironmentDefaultBucketName, - Key=f'summary/{datasetUri}/summary.md', - Body=content, - ) - return True - - -def get_dataset_stack(context: Context, source: models.Dataset, **kwargs): - if not source: - return None - return stack_helper.get_stack_with_cfn_resources( - context=context, - targetUri=source.datasetUri, - environmentUri=source.environmentUri, - ) - - -def get_crawler(context, source, datasetUri: str = None, name: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=permissions.CRAWL_DATASET, - ) - dataset = Dataset.get_dataset_by_uri(session, datasetUri) - - aws_session = SessionHelper.remote_session(dataset.AwsAccountId) - client = aws_session.client('glue', region_name=dataset.region) - - response = client.get_crawler(Name=name) - return { - 'Name': name, - 'AwsAccountId': dataset.AwsAccountId, - 'region': dataset.region, - 'status': response['Crawler'].get('LastCrawl', {}).get('Status', 'N/A'), - } - - -def delete_dataset( - context: Context, source, datasetUri: str = None, deleteFromAWS: bool = False -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=permissions.DELETE_DATASET, - ) - dataset: models.Dataset = db.api.Dataset.get_dataset_by_uri(session, datasetUri) - env: models.Environment = db.api.Environment.get_environment_by_uri( - session, dataset.environmentUri - ) - shares = db.api.Dataset.list_dataset_shares_with_existing_shared_items(session, datasetUri) - if shares: - raise exceptions.UnauthorizedOperation( - action=permissions.DELETE_DATASET, - message=f'Dataset {dataset.name} is shared with other teams. 
' - 'Revoke all dataset shares before deletion.', - ) - redshift_datasets = db.api.Dataset.list_dataset_redshift_clusters( - session, datasetUri - ) - if redshift_datasets: - raise exceptions.UnauthorizedOperation( - action=permissions.DELETE_DATASET, - message='Dataset is used by Redshift clusters. ' - 'Remove clusters associations first.', - ) - - tables = [t.tableUri for t in Dataset.get_dataset_tables(session, datasetUri)] - for uri in tables: - indexers.delete_doc(es=context.es, doc_id=uri) - - folders = [f.locationUri for f in Dataset.get_dataset_folders(session, datasetUri)] - for uri in folders: - indexers.delete_doc(es=context.es, doc_id=uri) - - indexers.delete_doc(es=context.es, doc_id=datasetUri) - - Dataset.delete_dataset( - session=session, - username=context.username, - groups=context.groups, - uri=datasetUri, - data=None, - check_perm=True, - ) - - if deleteFromAWS: - stack_helper.delete_stack( - context=context, - target_uri=datasetUri, - accountid=env.AwsAccountId, - cdk_role_arn=env.CDKRoleArn, - region=env.region, - target_type='dataset', - ) - stack_helper.deploy_stack(context, dataset.environmentUri) - return True - - -def get_dataset_glossary_terms(context: Context, source: models.Dataset, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - terms = ( - session.query(models.GlossaryNode) - .join( - models.TermLink, models.TermLink.nodeUri == models.GlossaryNode.nodeUri - ) - .filter(models.TermLink.targetUri == source.datasetUri) - ) - - return paginate(terms, page_size=100, page=1).to_dict() - - -def publish_dataset_update( - context: Context, source, datasetUri: str = None, s3Prefix: str = None -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=datasetUri, - permission_name=permissions.SUBSCRIPTIONS_DATASET, - ) - dataset = Dataset.get_dataset_by_uri(session, datasetUri) - env = db.api.Environment.get_environment_by_uri(session, dataset.environmentUri) - if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: - raise Exception( - 'Subscriptions are disabled. ' - "First enable subscriptions for this dataset's environment then retry." - ) - - task = models.Task( - targetUri=datasetUri, - action='sns.dataset.publish_update', - payload={'s3Prefix': s3Prefix}, - ) - session.add(task) - - response = Worker.process( - engine=context.engine, task_ids=[task.taskUri], save_response=False - )[0] - log.info(f'Dataset update publish response: {response}') - return True - - -def resolve_redshift_copy_enabled(context, source: models.Dataset, clusterUri: str): - if not source: - return None - with context.engine.scoped_session() as session: - return db.api.RedshiftCluster.get_cluster_dataset( - session, clusterUri, source.datasetUri - ).datasetCopyEnabled diff --git a/backend/dataall/api/Objects/Dataset/schema.py b/backend/dataall/api/Objects/Dataset/schema.py deleted file mode 100644 index bede581a6..000000000 --- a/backend/dataall/api/Objects/Dataset/schema.py +++ /dev/null @@ -1,182 +0,0 @@ -from ... 
import gql -from .resolvers import * -from ...constants import DatasetRole, EnvironmentPermission - -DatasetStatistics = gql.ObjectType( - name='DatasetStatistics', - fields=[ - gql.Field(name='tables', type=gql.Integer), - gql.Field(name='locations', type=gql.Integer), - gql.Field(name='upvotes', type=gql.Integer), - ], -) - -Dataset = gql.ObjectType( - name='Dataset', - fields=[ - gql.Field(name='datasetUri', type=gql.ID), - gql.Field(name='label', type=gql.String), - gql.Field(name='name', type=gql.String), - gql.Field(name='description', type=gql.String), - gql.Field(name='tags', type=gql.ArrayType(gql.String)), - gql.Field(name='owner', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='admins', type=gql.ArrayType(gql.String)), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='region', type=gql.String), - gql.Field(name='S3BucketName', type=gql.String), - gql.Field(name='GlueDatabaseName', type=gql.String), - gql.Field(name='GlueCrawlerName', type=gql.String), - gql.Field(name='GlueCrawlerSchedule', type=gql.String), - gql.Field(name='GlueProfilingJobName', type=gql.String), - gql.Field(name='GlueProfilingTriggerSchedule', type=gql.String), - gql.Field(name='IAMDatasetAdminRoleArn', type=gql.String), - gql.Field(name='KmsAlias', type=gql.String), - gql.Field(name='bucketCreated', type=gql.Boolean), - gql.Field(name='glueDatabaseCreated', type=gql.Boolean), - gql.Field(name='iamAdminRoleCreated', type=gql.Boolean), - gql.Field(name='lakeformationLocationCreated', type=gql.Boolean), - gql.Field(name='bucketPolicyCreated', type=gql.Boolean), - gql.Field(name='SamlAdminGroupName', type=gql.String), - gql.Field(name='businessOwnerEmail', type=gql.String), - gql.Field(name='businessOwnerDelegationEmails', type=gql.ArrayType(gql.String)), - gql.Field(name='importedS3Bucket', type=gql.Boolean), - gql.Field(name='importedGlueDatabase', type=gql.Boolean), - gql.Field(name='importedKmsKey', type=gql.Boolean), - gql.Field(name='importedAdminRole', type=gql.Boolean), - gql.Field(name='imported', type=gql.Boolean), - gql.Field( - name='environment', - type=gql.Ref('Environment'), - resolver=get_dataset_environment, - ), - gql.Field( - name='organization', - type=gql.Ref('Organization'), - resolver=get_dataset_organization, - ), - gql.Field( - name='owners', - type=gql.String, - resolver=get_dataset_owners_group, - ), - gql.Field( - name='stewards', - type=gql.String, - resolver=get_dataset_stewards_group, - ), - gql.Field( - name='tables', - type=gql.Ref('DatasetTableSearchResult'), - args=[gql.Argument(name='filter', type=gql.Ref('DatasetTableFilter'))], - resolver=list_tables, - test_scope='Dataset', - ), - gql.Field( - name='locations', - type=gql.Ref('DatasetStorageLocationSearchResult'), - args=[ - gql.Argument( - name='filter', type=gql.Ref('DatasetStorageLocationFilter') - ) - ], - resolver=list_locations, - test_scope='Dataset', - ), - gql.Field( - name='userRoleForDataset', - type=DatasetRole.toGraphQLEnum(), - resolver=resolve_user_role, - ), - gql.Field( - name='userRoleInEnvironment', type=EnvironmentPermission.toGraphQLEnum() - ), - gql.Field( - name='statistics', type=DatasetStatistics, resolver=get_dataset_statistics - ), - gql.Field( - name='shares', - args=[gql.Argument(name='filter', type=gql.Ref('ShareObjectFilter'))], - type=gql.Ref('ShareSearchResult'), - resolver=list_dataset_share_objects, - test_scope='ShareObject', - test_cases=[ - 'anonymous', - 'businessowner', - 'admins', - 
'stewards', - 'unauthorized', - ], - ), - gql.Field( - name='terms', - resolver=get_dataset_glossary_terms, - type=gql.Ref('TermSearchResult'), - ), - gql.Field(name='topics', type=gql.ArrayType(gql.Ref('Topic'))), - gql.Field( - name='confidentiality', type=gql.Ref('ConfidentialityClassification') - ), - gql.Field(name='language', type=gql.Ref('Language')), - gql.Field( - name='projectPermission', - args=[ - gql.Argument(name='projectUri', type=gql.NonNullableType(gql.String)) - ], - type=gql.Ref('DatasetRole'), - ), - gql.Field( - name='redshiftClusterPermission', - args=[ - gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String)) - ], - type=gql.Ref('DatasetRole'), - ), - gql.Field( - name='redshiftDataCopyEnabled', - args=[ - gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String)) - ], - type=gql.Boolean, - resolver=resolve_redshift_copy_enabled, - ), - gql.Field( - name='isPublishedInEnvironment', - args=[ - gql.Argument( - name='environmentUri', type=gql.NonNullableType(gql.String) - ) - ], - type=gql.Boolean, - ), - gql.Field(name='stack', type=gql.Ref('Stack'), resolver=get_dataset_stack), - ], -) - - -DatasetSearchResult = gql.ObjectType( - name='DatasetSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='nodes', type=gql.ArrayType(Dataset)), - gql.Field(name='pageSize', type=gql.Integer), - gql.Field(name='nextPage', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='previousPage', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - ], -) - - -GlueCrawler = gql.ObjectType( - name='GlueCrawler', - fields=[ - gql.Field(name='Name', type=gql.ID), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='region', type=gql.String), - gql.Field(name='status', type=gql.String), - ], -) diff --git a/backend/dataall/api/Objects/DatasetProfiling/__init__.py b/backend/dataall/api/Objects/DatasetProfiling/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/DatasetProfiling/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/DatasetProfiling/input_types.py b/backend/dataall/api/Objects/DatasetProfiling/input_types.py deleted file mode 100644 index deb1739c5..000000000 --- a/backend/dataall/api/Objects/DatasetProfiling/input_types.py +++ /dev/null @@ -1,20 +0,0 @@ -from ... import gql - -StartDatasetProfilingRunInput = gql.InputType( - name='StartDatasetProfilingRunInput', - arguments=[ - gql.Argument('datasetUri', gql.NonNullableType(gql.String)), - gql.Argument('GlueTableName', gql.String), - gql.Argument('tableUri', gql.String), - ], -) - - -DatasetProfilingRunFilter = gql.InputType( - name='DatasetProfilingRunFilter', - arguments=[ - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - gql.Argument(name='term', type=gql.String), - ], -) diff --git a/backend/dataall/api/Objects/DatasetProfiling/mutations.py b/backend/dataall/api/Objects/DatasetProfiling/mutations.py deleted file mode 100644 index 559129dc8..000000000 --- a/backend/dataall/api/Objects/DatasetProfiling/mutations.py +++ /dev/null @@ -1,9 +0,0 @@ -from ... 
import gql -from .resolvers import * - -startDatasetProfilingRun = gql.MutationField( - name='startDatasetProfilingRun', - args=[gql.Argument(name='input', type=gql.Ref('StartDatasetProfilingRunInput'))], - type=gql.Ref('DatasetProfilingRun'), - resolver=start_profiling_run, -) diff --git a/backend/dataall/api/Objects/DatasetProfiling/queries.py b/backend/dataall/api/Objects/DatasetProfiling/queries.py deleted file mode 100644 index 1cbe06764..000000000 --- a/backend/dataall/api/Objects/DatasetProfiling/queries.py +++ /dev/null @@ -1,17 +0,0 @@ -from ... import gql -from .resolvers import * - - -listDatasetTableProfilingRuns = gql.QueryField( - name='listDatasetTableProfilingRuns', - args=[gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('DatasetProfilingRunSearchResults'), - resolver=list_table_profiling_runs, -) - -getDatasetTableLastProfilingRun = gql.QueryField( - name='getDatasetTableProfilingRun', - args=[gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('DatasetProfilingRun'), - resolver=get_dataset_table_profiling_run, -) diff --git a/backend/dataall/api/Objects/DatasetProfiling/resolvers.py b/backend/dataall/api/Objects/DatasetProfiling/resolvers.py deleted file mode 100644 index 11c19b888..000000000 --- a/backend/dataall/api/Objects/DatasetProfiling/resolvers.py +++ /dev/null @@ -1,163 +0,0 @@ -import json -import logging - -from .... import db -from ....api.context import Context -from ....aws.handlers.service_handlers import Worker -from ....aws.handlers.sts import SessionHelper -from ....db import api, permissions, models -from ....db.api import ResourcePolicy - -log = logging.getLogger(__name__) - - -def resolve_dataset(context, source: models.DatasetProfilingRun): - if not source: - return None - with context.engine.scoped_session() as session: - return api.Dataset.get_dataset_by_uri( - session=session, dataset_uri=source.datasetUri - ) - - -def resolve_profiling_run_status(context: Context, source: models.DatasetProfilingRun): - if not source: - return None - with context.engine.scoped_session() as session: - task = models.Task( - targetUri=source.profilingRunUri, action='glue.job.profiling_run_status' - ) - session.add(task) - Worker.queue(engine=context.engine, task_ids=[task.taskUri]) - return source.status - - -def resolve_profiling_results(context: Context, source: models.DatasetProfilingRun): - if not source or source.results == {}: - return None - else: - return json.dumps(source.results) - - -def start_profiling_run(context: Context, source, input: dict = None): - """ - Triggers profiling jobs on a Table. 
- Only Dataset owners with PROFILE_DATASET_TABLE can perform this action - """ - with context.engine.scoped_session() as session: - - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=input['datasetUri'], - permission_name=permissions.PROFILE_DATASET_TABLE, - ) - dataset = api.Dataset.get_dataset_by_uri(session, input['datasetUri']) - - run = api.DatasetProfilingRun.start_profiling( - session=session, - datasetUri=dataset.datasetUri, - tableUri=input.get('tableUri'), - GlueTableName=input.get('GlueTableName'), - ) - - task = models.Task( - targetUri=run.profilingRunUri, action='glue.job.start_profiling_run' - ) - session.add(task) - - Worker.process(engine=context.engine, task_ids=[task.taskUri]) - - return run - - -def get_dataset_table_profiling_run(context: Context, source, tableUri=None): - """ - Shows the results of the last profiling job on a Table. - For datasets "Unclassified" all users can perform this action. - For datasets "Secret" or "Official", only users with PREVIEW_DATASET_TABLE permissions can perform this action. - """ - with context.engine.scoped_session() as session: - _check_preview_permissions_if_needed(context=context, session=session, tableUri=tableUri) - run: models.DatasetProfilingRun = ( - api.DatasetProfilingRun.get_table_last_profiling_run( - session=session, tableUri=tableUri - ) - ) - - if run: - if not run.results: - table = api.DatasetTable.get_dataset_table_by_uri(session, tableUri) - dataset = api.Dataset.get_dataset_by_uri(session, table.datasetUri) - environment = api.Environment.get_environment_by_uri( - session, dataset.environmentUri - ) - content = _get_profiling_results_from_s3( - environment, dataset, table, run - ) - if content: - results = json.loads(content) - run.results = results - - if not run.results: - run_with_results = ( - api.DatasetProfilingRun.get_table_last_profiling_run_with_results( - session=session, tableUri=tableUri - ) - ) - if run_with_results: - run = run_with_results - - return run - - -def _get_profiling_results_from_s3(environment, dataset, table, run): - s3 = SessionHelper.remote_session(environment.AwsAccountId).client( - 's3', region_name=environment.region - ) - try: - key = f'profiling/results/{dataset.datasetUri}/{table.GlueTableName}/{run.GlueJobRunId}/results.json' - s3.head_object(Bucket=environment.EnvironmentDefaultBucketName, Key=key) - response = s3.get_object( - Bucket=environment.EnvironmentDefaultBucketName, Key=key - ) - content = str(response['Body'].read().decode('utf-8')) - return content - except Exception as e: - log.error( - f'Failed to retrieve S3 results for table profiling job ' - f'{table.GlueTableName}//{run.GlueJobRunId} due to {e}' - ) - - -def list_table_profiling_runs(context: Context, source, tableUri=None): - """ - Lists the runs of a profiling job on a Table. - For datasets "Unclassified" all users can perform this action. - For datasets "Secret" or "Official", only users with PREVIEW_DATASET_TABLE permissions can perform this action. 
- """ - with context.engine.scoped_session() as session: - _check_preview_permissions_if_needed(context=context, session=session, tableUri=tableUri) - return api.DatasetProfilingRun.list_table_profiling_runs( - session=session, tableUri=tableUri, filter={} - ) - - -def _check_preview_permissions_if_needed(context, session, tableUri): - table: models.DatasetTable = db.api.DatasetTable.get_dataset_table_by_uri( - session, tableUri - ) - dataset = db.api.Dataset.get_dataset_by_uri(session, table.datasetUri) - if ( - dataset.confidentiality - != models.ConfidentialityClassification.Unclassified.value - ): - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=table.tableUri, - permission_name=permissions.PREVIEW_DATASET_TABLE, - ) - return True diff --git a/backend/dataall/api/Objects/DatasetProfiling/schema.py b/backend/dataall/api/Objects/DatasetProfiling/schema.py deleted file mode 100644 index 88edbc403..000000000 --- a/backend/dataall/api/Objects/DatasetProfiling/schema.py +++ /dev/null @@ -1,38 +0,0 @@ -from ... import gql -from .resolvers import ( - resolve_dataset, - resolve_profiling_run_status, - resolve_profiling_results, -) - -DatasetProfilingRun = gql.ObjectType( - name='DatasetProfilingRun', - fields=[ - gql.Field(name='profilingRunUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='GlueJobName', type=gql.String), - gql.Field(name='GlueJobRunId', type=gql.String), - gql.Field(name='GlueTriggerSchedule', type=gql.String), - gql.Field(name='GlueTriggerName', type=gql.String), - gql.Field(name='GlueTableName', type=gql.String), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='results', type=gql.String, resolver=resolve_profiling_results), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='owner', type=gql.String), - gql.Field('status', type=gql.String, resolver=resolve_profiling_run_status), - gql.Field(name='dataset', type=gql.Ref('Dataset'), resolver=resolve_dataset), - ], -) - -DatasetProfilingRunSearchResults = gql.ObjectType( - name='DatasetProfilingRunSearchResults', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(DatasetProfilingRun)), - ], -) diff --git a/backend/dataall/api/Objects/DatasetStorageLocation/__init__.py b/backend/dataall/api/Objects/DatasetStorageLocation/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/DatasetStorageLocation/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/DatasetStorageLocation/input_types.py b/backend/dataall/api/Objects/DatasetStorageLocation/input_types.py deleted file mode 100644 index f948bebad..000000000 --- a/backend/dataall/api/Objects/DatasetStorageLocation/input_types.py +++ /dev/null @@ -1,41 +0,0 @@ -from ... 
import gql
-
-NewDatasetStorageLocationInput = gql.InputType(
-    name='NewDatasetStorageLocationInput',
-    arguments=[
-        gql.Argument('label', gql.NonNullableType(gql.String)),
-        gql.Argument('description', gql.String),
-        gql.Argument('tags', gql.ArrayType(gql.String)),
-        gql.Argument('terms', gql.ArrayType(gql.String)),
-        gql.Argument('prefix', gql.NonNullableType(gql.String)),
-    ],
-)
-
-ModifyDatasetFolderInput = gql.InputType(
-    name='ModifyDatasetStorageLocationInput',
-    arguments=[
-        gql.Argument('locationUri', gql.String),
-        gql.Argument('label', gql.String),
-        gql.Argument('description', gql.String),
-        gql.Argument('tags', gql.ArrayType(gql.String)),
-        gql.Argument('terms', gql.ArrayType(gql.String)),
-    ],
-)
-
-DatasetStorageLocationFilter = gql.InputType(
-    name='DatasetStorageLocationFilter',
-    arguments=[
-        gql.Argument('term', gql.String),
-        gql.Argument('page', gql.Integer),
-        gql.Argument('pageSize', gql.Integer),
-    ],
-)
-
-
-DatasetAccessPointFilter = gql.InputType(
-    name='DatasetAccessPointFilter',
-    arguments=[
-        gql.Argument(name='page', type=gql.Integer),
-        gql.Argument(name='pageSize', type=gql.Integer),
-    ],
-)
diff --git a/backend/dataall/api/Objects/DatasetStorageLocation/mutations.py b/backend/dataall/api/Objects/DatasetStorageLocation/mutations.py
deleted file mode 100644
index 5b89cc6c1..000000000
--- a/backend/dataall/api/Objects/DatasetStorageLocation/mutations.py
+++ /dev/null
@@ -1,44 +0,0 @@
-from ... import gql
-from .input_types import (
-    ModifyDatasetFolderInput,
-    NewDatasetStorageLocationInput,
-)
-from .resolvers import *
-from .schema import DatasetStorageLocation
-
-createDatasetStorageLocation = gql.MutationField(
-    name='createDatasetStorageLocation',
-    args=[
-        gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)),
-        gql.Argument(name='input', type=NewDatasetStorageLocationInput),
-    ],
-    type=gql.Thunk(lambda: DatasetStorageLocation),
-    resolver=create_storage_location,
-)
-
-updateDatasetStorageLocation = gql.MutationField(
-    name='updateDatasetStorageLocation',
-    args=[
-        gql.Argument(name='locationUri', type=gql.String),
-        gql.Argument(name='input', type=ModifyDatasetFolderInput),
-    ],
-    type=gql.Thunk(lambda: DatasetStorageLocation),
-    resolver=update_storage_location,
-)
-
-
-deleteDatasetStorageLocation = gql.MutationField(
-    name='deleteDatasetStorageLocation',
-    args=[gql.Argument(name='locationUri', type=gql.NonNullableType(gql.String))],
-    resolver=remove_storage_location,
-    type=gql.Boolean,
-)
-
-publishDatasetStorageLocationUpdate = gql.MutationField(
-    name='publishDatasetStorageLocationUpdate',
-    args=[
-        gql.Argument(name='locationUri', type=gql.NonNullableType(gql.String)),
-    ],
-    resolver=publish_location_update,
-    type=gql.Boolean,
-)
diff --git a/backend/dataall/api/Objects/DatasetStorageLocation/queries.py b/backend/dataall/api/Objects/DatasetStorageLocation/queries.py
deleted file mode 100644
index 1baa5a7f9..000000000
--- a/backend/dataall/api/Objects/DatasetStorageLocation/queries.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from ...
import gql -from .resolvers import * - -getDatasetStorageLocation = gql.QueryField( - name='getDatasetStorageLocation', - args=[gql.Argument(name='locationUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('DatasetStorageLocation'), - resolver=get_storage_location, -) diff --git a/backend/dataall/api/Objects/DatasetStorageLocation/resolvers.py b/backend/dataall/api/Objects/DatasetStorageLocation/resolvers.py deleted file mode 100644 index 1a4171444..000000000 --- a/backend/dataall/api/Objects/DatasetStorageLocation/resolvers.py +++ /dev/null @@ -1,139 +0,0 @@ -from ....api.context import Context -from ....aws.handlers.service_handlers import Worker -from ....aws.handlers.s3 import S3 -from ....db import permissions, models -from ....db.api import ( - ResourcePolicy, - Glossary, - DatasetStorageLocation, - Dataset, - Environment, -) -from ....searchproxy import indexers - - -def create_storage_location( - context, source, datasetUri: str = None, input: dict = None -): - with context.engine.scoped_session() as session: - location = DatasetStorageLocation.create_dataset_location( - session=session, - username=context.username, - groups=context.groups, - uri=datasetUri, - data=input, - check_perm=True, - ) - - S3.create_bucket_prefix(location) - - indexers.upsert_folder( - session=session, es=context.es, locationUri=location.locationUri - ) - return location - - -def list_dataset_locations(context, source, filter: dict = None): - if not source: - return None - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return DatasetStorageLocation.list_dataset_locations( - session=session, uri=source.datasetUri, data=filter, check_perm=True - ) - - -def get_storage_location(context, source, locationUri=None): - with context.engine.scoped_session() as session: - location = DatasetStorageLocation.get_location_by_uri(session, locationUri) - return DatasetStorageLocation.get_dataset_location( - session=session, - username=context.username, - groups=context.groups, - uri=location.datasetUri, - data={'locationUri': location.locationUri}, - check_perm=True, - ) - - -def update_storage_location( - context, source, locationUri: str = None, input: dict = None -): - with context.engine.scoped_session() as session: - location = DatasetStorageLocation.get_location_by_uri(session, locationUri) - input['location'] = location - input['locationUri'] = location.locationUri - DatasetStorageLocation.update_dataset_location( - session=session, - username=context.username, - groups=context.groups, - uri=location.datasetUri, - data=input, - check_perm=True, - ) - indexers.upsert_folder(session, context.es, location.locationUri) - - return location - - -def remove_storage_location(context, source, locationUri: str = None): - with context.engine.scoped_session() as session: - location = DatasetStorageLocation.get_location_by_uri(session, locationUri) - DatasetStorageLocation.delete_dataset_location( - session=session, - username=context.username, - groups=context.groups, - uri=location.datasetUri, - data={'locationUri': location.locationUri}, - check_perm=True, - ) - indexers.delete_doc(es=context.es, doc_id=location.locationUri) - return True - - -def resolve_dataset(context, source: models.DatasetStorageLocation, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - d = session.query(models.Dataset).get(source.datasetUri) - return d - - -def publish_location_update(context: Context, source, locationUri: str = None): - with 
context.engine.scoped_session() as session: - location = DatasetStorageLocation.get_location_by_uri(session, locationUri) - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=location.datasetUri, - permission_name=permissions.UPDATE_DATASET_FOLDER, - ) - dataset = Dataset.get_dataset_by_uri(session, location.datasetUri) - env = Environment.get_environment_by_uri(session, dataset.environmentUri) - if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: - raise Exception( - 'Subscriptions are disabled. ' - "First enable subscriptions for this dataset's environment then retry." - ) - task = models.Task( - targetUri=location.datasetUri, - action='sns.dataset.publish_update', - payload={'s3Prefix': location.S3Prefix}, - ) - session.add(task) - - Worker.process(engine=context.engine, task_ids=[task.taskUri], save_response=False) - return True - - -def resolve_glossary_terms( - context: Context, source: models.DatasetStorageLocation, **kwargs -): - if not source: - return None - with context.engine.scoped_session() as session: - return Glossary.get_glossary_terms_links( - session, source.locationUri, 'DatasetStorageLocation' - ) diff --git a/backend/dataall/api/Objects/DatasetStorageLocation/schema.py b/backend/dataall/api/Objects/DatasetStorageLocation/schema.py deleted file mode 100644 index d05309f0b..000000000 --- a/backend/dataall/api/Objects/DatasetStorageLocation/schema.py +++ /dev/null @@ -1,76 +0,0 @@ -from ... import gql -from .resolvers import * - -DatasetStorageLocation = gql.ObjectType( - name='DatasetStorageLocation', - fields=[ - gql.Field(name='locationUri', type=gql.ID), - gql.Field(name='label', type=gql.String), - gql.Field(name='name', type=gql.String), - gql.Field(name='description', type=gql.String), - gql.Field(name='owner', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='region', type=gql.String), - gql.Field(name='tags', type=gql.ArrayType(gql.String)), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='S3BucketName', type=gql.String), - gql.Field(name='S3Prefix', type=gql.String), - gql.Field(name='locationCreated', type=gql.Boolean), - gql.Field(name='dataset', type=gql.Ref('Dataset'), resolver=resolve_dataset), - gql.Field(name='userRoleForStorageLocation', type=gql.Ref('DatasetRole')), - gql.Field(name='environmentEndPoint', type=gql.String), - gql.Field( - name='terms', - type=gql.Ref('TermSearchResult'), - resolver=resolve_glossary_terms, - ), - ], -) - - -DatasetStorageLocationSearchResult = gql.ObjectType( - name='DatasetStorageLocationSearchResult', - fields=[ - gql.Field(name='nodes', type=gql.ArrayType(DatasetStorageLocation)), - gql.Field(name='count', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - ], -) - - -DatasetAccessPoint = gql.ObjectType( - name='DatasetAccessPoint', - fields=[ - gql.Field(name='accessPointUri', type=gql.ID), - gql.Field(name='location', type=DatasetStorageLocation), - gql.Field(name='dataset', type=gql.Ref('Dataset')), - gql.Field(name='name', type=gql.String), - gql.Field(name='description', type=gql.String), - gql.Field(name='owner', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='region', 
type=gql.String), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='S3BucketName', type=gql.String), - gql.Field(name='S3Prefix', type=gql.String), - gql.Field(name='S3AccessPointName', type=gql.String), - ], -) - - -DatasetAccessPointSearchResult = gql.ObjectType( - name='DatasetAccessPointSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pageSize', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Integer), - gql.Field(name='hasPrevious', type=gql.Integer), - gql.Field(name='nodes', type=gql.ArrayType(DatasetAccessPoint)), - ], -) diff --git a/backend/dataall/api/Objects/DatasetTable/__init__.py b/backend/dataall/api/Objects/DatasetTable/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/DatasetTable/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/DatasetTable/input_types.py b/backend/dataall/api/Objects/DatasetTable/input_types.py deleted file mode 100644 index a5bd07998..000000000 --- a/backend/dataall/api/Objects/DatasetTable/input_types.py +++ /dev/null @@ -1,52 +0,0 @@ -from ... import gql -from ....api.constants import SortDirection, GraphQLEnumMapper - - -NewDatasetTableInput = gql.InputType( - name='NewDatasetTableInput', - arguments=[ - gql.Argument('label', gql.String), - gql.Argument('name', gql.NonNullableType(gql.String)), - gql.Argument('tags', gql.ArrayType(gql.String)), - gql.Argument('description', gql.String), - gql.Argument('config', gql.String), - gql.Argument('region', gql.String), - ], -) - -ModifyDatasetTableInput = gql.InputType( - name='ModifyDatasetTableInput', - arguments=[ - gql.Argument('label', gql.String), - gql.Argument('prefix', gql.String), - gql.Argument('description', gql.String), - gql.Argument('tags', gql.ArrayType(gql.String)), - gql.Argument('terms', gql.ArrayType(gql.String)), - gql.Argument('topics', gql.ArrayType(gql.String)), - ], -) - - -class DatasetSortField(GraphQLEnumMapper): - created = 'created' - updated = 'updated' - label = 'label' - - -DatasetSortCriteria = gql.InputType( - name='DatasetSortCriteria', - arguments=[ - gql.Argument(name='field', type=DatasetSortField.toGraphQLEnum()), - gql.Argument(name='direction', type=SortDirection.toGraphQLEnum()), - ], -) - -DatasetTableFilter = gql.InputType( - name='DatasetTableFilter', - arguments=[ - gql.Argument('term', gql.String), - gql.Argument('sort', gql.ArrayType(DatasetSortCriteria)), - gql.Argument('page', gql.Integer), - gql.Argument('pageSize', gql.Integer), - ], -) diff --git a/backend/dataall/api/Objects/DatasetTable/mutations.py b/backend/dataall/api/Objects/DatasetTable/mutations.py deleted file mode 100644 index 532605cff..000000000 --- a/backend/dataall/api/Objects/DatasetTable/mutations.py +++ /dev/null @@ -1,42 +0,0 @@ -from ... 
import gql -from .input_types import ( - ModifyDatasetTableInput, - NewDatasetTableInput, -) -from .resolvers import * - -createDatasetTable = gql.MutationField( - name='createDatasetTable', - args=[ - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=NewDatasetTableInput), - ], - type=gql.Ref('DatasetTable'), - resolver=create_table, -) - -updateDatasetTable = gql.MutationField( - name='updateDatasetTable', - args=[ - gql.Argument(name='tableUri', type=gql.String), - gql.Argument(name='input', type=ModifyDatasetTableInput), - ], - type=gql.Ref('DatasetTable'), - resolver=update_table, -) - -deleteDatasetTable = gql.MutationField( - name='deleteDatasetTable', - args=[gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String))], - type=gql.Boolean, - resolver=delete_table, -) - -publishDatasetTableUpdate = gql.MutationField( - name='publishDatasetTableUpdate', - args=[ - gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String)), - ], - resolver=publish_table_update, - type=gql.Boolean, -) diff --git a/backend/dataall/api/Objects/DatasetTable/queries.py b/backend/dataall/api/Objects/DatasetTable/queries.py deleted file mode 100644 index 8f7809e62..000000000 --- a/backend/dataall/api/Objects/DatasetTable/queries.py +++ /dev/null @@ -1,48 +0,0 @@ -from ... import gql -from .input_types import DatasetTableFilter -from .resolvers import * -from .schema import ( - DatasetTable, - DatasetTableSearchResult, -) - -getDatasetTable = gql.QueryField( - name='getDatasetTable', - args=[gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String))], - type=gql.Thunk(lambda: DatasetTable), - resolver=get_table, -) - - -listDatasetTables = gql.QueryField( - name='listDatasetTables', - args=[gql.Argument('filter', DatasetTableFilter)], - type=DatasetTableSearchResult, - resolver=lambda *_, **__: None, -) - - -QueryPreviewResult = gql.ObjectType( - name='QueryPreviewResult', - fields=[ - gql.Field(name='fields', type=gql.ArrayType(gql.String)), - gql.Field(name='rows', type=gql.ArrayType(gql.String)), - ], -) - -previewTable2 = gql.QueryField( - name='previewTable2', - args=[gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String))], - resolver=preview, - type=gql.Ref('QueryPreviewResult'), -) - -getSharedDatasetTables = gql.QueryField( - name='getSharedDatasetTables', - args=[ - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='envUri', type=gql.NonNullableType(gql.String)) - ], - type=gql.ArrayType(gql.Ref('SharedDatasetTableItem')), - resolver=list_shared_tables_by_env_dataset, -) diff --git a/backend/dataall/api/Objects/DatasetTable/resolvers.py b/backend/dataall/api/Objects/DatasetTable/resolvers.py deleted file mode 100644 index 83e016cfb..000000000 --- a/backend/dataall/api/Objects/DatasetTable/resolvers.py +++ /dev/null @@ -1,244 +0,0 @@ -import json -import logging - -from botocore.exceptions import ClientError -from pyathena import connect - -from .... 
import db -from ..Dataset.resolvers import get_dataset -from ....api.context import Context -from ....aws.handlers.service_handlers import Worker -from ....aws.handlers.sts import SessionHelper -from ....db import permissions, models -from ....db.api import ResourcePolicy, Glossary -from ....searchproxy import indexers -from ....utils import json_utils, sql_utils - -log = logging.getLogger(__name__) - - -def create_table(context, source, datasetUri: str = None, input: dict = None): - with context.engine.scoped_session() as session: - table = db.api.DatasetTable.create_dataset_table( - session=session, - username=context.username, - groups=context.groups, - uri=datasetUri, - data=input, - check_perm=True, - ) - indexers.upsert_table(session, context.es, table.tableUri) - return table - - -def list_dataset_tables(context, source, filter: dict = None): - if not source: - return None - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.DatasetTable.list_dataset_tables( - session=session, - username=context.username, - groups=context.groups, - uri=source.datasetUri, - data=filter, - check_perm=True, - ) - - -def get_table(context, source: models.Dataset, tableUri: str = None): - with context.engine.scoped_session() as session: - table = db.api.DatasetTable.get_dataset_table_by_uri(session, tableUri) - return db.api.DatasetTable.get_dataset_table( - session=session, - username=context.username, - groups=context.groups, - uri=table.datasetUri, - data={ - 'tableUri': tableUri, - }, - check_perm=True, - ) - - -def update_table(context, source, tableUri: str = None, input: dict = None): - with context.engine.scoped_session() as session: - table = db.api.DatasetTable.get_dataset_table_by_uri(session, tableUri) - - dataset = db.api.Dataset.get_dataset_by_uri(session, table.datasetUri) - - input['table'] = table - input['tableUri'] = table.tableUri - - db.api.DatasetTable.update_dataset_table( - session=session, - username=context.username, - groups=context.groups, - uri=dataset.datasetUri, - data=input, - check_perm=True, - ) - indexers.upsert_table(session, context.es, table.tableUri) - return table - - -def delete_table(context, source, tableUri: str = None): - with context.engine.scoped_session() as session: - table = db.api.DatasetTable.get_dataset_table_by_uri(session, tableUri) - db.api.DatasetTable.delete_dataset_table( - session=session, - username=context.username, - groups=context.groups, - uri=table.datasetUri, - data={ - 'tableUri': tableUri, - }, - check_perm=True, - ) - indexers.delete_doc(es=context.es, doc_id=tableUri) - return True - - -def preview(context, source, tableUri: str = None): - with context.engine.scoped_session() as session: - table: models.DatasetTable = db.api.DatasetTable.get_dataset_table_by_uri( - session, tableUri - ) - dataset = db.api.Dataset.get_dataset_by_uri(session, table.datasetUri) - if ( - dataset.confidentiality - != models.ConfidentialityClassification.Unclassified.value - ): - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=table.tableUri, - permission_name=permissions.PREVIEW_DATASET_TABLE, - ) - env = db.api.Environment.get_environment_by_uri(session, dataset.environmentUri) - env_workgroup = {} - boto3_session = SessionHelper.remote_session(accountid=table.AWSAccountId) - creds = boto3_session.get_credentials() - try: - env_workgroup = boto3_session.client( - 'athena', region_name=env.region - 
).get_work_group(WorkGroup=env.EnvironmentDefaultAthenaWorkGroup) - except ClientError as e: - log.info( - f'Workgroup {env.EnvironmentDefaultAthenaWorkGroup} can not be found' - f'due to: {e}' - ) - - connection = connect( - aws_access_key_id=creds.access_key, - aws_secret_access_key=creds.secret_key, - aws_session_token=creds.token, - work_group=env_workgroup.get('WorkGroup', {}).get('Name', 'primary'), - s3_staging_dir=f's3://{env.EnvironmentDefaultBucketName}/preview/{dataset.datasetUri}/{table.tableUri}', - region_name=table.region, - ) - cursor = connection.cursor() - - SQL = 'select * from {table_identifier} limit 50'.format( - table_identifier=sql_utils.Identifier(table.GlueDatabaseName, table.GlueTableName) - ) - cursor.execute(SQL) - fields = [] - for f in cursor.description: - fields.append(json.dumps({'name': f[0]})) - rows = [] - for row in cursor: - rows.append(json.dumps(json_utils.to_json(list(row)))) - - return {'rows': rows, 'fields': fields} - - -def get_glue_table_properties(context: Context, source: models.DatasetTable, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - table: models.DatasetTable = db.api.DatasetTable.get_dataset_table_by_uri( - session, source.tableUri - ) - return json_utils.to_string(table.GlueTableProperties).replace('\\', ' ') - - -def resolve_dataset(context, source: models.DatasetTable, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - dataset_with_role = get_dataset( - context, source=None, datasetUri=source.datasetUri - ) - if not dataset_with_role: - return None - return dataset_with_role - - -def resolve_glossary_terms(context: Context, source: models.DatasetTable, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - return Glossary.get_glossary_terms_links( - session, source.tableUri, 'DatasetTable' - ) - - -def publish_table_update(context: Context, source, tableUri: str = None): - with context.engine.scoped_session() as session: - table: models.DatasetTable = db.api.DatasetTable.get_dataset_table_by_uri( - session, tableUri - ) - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=table.datasetUri, - permission_name=permissions.UPDATE_DATASET_TABLE, - ) - dataset = db.api.Dataset.get_dataset_by_uri(session, table.datasetUri) - env = db.api.Environment.get_environment_by_uri(session, dataset.environmentUri) - if not env.subscriptionsEnabled or not env.subscriptionsProducersTopicName: - raise Exception( - 'Subscriptions are disabled. ' - "First enable subscriptions for this dataset's environment then retry." 
- ) - - task = models.Task( - targetUri=table.datasetUri, - action='sns.dataset.publish_update', - payload={'s3Prefix': table.S3Prefix}, - ) - session.add(task) - - Worker.process(engine=context.engine, task_ids=[task.taskUri], save_response=False) - return True - - -def resolve_redshift_copy_schema(context, source: models.DatasetTable, clusterUri: str): - if not source: - return None - with context.engine.scoped_session() as session: - return db.api.RedshiftCluster.get_cluster_dataset_table( - session, clusterUri, source.datasetUri, source.tableUri - ).schema - - -def resolve_redshift_copy_location( - context, source: models.DatasetTable, clusterUri: str -): - with context.engine.scoped_session() as session: - return db.api.RedshiftCluster.get_cluster_dataset_table( - session, clusterUri, source.datasetUri, source.tableUri - ).dataLocation - - -def list_shared_tables_by_env_dataset(context: Context, source, datasetUri: str, envUri: str, filter: dict = None): - with context.engine.scoped_session() as session: - return db.api.DatasetTable.get_dataset_tables_shared_with_env( - session, - envUri, - datasetUri - ) diff --git a/backend/dataall/api/Objects/DatasetTable/schema.py b/backend/dataall/api/Objects/DatasetTable/schema.py deleted file mode 100644 index dc1cffcb4..000000000 --- a/backend/dataall/api/Objects/DatasetTable/schema.py +++ /dev/null @@ -1,139 +0,0 @@ -from ..DatasetTableColumn.resolvers import list_table_columns -from ... import gql -from .resolvers import * -from ...constants import GraphQLEnumMapper - -TablePermission = gql.ObjectType( - name='TablePermission', - fields=[ - gql.Field(name='userName', type=gql.String), - gql.Field(name='created', type=gql.String), - ], -) - -TablePermissionSearchResult = gql.ObjectType( - name='TablePermissionSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='nodes', type=gql.ArrayType(TablePermission)), - ], -) - - -DatasetTable = gql.ObjectType( - name='DatasetTable', - fields=[ - gql.Field(name='tableUri', type=gql.ID), - gql.Field(name='datasetUri', type=gql.String), - gql.Field(name='dataset', type=gql.Ref('Dataset'), resolver=resolve_dataset), - gql.Field(name='label', type=gql.String), - gql.Field(name='name', type=gql.String), - gql.Field(name='description', type=gql.String), - gql.Field(name='owner', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='admins', type=gql.ArrayType(gql.String)), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='GlueDatabaseName', type=gql.String), - gql.Field(name='GlueTableName', type=gql.String), - gql.Field(name='LastGlueTableStatus', type=gql.String), - gql.Field(name='S3Prefix', type=gql.String), - gql.Field(name='GlueTableConfig', type=gql.String), - gql.Field( - name='GlueTableProperties', - type=gql.String, - resolver=get_glue_table_properties, - ), - gql.Field(name='region', type=gql.String), - gql.Field(name='tags', type=gql.ArrayType(gql.String)), - gql.Field(name='userRoleForTable', type=gql.Ref('DatasetRole')), - gql.Field(name='stage', type=gql.String), - gql.Field( - name='columns', - resolver=list_table_columns, - type=gql.Ref('DatasetTableColumnSearchResult'), - ), - gql.Field( - name='RedshiftSchema', - args=[ - gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String)) - ], - type=gql.String, - resolver=resolve_redshift_copy_schema, - ), - gql.Field( - name='RedshiftCopyDataLocation', - args=[ - gql.Argument(name='clusterUri', 
type=gql.NonNullableType(gql.String)) - ], - type=gql.String, - resolver=resolve_redshift_copy_location, - ), - gql.Field( - name='terms', - type=gql.Ref('TermSearchResult'), - resolver=resolve_glossary_terms, - ), - ], -) - - -DatasetTableSearchResult = gql.ObjectType( - name='DatasetTableSearchResult', - fields=[ - gql.Field(name='nodes', type=gql.ArrayType(DatasetTable)), - gql.Field(name='count', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - ], -) - - -DatasetTableRecord = gql.ObjectType( - name='DatasetTableRecord', fields=[gql.Field(name='data', type=gql.String)] -) - -DatasetTableMetadataItem = gql.ObjectType( - name='DatasetTableMetadataItem', - fields=[ - gql.Field(name='Name', type=gql.String), - gql.Field(name='Type', type=gql.String), - ], -) - - -class DatasetTablePreviewStatus(GraphQLEnumMapper): - QUEUED = 'QUEUED' - RUNNING = 'RUNNING' - SUCCEEDED = 'SUCCEEDED' - FAILED = 'FAILED' - CANCELLED = 'CANCELLED' - - -DatasetTablePreviewResult = gql.ObjectType( - name='DatasetTablePreviewResult', - fields=[ - gql.Field(name='queryExecutionId', type=gql.ID), - gql.Field(name='status', type=DatasetTablePreviewStatus.toGraphQLEnum()), - gql.Field(name='count', type=gql.Integer), - gql.Field(name='nodes', type=gql.ArrayType(DatasetTableRecord)), - gql.Field(name='schema', type=gql.ArrayType(DatasetTableMetadataItem)), - gql.Field(name='pageSize', type=gql.Integer), - gql.Field(name='nextPage', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='previousPage', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - ], -) - -SharedDatasetTableItem = gql.ObjectType( - name='SharedDatasetTableItem', - fields=[ - gql.Field(name='tableUri', type=gql.String), - gql.Field(name='GlueTableName', type=gql.String), - ] -) diff --git a/backend/dataall/api/Objects/DatasetTableColumn/__init__.py b/backend/dataall/api/Objects/DatasetTableColumn/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/DatasetTableColumn/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/DatasetTableColumn/input_types.py b/backend/dataall/api/Objects/DatasetTableColumn/input_types.py deleted file mode 100644 index 24fbbdbca..000000000 --- a/backend/dataall/api/Objects/DatasetTableColumn/input_types.py +++ /dev/null @@ -1,20 +0,0 @@ -from ... 
import gql - -DatasetTableColumnFilter = gql.InputType( - name='DatasetTableColumnFilter', - arguments=[ - gql.Argument('term', gql.String), - gql.Argument('page', gql.Integer), - gql.Argument('pageSize', gql.Integer), - ], -) - -DatasetTableColumnInput = gql.InputType( - name='DatasetTableColumnInput', - arguments=[ - gql.Argument('description', gql.String), - gql.Argument('classification', gql.Integer), - gql.Argument('tags', gql.Integer), - gql.Argument('topics', gql.Integer), - ], -) diff --git a/backend/dataall/api/Objects/DatasetTableColumn/mutations.py b/backend/dataall/api/Objects/DatasetTableColumn/mutations.py deleted file mode 100644 index 012d83ea7..000000000 --- a/backend/dataall/api/Objects/DatasetTableColumn/mutations.py +++ /dev/null @@ -1,20 +0,0 @@ -from ... import gql -from .resolvers import * - -syncDatasetTableColumns = gql.MutationField( - name='syncDatasetTableColumns', - args=[gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('DatasetTableColumnSearchResult'), - resolver=sync_table_columns, -) - - -updateDatasetTableColumn = gql.MutationField( - name='updateDatasetTableColumn', - args=[ - gql.Argument(name='columnUri', type=gql.String), - gql.Argument(name='input', type=gql.Ref('DatasetTableColumnInput')), - ], - type=gql.Ref('DatasetTableColumn'), - resolver=update_table_column, -) diff --git a/backend/dataall/api/Objects/DatasetTableColumn/queries.py b/backend/dataall/api/Objects/DatasetTableColumn/queries.py deleted file mode 100644 index 4f5f05646..000000000 --- a/backend/dataall/api/Objects/DatasetTableColumn/queries.py +++ /dev/null @@ -1,12 +0,0 @@ -from ... import gql -from .resolvers import * - -listDatasetTableColumns = gql.QueryField( - name='listDatasetTableColumns', - args=[ - gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('DatasetTableColumnFilter')), - ], - type=gql.Ref('DatasetTableColumnSearchResult'), - resolver=list_table_columns, -) diff --git a/backend/dataall/api/Objects/DatasetTableColumn/resolvers.py b/backend/dataall/api/Objects/DatasetTableColumn/resolvers.py deleted file mode 100644 index 88bf2c728..000000000 --- a/backend/dataall/api/Objects/DatasetTableColumn/resolvers.py +++ /dev/null @@ -1,103 +0,0 @@ -from sqlalchemy import or_ - -from .... 
import db -from ....api.context import Context -from ....aws.handlers.service_handlers import Worker -from ....db import paginate, permissions, models -from ....db.api import ResourcePolicy - - -def list_table_columns( - context: Context, - source: models.DatasetTable, - tableUri: str = None, - filter: dict = None, -): - if source: - tableUri = source.tableUri - if not filter: - filter = {} - with context.engine.scoped_session() as session: - if not source: - source = db.api.DatasetTable.get_dataset_table_by_uri(session, tableUri) - q = ( - session.query(models.DatasetTableColumn) - .filter( - models.DatasetTableColumn.tableUri == tableUri, - models.DatasetTableColumn.deleted.is_(None), - ) - .order_by(models.DatasetTableColumn.columnType.asc()) - ) - term = filter.get('term') - if term: - q = q.filter( - or_( - models.DatasetTableColumn.label.ilike('%' + term + '%'), - models.DatasetTableColumn.description.ilike('%' + term + '%'), - ) - ).order_by(models.DatasetTableColumn.columnType.asc()) - - return paginate( - q, page=filter.get('page', 1), page_size=filter.get('pageSize', 65) - ).to_dict() - - -def sync_table_columns(context: Context, source, tableUri: str = None): - with context.engine.scoped_session() as session: - table: models.DatasetTable = db.api.DatasetTable.get_dataset_table_by_uri( - session, tableUri - ) - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=table.datasetUri, - permission_name=permissions.UPDATE_DATASET_TABLE, - ) - task = models.Task(action='glue.table.columns', targetUri=table.tableUri) - session.add(task) - Worker.process(engine=context.engine, task_ids=[task.taskUri], save_response=False) - return list_table_columns(context, source=table, tableUri=tableUri) - - -def resolve_terms(context, source: models.DatasetTableColumn, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - q = session.query(models.TermLink).filter( - models.TermLink.targetUri == source.columnUri - ) - return paginate(q, page=1, page_size=15).to_dict() - - -def update_table_column( - context: Context, source, columnUri: str = None, input: dict = None -): - with context.engine.scoped_session() as session: - column: models.DatasetTableColumn = session.query( - models.DatasetTableColumn - ).get(columnUri) - if not column: - raise db.exceptions.ObjectNotFound('Column', columnUri) - table: models.DatasetTable = db.api.DatasetTable.get_dataset_table_by_uri( - session, column.tableUri - ) - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=table.datasetUri, - permission_name=permissions.UPDATE_DATASET_TABLE, - ) - column.description = input.get('description', 'No description provided') - session.add(column) - session.commit() - - task = models.Task( - action='glue.table.update_column', targetUri=column.columnUri - ) - session.add(task) - session.commit() - - Worker.queue(engine=context.engine, task_ids=[task.taskUri]) - return column diff --git a/backend/dataall/api/Objects/DatasetTableColumn/schema.py b/backend/dataall/api/Objects/DatasetTableColumn/schema.py deleted file mode 100644 index d571fc9a6..000000000 --- a/backend/dataall/api/Objects/DatasetTableColumn/schema.py +++ /dev/null @@ -1,42 +0,0 @@ -from ... 
import gql -from .resolvers import * - - -DatasetTableColumn = gql.ObjectType( - name='DatasetTableColumn', - fields=[ - gql.Field(name='tableUri', type=gql.ID), - gql.Field(name='columnUri', type=gql.ID), - gql.Field(name='label', type=gql.String), - gql.Field(name='name', type=gql.String), - gql.Field(name='description', type=gql.String), - gql.Field(name='owner', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='GlueDatabaseName', type=gql.String), - gql.Field(name='GlueTableName', type=gql.String), - gql.Field(name='typeName', type=gql.String), - gql.Field(name='columnType', type=gql.String), - gql.Field(name='region', type=gql.String), - gql.Field(name='classification', type=gql.String), - gql.Field(name='topics', type=gql.ArrayType(gql.String)), - gql.Field(name='tags', type=gql.ArrayType(gql.String)), - gql.Field( - name='terms', type=gql.Ref('TermLinkSearchResults'), resolver=resolve_terms - ), - ], -) - - -DatasetTableColumnSearchResult = gql.ObjectType( - name='DatasetTableColumnSearchResult', - fields=[ - gql.Field(name='nodes', type=gql.ArrayType(DatasetTableColumn)), - gql.Field(name='count', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - ], -) diff --git a/backend/dataall/api/Objects/Environment/__init__.py b/backend/dataall/api/Objects/Environment/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/Environment/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/Environment/input_types.py b/backend/dataall/api/Objects/Environment/input_types.py deleted file mode 100644 index d47cf8f22..000000000 --- a/backend/dataall/api/Objects/Environment/input_types.py +++ /dev/null @@ -1,134 +0,0 @@ -from ... 
import gql -from ....api.constants import GraphQLEnumMapper, SortDirection - - -AwsEnvironmentInput = gql.InputType( - name='AwsEnvironmentInput', - arguments=[ - gql.Argument('AwsAccountId', gql.NonNullableType(gql.String)), - gql.Argument('region', gql.NonNullableType(gql.String)), - ], -) - -NewEnvironmentInput = gql.InputType( - name='NewEnvironmentInput', - arguments=[ - gql.Argument('label', gql.NonNullableType(gql.String)), - gql.Argument('organizationUri', gql.NonNullableType(gql.String)), - gql.Argument('SamlGroupName', gql.NonNullableType(gql.String)), - gql.Argument('tags', gql.ArrayType(gql.String)), - gql.Argument('description', gql.String), - gql.Argument('AwsAccountId', gql.NonNullableType(gql.String)), - gql.Argument('region', gql.NonNullableType(gql.String)), - gql.Argument('dashboardsEnabled', type=gql.Boolean), - gql.Argument('notebooksEnabled', type=gql.Boolean), - gql.Argument('mlStudiosEnabled', type=gql.Boolean), - gql.Argument('pipelinesEnabled', type=gql.Boolean), - gql.Argument('warehousesEnabled', type=gql.Boolean), - gql.Argument('vpcId', gql.String), - gql.Argument('privateSubnetIds', gql.ArrayType(gql.String)), - gql.Argument('publicSubnetIds', gql.ArrayType(gql.String)), - gql.Argument('EnvironmentDefaultIAMRoleName', gql.String), - gql.Argument('resourcePrefix', gql.String), - ], -) - -ModifyEnvironmentInput = gql.InputType( - name='ModifyEnvironmentInput', - arguments=[ - gql.Argument('label', gql.String), - gql.Argument('description', gql.String), - gql.Argument('tags', gql.ArrayType(gql.String)), - gql.Argument('SamlGroupName', gql.String), - gql.Argument('vpcId', gql.String), - gql.Argument('privateSubnetIds', gql.ArrayType(gql.String)), - gql.Argument('publicSubnetIds', gql.ArrayType(gql.String)), - gql.Argument('dashboardsEnabled', type=gql.Boolean), - gql.Argument('notebooksEnabled', type=gql.Boolean), - gql.Argument('mlStudiosEnabled', type=gql.Boolean), - gql.Argument('pipelinesEnabled', type=gql.Boolean), - gql.Argument('warehousesEnabled', type=gql.Boolean), - gql.Argument('resourcePrefix', gql.String), - ], -) - -EnableDataSubscriptionsInput = gql.InputType( - name='EnableDataSubscriptionsInput', - arguments=[ - gql.Argument('producersTopicArn', gql.String), - ], -) - - -class EnvironmentSortField(GraphQLEnumMapper): - created = 'created' - label = 'label' - - -EnvironmentSortCriteria = gql.InputType( - name='EnvironmentSortCriteria', - arguments=[ - gql.Argument( - name='field', type=gql.NonNullableType(EnvironmentSortField.toGraphQLEnum()) - ), - gql.Argument( - name='direction', type=gql.NonNullableType(SortDirection.toGraphQLEnum()) - ), - ], -) - -EnvironmentFilter = gql.InputType( - name='EnvironmentFilter', - arguments=[ - gql.Argument('term', gql.String), - gql.Argument('page', gql.Integer), - gql.Argument('displayArchived', gql.Boolean), - gql.Argument('roles', gql.ArrayType(gql.Ref('EnvironmentPermission'))), - gql.Argument('quicksight', gql.Boolean), - gql.Argument('sort', gql.ArrayType(EnvironmentSortCriteria)), - gql.Argument('pageSize', gql.Integer), - ], -) - - -EnvironmentDataItemFilter = gql.InputType( - name='EnvironmentDataItemFilter', - arguments=[ - gql.Argument('itemTypes', gql.ArrayType(gql.String)), - gql.Argument('term', gql.String), - gql.Argument('page', gql.Integer), - gql.Argument('pageSize', gql.Integer), - gql.Argument('uniqueShares', gql.Boolean) - ], -) - - -InviteGroupOnEnvironmentInput = gql.InputType( - name='InviteGroupOnEnvironmentInput', - arguments=[ - gql.Argument('permissions', gql.ArrayType(gql.String)), 
- gql.Argument('environmentUri', gql.NonNullableType(gql.String)), - gql.Argument('groupUri', gql.NonNullableType(gql.String)), - gql.Argument('environmentIAMRoleName', gql.String), - ], -) - -AddConsumptionRoleToEnvironmentInput = gql.InputType( - name='AddConsumptionRoleToEnvironmentInput', - arguments=[ - gql.Argument('consumptionRoleName', gql.NonNullableType(gql.String)), - gql.Argument('groupUri', gql.NonNullableType(gql.String)), - gql.Argument('IAMRoleArn', gql.NonNullableType(gql.String)), - gql.Argument('environmentUri', gql.NonNullableType(gql.String)), - ], -) - -ConsumptionRoleFilter = gql.InputType( - name='ConsumptionRoleFilter', - arguments=[ - gql.Argument('term', gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - gql.Argument('groupUri', gql.NonNullableType(gql.String)), - ], -) diff --git a/backend/dataall/api/Objects/Environment/mutations.py b/backend/dataall/api/Objects/Environment/mutations.py deleted file mode 100644 index 45932d280..000000000 --- a/backend/dataall/api/Objects/Environment/mutations.py +++ /dev/null @@ -1,110 +0,0 @@ -from .input_types import ( - ModifyEnvironmentInput, - NewEnvironmentInput, - EnableDataSubscriptionsInput, - InviteGroupOnEnvironmentInput, - AddConsumptionRoleToEnvironmentInput -) -from .resolvers import * - -createEnvironment = gql.MutationField( - name='createEnvironment', - args=[gql.Argument(name='input', type=gql.NonNullableType(NewEnvironmentInput))], - type=gql.Ref('Environment'), - resolver=create_environment, - test_scope='Environment', -) - -updateEnvironment = gql.MutationField( - name='updateEnvironment', - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=gql.NonNullableType(ModifyEnvironmentInput)), - ], - type=gql.Ref('Environment'), - resolver=update_environment, - test_scope='Environment', -) - -inviteGroupOnEnvironment = gql.MutationField( - name='inviteGroupOnEnvironment', - args=[ - gql.Argument( - name='input', type=gql.NonNullableType(InviteGroupOnEnvironmentInput) - ) - ], - type=gql.Ref('Environment'), - resolver=invite_group, -) - -addConsumptionRoleToEnvironment = gql.MutationField( - name='addConsumptionRoleToEnvironment', - args=[ - gql.Argument( - name='input', type=gql.NonNullableType(AddConsumptionRoleToEnvironmentInput) - ) - ], - type=gql.Ref('ConsumptionRole'), - resolver=add_consumption_role, -) - -updateGroupPermission = gql.MutationField( - name='updateGroupEnvironmentPermissions', - args=[ - gql.Argument( - name='input', type=gql.NonNullableType(InviteGroupOnEnvironmentInput) - ) - ], - type=gql.Ref('Environment'), - resolver=update_group_permissions, -) - -removeGroupFromEnvironment = gql.MutationField( - name='removeGroupFromEnvironment', - args=[ - gql.Argument('environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument('groupUri', type=gql.NonNullableType(gql.String)), - ], - type=gql.Ref('Environment'), - resolver=remove_group, -) - -removeConsumptionRoleFromEnvironment = gql.MutationField( - name='removeConsumptionRoleFromEnvironment', - args=[ - gql.Argument('environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument('consumptionRoleUri', type=gql.NonNullableType(gql.String)), - ], - type=gql.Boolean, - resolver=remove_consumption_role, -) - -deleteEnvironment = gql.MutationField( - name='deleteEnvironment', - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='deleteFromAWS', 
type=gql.Boolean), - ], - resolver=delete_environment, - type=gql.Boolean, -) - - -EnableDataSubscriptions = gql.MutationField( - name='enableDataSubscriptions', - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=EnableDataSubscriptionsInput), - ], - resolver=enable_subscriptions, - type=gql.Boolean, -) - -DisableDataSubscriptions = gql.MutationField( - name='DisableDataSubscriptions', - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - ], - resolver=disable_subscriptions, - type=gql.Boolean, -) diff --git a/backend/dataall/api/Objects/Environment/queries.py b/backend/dataall/api/Objects/Environment/queries.py deleted file mode 100644 index 0d593ea09..000000000 --- a/backend/dataall/api/Objects/Environment/queries.py +++ /dev/null @@ -1,202 +0,0 @@ -from .input_types import EnvironmentFilter -from .resolvers import * - -from .schema import Environment, EnvironmentSearchResult - - -getTrustAccount = gql.QueryField( - name='getTrustAccount', - type=gql.String, - resolver=get_trust_account, - test_scope='Environment', -) - -checkEnvironment = gql.QueryField( - name='checkEnvironment', - args=[gql.Argument(name='input', type=gql.Ref('AwsEnvironmentInput'))], - type=gql.String, - resolver=check_environment, - test_scope='Environment', -) - -getEnvironment = gql.QueryField( - name='getEnvironment', - args=[gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String))], - type=gql.Thunk(lambda: Environment), - resolver=get_environment, - test_scope='Environment', -) - - -listEnvironments = gql.QueryField( - name='listEnvironments', - args=[gql.Argument('filter', EnvironmentFilter)], - type=EnvironmentSearchResult, - resolver=list_environments, - test_scope='Environment', -) - -listEnvironmentNetworks = gql.QueryField( - name='listEnvironmentNetworks', - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('VpcFilter')), - ], - resolver=list_environment_networks, - type=gql.Ref('VpcSearchResult'), - test_scope='Environment', -) - - -listDatasetsCreatedInEnvironment = gql.QueryField( - name='listDatasetsCreatedInEnvironment', - type=gql.Ref('DatasetSearchResult'), - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('DatasetFilter')), - ], - resolver=list_datasets_created_in_environment, - test_scope='Dataset', -) - - -searchEnvironmentDataItems = gql.QueryField( - name='searchEnvironmentDataItems', - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('EnvironmentDataItemFilter')), - ], - resolver=list_shared_with_environment_data_items, - type=gql.Ref('EnvironmentPublishedItemSearchResults'), - test_scope='Dataset', -) - - -generateEnvironmentAccessToken = gql.QueryField( - name='generateEnvironmentAccessToken', - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='groupUri', type=gql.String), - ], - type=gql.String, - resolver=generate_environment_access_token, - test_scope='Environment', -) - - -getEnvironmentAssumeRoleUrl = gql.QueryField( - name='getEnvironmentAssumeRoleUrl', - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='groupUri', type=gql.String), - ], - type=gql.String, - resolver=get_environment_assume_role_url, - test_scope='Environment', 
-) - -listEnvironmentRedshiftClusters = gql.QueryField( - name='listEnvironmentClusters', - type=gql.Ref('RedshiftClusterSearchResult'), - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('RedshiftClusterFilter')), - ], - resolver=list_environment_redshift_clusters, -) - - -listEnvironmentInvitedGroups = gql.QueryField( - name='listEnvironmentInvitedGroups', - type=gql.Ref('GroupSearchResult'), - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('GroupFilter')), - ], - resolver=list_environment_invited_groups, -) - -listEnvironmentGroups = gql.QueryField( - name='listEnvironmentGroups', - type=gql.Ref('GroupSearchResult'), - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('GroupFilter')), - ], - resolver=list_environment_groups, -) - -listAllEnvironmentGroups = gql.QueryField( - name='listAllEnvironmentGroups', - type=gql.Ref('GroupSearchResult'), - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('GroupFilter')), - ], - resolver=list_all_environment_groups, -) - -listEnvironmentConsumptionRoles = gql.QueryField( - name='listEnvironmentConsumptionRoles', - type=gql.Ref('ConsumptionRoleSearchResult'), - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('ConsumptionRoleFilter')), - ], - resolver=list_environment_consumption_roles, -) - - -listAllEnvironmentConsumptionRoles = gql.QueryField( - name='listAllEnvironmentConsumptionRoles', - type=gql.Ref('ConsumptionRoleSearchResult'), - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('ConsumptionRoleFilter')), - ], - resolver=list_all_environment_consumption_roles, -) - -listEnvironmentGroupInvitationPermissions = gql.QueryField( - name='listEnvironmentGroupInvitationPermissions', - args=[ - gql.Argument(name='environmentUri', type=gql.String), - ], - type=gql.ArrayType(gql.Ref('Permission')), - resolver=list_environment_group_invitation_permissions, -) - - -getPivotRolePresignedUrl = gql.QueryField( - name='getPivotRolePresignedUrl', - args=[gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String))], - type=gql.String, - resolver=get_pivot_role_template, - test_scope='Environment', -) - -getCDKExecPolicyPresignedUrl = gql.QueryField( - name='getCDKExecPolicyPresignedUrl', - args=[gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String))], - type=gql.String, - resolver=get_cdk_exec_policy_template, - test_scope='Environment', -) - - -getPivotRoleExternalId = gql.QueryField( - name='getPivotRoleExternalId', - args=[gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String))], - type=gql.String, - resolver=get_external_id, - test_scope='Environment', -) - - -getPivotRoleName = gql.QueryField( - name='getPivotRoleName', - args=[gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String))], - type=gql.String, - resolver=get_pivot_role_name, - test_scope='Environment', -) diff --git a/backend/dataall/api/Objects/Environment/resolvers.py b/backend/dataall/api/Objects/Environment/resolvers.py deleted file mode 100644 index 03ce2d964..000000000 --- a/backend/dataall/api/Objects/Environment/resolvers.py +++ /dev/null @@ -1,782 +0,0 
@@ -import json -import logging -import os - -import boto3 -from botocore.config import Config -from botocore.exceptions import ClientError -from sqlalchemy import and_ - -from ..Organization.resolvers import * -from ..Stack import stack_helper -from ...constants import * -from ....aws.handlers.sts import SessionHelper -from ....aws.handlers.quicksight import Quicksight -from ....aws.handlers.cloudformation import CloudFormation -from ....aws.handlers.iam import IAM -from ....aws.handlers.parameter_store import ParameterStoreManager -from ....db import exceptions, permissions -from ....db.api import Environment, ResourcePolicy, Stack -from ....utils.naming_convention import ( - NamingConventionService, - NamingConventionPattern, -) - -log = logging.getLogger() - - -def get_trust_account(context: Context, source, **kwargs): - current_account = SessionHelper.get_account() - print('current_account = ', current_account) - return current_account - - -def get_pivot_role_as_part_of_environment(context: Context, source, **kwargs): - ssm_param = ParameterStoreManager.get_parameter_value(region=os.getenv('AWS_REGION', 'eu-west-1'), parameter_path=f"/dataall/{os.getenv('envname', 'local')}/pivotRole/enablePivotRoleAutoCreate") - return True if ssm_param == "True" else False - - -def check_environment(context: Context, source, account_id, region): - """ Checks necessary resources for environment deployment. - - Check CDKToolkit exists in Account assuming cdk_look_up_role - - Check Pivot Role exists in Account if pivot_role_as_part_of_environment is False - Args: - input: environment creation input - """ - pivot_role_as_part_of_environment = get_pivot_role_as_part_of_environment(context, source) - log.info(f"Creating environment. Pivot role as part of environment = {pivot_role_as_part_of_environment}") - ENVNAME = os.environ.get('envname', 'local') - if ENVNAME == 'pytest': - return 'CdkRoleName' - - cdk_look_up_role_arn = SessionHelper.get_cdk_look_up_role_arn( - accountid=account_id, region=region - ) - cdk_role_name = CloudFormation.check_existing_cdk_toolkit_stack( - AwsAccountId=account_id, region=region - ) - if not pivot_role_as_part_of_environment: - log.info("Check if PivotRole exist in the account") - pivot_role_arn = SessionHelper.get_delegation_role_arn(accountid=account_id) - role = IAM.get_role(account_id=account_id, role_arn=pivot_role_arn, role=cdk_look_up_role_arn) - if not role: - raise exceptions.AWSResourceNotFound( - action='CHECK_PIVOT_ROLE', - message='Pivot Role has not been created in the Environment AWS Account', - ) - - return cdk_role_name - - -def create_environment(context: Context, source, input=None): - if input.get('SamlGroupName') and input.get('SamlGroupName') not in context.groups: - raise exceptions.UnauthorizedOperation( - action=permissions.LINK_ENVIRONMENT, - message=f'User: {context.username} is not a member of the group {input["SamlGroupName"]}', - ) - - with context.engine.scoped_session() as session: - cdk_role_name = check_environment(context, source, - account_id=input.get('AwsAccountId'), - region=input.get('region') - ) - input['cdk_role_name'] = cdk_role_name - env = Environment.create_environment( - session=session, - username=context.username, - groups=context.groups, - uri=input.get('organizationUri'), - data=input, - check_perm=True, - ) - Stack.create_stack( - session=session, - environment_uri=env.environmentUri, - target_type='environment', - target_uri=env.environmentUri, - target_label=env.label, - ) - stack_helper.deploy_stack(context, 
targetUri=env.environmentUri) - env.userRoleInEnvironment = EnvironmentPermission.Owner.value - return env - - -def update_environment( - context: Context, source, environmentUri: str = None, input: dict = None -): - if input.get('SamlGroupName') and input.get('SamlGroupName') not in context.groups: - raise exceptions.UnauthorizedOperation( - action=permissions.LINK_ENVIRONMENT, - message=f'User: {context.username} is not part of the group {input["SamlGroupName"]}', - ) - - with context.engine.scoped_session() as session: - - environment = db.api.Environment.get_environment_by_uri(session, environmentUri) - cdk_role_name = check_environment(context, source, - account_id=environment.AwsAccountId, - region=environment.region - ) - - previous_resource_prefix = environment.resourcePrefix - - environment = db.api.Environment.update_environment( - session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data=input, - check_perm=True, - ) - if input.get('dashboardsEnabled') or ( - environment.resourcePrefix != previous_resource_prefix - ): - stack_helper.deploy_stack( - context=context, targetUri=environment.environmentUri - ) - return environment - - -def invite_group(context: Context, source, input): - with context.engine.scoped_session() as session: - environment, environment_group = db.api.Environment.invite_group( - session=session, - username=context.username, - groups=context.groups, - uri=input['environmentUri'], - data=input, - check_perm=True, - ) - - stack_helper.deploy_stack(context=context, targetUri=environment.environmentUri) - - return environment - - -def add_consumption_role(context: Context, source, input): - with context.engine.scoped_session() as session: - env = db.api.Environment.get_environment_by_uri(session, input['environmentUri']) - role = IAM.get_role(env.AwsAccountId, input['IAMRoleArn']) - if not role: - raise exceptions.AWSResourceNotFound( - action='ADD_CONSUMPTION_ROLE', - message=f"{input['IAMRoleArn']} does not exist in this account", - ) - consumption_role = db.api.Environment.add_consumption_role( - session=session, - username=context.username, - groups=context.groups, - uri=input['environmentUri'], - data=input, - check_perm=True, - ) - - return consumption_role - - -def update_group_permissions(context, source, input): - with context.engine.scoped_session() as session: - environment = db.api.Environment.update_group_permissions( - session=session, - username=context.username, - groups=context.groups, - uri=input['environmentUri'], - data=input, - check_perm=True, - ) - - stack_helper.deploy_stack(context=context, targetUri=environment.environmentUri) - - return environment - - -def remove_group(context: Context, source, environmentUri=None, groupUri=None): - with context.engine.scoped_session() as session: - environment = db.api.Environment.remove_group( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data={'groupUri': groupUri}, - check_perm=True, - ) - - stack_helper.deploy_stack(context=context, targetUri=environment.environmentUri) - - return environment - - -def remove_consumption_role(context: Context, source, environmentUri=None, consumptionRoleUri=None): - with context.engine.scoped_session() as session: - status = db.api.Environment.remove_consumption_role( - session=session, - username=context.username, - groups=context.groups, - uri=consumptionRoleUri, - data={'environmentUri': environmentUri}, - check_perm=True, - ) - - return status - - -def 
list_environment_invited_groups( - context: Context, source, environmentUri=None, filter=None -): - if filter is None: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Environment.paginated_environment_invited_groups( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data=filter, - check_perm=True, - ) - - -def list_environment_groups(context: Context, source, environmentUri=None, filter=None): - if filter is None: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Environment.paginated_user_environment_groups( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data=filter, - check_perm=True, - ) - - -def list_all_environment_groups( - context: Context, source, environmentUri=None, filter=None -): - if filter is None: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Environment.paginated_all_environment_groups( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data=filter, - check_perm=True, - ) - - -def list_environment_consumption_roles( - context: Context, source, environmentUri=None, filter=None -): - if filter is None: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Environment.paginated_user_environment_consumption_roles( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data=filter, - check_perm=True, - ) - - -def list_all_environment_consumption_roles( - context: Context, source, environmentUri=None, filter=None -): - if filter is None: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Environment.paginated_all_environment_consumption_roles( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data=filter, - check_perm=True, - ) - - -def list_environment_group_invitation_permissions( - context: Context, - source, - environmentUri=None, -): - with context.engine.scoped_session() as session: - return db.api.Environment.list_group_invitation_permissions( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - ) - - -def list_environments(context: Context, source, filter=None): - if filter is None: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Environment.paginated_user_environments( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=filter, - check_perm=True, - ) - - -def list_environment_networks( - context: Context, source, environmentUri=None, filter=None -): - if filter is None: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Environment.paginated_environment_networks( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data=filter, - check_perm=True, - ) - - -def get_parent_organization(context: Context, source, **kwargs): - org = get_organization(context, source, organizationUri=source.organizationUri) - return org - - -def resolve_vpc_list(context: Context, source, **kwargs): - with context.engine.scoped_session() as session: - return db.api.Vpc.get_environment_vpc_list( - session=session, environment_uri=source.environmentUri - ) - - -def get_environment(context: Context, source, environmentUri: str = None): - with context.engine.scoped_session() as session: - 
ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=environmentUri, - permission_name=permissions.GET_ENVIRONMENT, - ) - environment = db.api.Environment.get_environment_by_uri(session, environmentUri) - return environment - - -def resolve_user_role(context: Context, source: models.Environment): - if source.owner == context.username: - return EnvironmentPermission.Owner.value - elif source.SamlGroupName in context.groups: - return EnvironmentPermission.Admin.value - else: - with context.engine.scoped_session() as session: - env_group = ( - session.query(models.EnvironmentGroup) - .filter( - and_( - models.EnvironmentGroup.environmentUri == source.environmentUri, - models.EnvironmentGroup.groupUri.in_(context.groups), - ) - ) - .first() - ) - if env_group: - return EnvironmentPermission.Invited.value - return EnvironmentPermission.NotInvited.value - - -def list_environment_group_permissions( - context, source, environmentUri: str = None, groupUri: str = None -): - with context.engine.scoped_session() as session: - return db.api.Environment.list_group_permissions( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data={'groupUri': groupUri}, - check_perm=True, - ) - - -def list_datasets_created_in_environment( - context: Context, source, environmentUri: str = None, filter: dict = None -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Environment.paginated_environment_datasets( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data=filter, - check_perm=True, - ) - - -def list_shared_with_environment_data_items( - context: Context, source, environmentUri: str = None, filter: dict = None -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Environment.paginated_shared_with_environment_datasets( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data=filter, - check_perm=True, - ) - - -def _get_environment_group_aws_session( - session, username, groups, environment, groupUri=None -): - if groupUri and groupUri not in groups: - raise exceptions.UnauthorizedOperation( - action='ENVIRONMENT_AWS_ACCESS', - message=f'User: {username} is not member of the team {groupUri}', - ) - pivot_session = SessionHelper.remote_session(environment.AwsAccountId) - if not groupUri: - if environment.SamlGroupName in groups: - aws_session = SessionHelper.get_session( - base_session=pivot_session, - role_arn=environment.EnvironmentDefaultIAMRoleArn, - ) - else: - raise exceptions.UnauthorizedOperation( - action='ENVIRONMENT_AWS_ACCESS', - message=f'User: {username} is not member of the environment admins team {environment.SamlGroupName}', - ) - else: - env_group: models.EnvironmentGroup = ( - session.query(models.EnvironmentGroup) - .filter( - models.EnvironmentGroup.environmentUri == environment.environmentUri, - models.EnvironmentGroup.groupUri == groupUri, - ) - .first() - ) - if not env_group: - raise exceptions.UnauthorizedOperation( - action='ENVIRONMENT_AWS_ACCESS', - message=f'Team {groupUri} is not invited to the environment {environment.name}', - ) - else: - aws_session = SessionHelper.get_session( - base_session=pivot_session, - role_arn=env_group.environmentIAMRoleArn, - ) - if not aws_session: - raise exceptions.AWSResourceNotFound( - action='ENVIRONMENT_AWS_ACCESS', - message=f'Failed to 
start an AWS session on environment {environment.AwsAccountId}', - ) - return aws_session - - -def get_environment_assume_role_url( - context: Context, - source, - environmentUri: str = None, - groupUri: str = None, -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=environmentUri, - permission_name=permissions.CREDENTIALS_ENVIRONMENT, - ) - environment = db.api.Environment.get_environment_by_uri(session, environmentUri) - url = SessionHelper.get_console_access_url( - _get_environment_group_aws_session( - session=session, - username=context.username, - groups=context.groups, - environment=environment, - groupUri=groupUri, - ), - region=environment.region, - ) - return url - - -def generate_environment_access_token( - context, source, environmentUri: str = None, groupUri: str = None -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=environmentUri, - permission_name=permissions.CREDENTIALS_ENVIRONMENT, - ) - environment = db.api.Environment.get_environment_by_uri(session, environmentUri) - c = _get_environment_group_aws_session( - session=session, - username=context.username, - groups=context.groups, - environment=environment, - groupUri=groupUri, - ).get_credentials() - credentials = { - 'AccessKey': c.access_key, - 'SessionKey': c.secret_key, - 'sessionToken': c.token, - } - return json.dumps(credentials) - - -def get_environment_stack(context: Context, source: models.Environment, **kwargs): - return stack_helper.get_stack_with_cfn_resources( - context=context, - targetUri=source.environmentUri, - environmentUri=source.environmentUri, - ) - - -def delete_environment( - context: Context, source, environmentUri: str = None, deleteFromAWS: bool = False -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=environmentUri, - permission_name=permissions.DELETE_ENVIRONMENT, - ) - environment = db.api.Environment.get_environment_by_uri(session, environmentUri) - - db.api.Environment.delete_environment( - session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data={'environment': environment}, - check_perm=True, - ) - - if deleteFromAWS: - stack_helper.delete_stack( - context=context, - target_uri=environmentUri, - accountid=environment.AwsAccountId, - cdk_role_arn=environment.CDKRoleArn, - region=environment.region, - target_type='environment', - ) - - return True - - -def list_environment_redshift_clusters( - context: Context, source, environmentUri: str = None, filter: dict = None -): - if not filter: - filter = dict() - with context.engine.scoped_session() as session: - return Environment.paginated_environment_redshift_clusters( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data=filter, - check_perm=True, - ) - - -def enable_subscriptions( - context: Context, source, environmentUri: str = None, input: dict = None -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=environmentUri, - permission_name=permissions.ENABLE_ENVIRONMENT_SUBSCRIPTIONS, - ) - environment = 
db.api.Environment.get_environment_by_uri(session, environmentUri) - if input.get('producersTopicArn'): - environment.subscriptionsProducersTopicName = input.get('producersTopicArn') - environment.subscriptionsProducersTopicImported = True - - else: - environment.subscriptionsProducersTopicName = NamingConventionService( - target_label=f'{environment.label}-producers-topic', - target_uri=environment.environmentUri, - pattern=NamingConventionPattern.DEFAULT, - resource_prefix=environment.resourcePrefix, - ).build_compliant_name() - - environment.subscriptionsConsumersTopicName = NamingConventionService( - target_label=f'{environment.label}-consumers-topic', - target_uri=environment.environmentUri, - pattern=NamingConventionPattern.DEFAULT, - resource_prefix=environment.resourcePrefix, - ).build_compliant_name() - environment.subscriptionsConsumersTopicImported = False - environment.subscriptionsEnabled = True - session.commit() - stack_helper.deploy_stack(context=context, targetUri=environment.environmentUri) - return True - - -def disable_subscriptions(context: Context, source, environmentUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=environmentUri, - permission_name=permissions.ENABLE_ENVIRONMENT_SUBSCRIPTIONS, - ) - environment = db.api.Environment.get_environment_by_uri(session, environmentUri) - - environment.subscriptionsConsumersTopicName = None - environment.subscriptionsConsumersTopicImported = False - environment.subscriptionsProducersTopicName = None - environment.subscriptionsProducersTopicImported = False - environment.subscriptionsEnabled = False - session.commit() - stack_helper.deploy_stack(context=context, targetUri=environment.environmentUri) - return True - - -def get_pivot_role_template(context: Context, source, organizationUri=None): - from ....utils import Parameter - - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=organizationUri, - permission_name=permissions.GET_ORGANIZATION, - ) - pivot_role_bucket = Parameter().get_parameter( - env=os.getenv('envname', 'local'), path='s3/resources_bucket_name' - ) - pivot_role_bucket_key = Parameter().get_parameter( - env=os.getenv('envname', 'local'), path='s3/pivot_role_prefix' - ) - if not pivot_role_bucket or not pivot_role_bucket_key: - raise exceptions.AWSResourceNotFound( - action='GET_PIVOT_ROLE_TEMPLATE', - message='Pivot Yaml template file could not be found on Amazon S3 bucket', - ) - try: - s3_client = boto3.client( - 's3', - region_name=os.getenv('AWS_REGION', 'eu-central-1'), - config=Config( - signature_version='s3v4', s3={'addressing_style': 'virtual'} - ), - ) - presigned_url = s3_client.generate_presigned_url( - 'get_object', - Params=dict( - Bucket=pivot_role_bucket, - Key=pivot_role_bucket_key, - ), - ExpiresIn=15 * 60, - ) - return presigned_url - except ClientError as e: - log.error( - f'Failed to get presigned URL for pivot role template due to: {e}' - ) - raise e - - -def get_cdk_exec_policy_template(context: Context, source, organizationUri=None): - from ....utils import Parameter - - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=organizationUri, - 
permission_name=permissions.GET_ORGANIZATION, - ) - cdk_exec_policy_bucket = Parameter().get_parameter( - env=os.getenv('envname', 'local'), path='s3/resources_bucket_name' - ) - cdk_exec_policy_bucket_key = Parameter().get_parameter( - env=os.getenv('envname', 'local'), path='s3/cdk_exec_policy_prefix' - ) - if not cdk_exec_policy_bucket or not cdk_exec_policy_bucket_key: - raise exceptions.AWSResourceNotFound( - action='GET_CDK_EXEC_POLICY_TEMPLATE', - message='CDK Exec Yaml template file could not be found on Amazon S3 bucket', - ) - try: - s3_client = boto3.client( - 's3', - region_name=os.getenv('AWS_REGION', 'eu-central-1'), - config=Config( - signature_version='s3v4', s3={'addressing_style': 'virtual'} - ), - ) - presigned_url = s3_client.generate_presigned_url( - 'get_object', - Params=dict( - Bucket=cdk_exec_policy_bucket, - Key=cdk_exec_policy_bucket_key, - ), - ExpiresIn=15 * 60, - ) - return presigned_url - except ClientError as e: - log.error( - f'Failed to get presigned URL for CDK Exec role template due to: {e}' - ) - raise e - - -def get_external_id(context: Context, source, organizationUri=None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=organizationUri, - permission_name=permissions.GET_ORGANIZATION, - ) - external_id = SessionHelper.get_external_id_secret() - if not external_id: - raise exceptions.AWSResourceNotFound( - action='GET_EXTERNAL_ID', - message='External Id could not be found on AWS Secretsmanager', - ) - return external_id - - -def get_pivot_role_name(context: Context, source, organizationUri=None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=organizationUri, - permission_name=permissions.GET_ORGANIZATION, - ) - pivot_role_name = SessionHelper.get_delegation_role_name() - if not pivot_role_name: - raise exceptions.AWSResourceNotFound( - action='GET_PIVOT_ROLE_NAME', - message='Pivot role name could not be found on AWS Systems Manager - Parameter Store', - ) - return pivot_role_name diff --git a/backend/dataall/api/Objects/Environment/schema.py b/backend/dataall/api/Objects/Environment/schema.py deleted file mode 100644 index 528f7b649..000000000 --- a/backend/dataall/api/Objects/Environment/schema.py +++ /dev/null @@ -1,176 +0,0 @@ -from .resolvers import * -from ...constants import EnvironmentPermission - -EnvironmentUserPermission = gql.ObjectType( - name='EnvironmentUserPermission', - fields=[ - gql.Field(name='userName', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='userRoleInEnvironment', type=gql.Ref('EnvironmentPermission')), - ], -) - -EnvironmentUserPermissionSearchResult = gql.ObjectType( - name='EnvironmentUserPermissionSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='nodes', type=gql.ArrayType(EnvironmentUserPermission)), - ], -) - - -EnvironmentGroupPermission = gql.ObjectType( - name='EnvironmentGroupPermission', - fields=[ - gql.Field(name='groupUri', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field( - name='groupRoleInEnvironment', type=EnvironmentPermission.toGraphQLEnum() - ), - gql.Field(name='Group', type=gql.Ref('Group')), - ], -) - 
-EnvironmentGroupPermissionSearchResult = gql.ObjectType( - name='EnvironmentGroupPermissionSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='nodes', type=gql.ArrayType(EnvironmentGroupPermission)), - ], -) - - -Environment = gql.ObjectType( - name='Environment', - fields=[ - gql.Field(name='environmentUri', type=gql.ID), - gql.Field(name='label', type=gql.String), - gql.Field(name='name', type=gql.String), - gql.Field(name='description', type=gql.String), - gql.Field(name='owner', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='deleted', type=gql.String), - gql.Field(name='tags', type=gql.ArrayType(gql.String)), - gql.Field(name='admins', type=gql.ArrayType(gql.String)), - gql.Field(name='environmentType', type=gql.String), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='region', type=gql.String), - gql.Field(name='SamlGroupName', type=gql.String), - gql.Field(name='resourcePrefix', type=gql.String), - gql.Field(name='EnvironmentDefaultIAMRoleArn', type=gql.String), - gql.Field(name='EnvironmentDefaultIAMRoleName', type=gql.String), - gql.Field(name='EnvironmentDefaultIAMRoleImported', type=gql.Boolean), - gql.Field(name='datasets', type=gql.String), - gql.Field( - name='organization', - type=gql.Ref('Organization'), - resolver=get_parent_organization, - ), - gql.Field( - 'userRoleInEnvironment', - type=EnvironmentPermission.toGraphQLEnum(), - resolver=resolve_user_role, - ), - gql.Field('validated', type=gql.Boolean), - gql.Field('dashboardsEnabled', type=gql.Boolean), - gql.Field('notebooksEnabled', type=gql.Boolean), - gql.Field('mlStudiosEnabled', type=gql.Boolean), - gql.Field('pipelinesEnabled', type=gql.Boolean), - gql.Field('warehousesEnabled', type=gql.Boolean), - gql.Field('roleCreated', type=gql.Boolean), - gql.Field('isOrganizationDefaultEnvironment', type=gql.Boolean), - gql.Field('stack', type=gql.Ref('Stack'), resolver=get_environment_stack), - gql.Field('subscriptionsEnabled', type=gql.Boolean), - gql.Field('subscriptionsProducersTopicImported', type=gql.Boolean), - gql.Field('subscriptionsConsumersTopicImported', type=gql.Boolean), - gql.Field('subscriptionsConsumersTopicName', type=gql.String), - gql.Field('subscriptionsProducersTopicName', type=gql.String), - gql.Field('EnvironmentDefaultBucketName', type=gql.String), - gql.Field('EnvironmentDefaultAthenaWorkGroup', type=gql.String), - gql.Field( - name='networks', - type=gql.ArrayType(gql.Ref('Vpc')), - resolver=resolve_vpc_list, - ), - ], -) - - -EnvironmentSearchResult = gql.ObjectType( - name='EnvironmentSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='nodes', type=gql.ArrayType(Environment)), - gql.Field(name='pageSize', type=gql.Integer), - gql.Field(name='nextPage', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='previousPage', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - ], -) - - -EnvironmentPublishedItem = gql.ObjectType( - name='EnvironmentPublishedItem', - fields=[ - gql.Field(name='shareUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='datasetName', type=gql.NonNullableType(gql.String)), - gql.Field(name='itemAccess', type=gql.NonNullableType(gql.String)), - gql.Field(name='itemType', 
type=gql.NonNullableType(gql.String)), - gql.Field(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='principalId', type=gql.NonNullableType(gql.String)), - gql.Field(name='environmentName', type=gql.NonNullableType(gql.String)), - gql.Field(name='organizationUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='organizationName', type=gql.NonNullableType(gql.String)), - gql.Field(name='created', type=gql.NonNullableType(gql.String)), - gql.Field(name='GlueDatabaseName', type=gql.String), - gql.Field(name='GlueTableName', type=gql.String), - gql.Field(name='S3AccessPointName', type=gql.String), - ], -) - - -EnvironmentPublishedItemSearchResults = gql.ObjectType( - name='EnvironmentPublishedItemSearchResults', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(EnvironmentPublishedItem)), - ], -) - -ConsumptionRole = gql.ObjectType( - name='ConsumptionRole', - fields=[ - gql.Field(name='consumptionRoleUri', type=gql.String), - gql.Field(name='consumptionRoleName', type=gql.String), - gql.Field(name='groupUri', type=gql.String), - gql.Field(name='environmentUri', type=gql.String), - gql.Field(name='IAMRoleArn', type=gql.String), - gql.Field(name='IAMRoleName', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='deleted', type=gql.String), - ], -) - -ConsumptionRoleSearchResult = gql.ObjectType( - name='ConsumptionRoleSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(ConsumptionRole)), - ], -) diff --git a/backend/dataall/api/Objects/Feed/__init__.py b/backend/dataall/api/Objects/Feed/__init__.py deleted file mode 100644 index 88cdd7cf2..000000000 --- a/backend/dataall/api/Objects/Feed/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from . import input_types, schema, resolvers, mutations, queries - -__all__ = ['input_types', 'mutations', 'queries', 'resolvers', 'schema'] diff --git a/backend/dataall/api/Objects/Feed/input_types.py b/backend/dataall/api/Objects/Feed/input_types.py deleted file mode 100644 index abdc15f06..000000000 --- a/backend/dataall/api/Objects/Feed/input_types.py +++ /dev/null @@ -1,14 +0,0 @@ -from ... import gql - -FeedMessageInput = gql.InputType( - name='FeedMessageInput', arguments=[gql.Argument(name='content', type=gql.String)] -) - -FeedMessageFilter = gql.InputType( - name='FeedMessageFilter', - arguments=[ - gql.Argument(name='term', type=gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) diff --git a/backend/dataall/api/Objects/Feed/mutations.py b/backend/dataall/api/Objects/Feed/mutations.py deleted file mode 100644 index 852ca45df..000000000 --- a/backend/dataall/api/Objects/Feed/mutations.py +++ /dev/null @@ -1,14 +0,0 @@ -from ... 
import gql -from .resolvers import * - - -postFeedMessage = gql.MutationField( - name='postFeedMessage', - resolver=post_message, - args=[ - gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=gql.Ref('FeedMessageInput')), - ], - type=gql.Ref('FeedMessage'), -) diff --git a/backend/dataall/api/Objects/Feed/queries.py b/backend/dataall/api/Objects/Feed/queries.py deleted file mode 100644 index aab6ff5a6..000000000 --- a/backend/dataall/api/Objects/Feed/queries.py +++ /dev/null @@ -1,13 +0,0 @@ -from ... import gql -from .resolvers import * - - -getFeed = gql.QueryField( - name='getFeed', - resolver=get_feed, - args=[ - gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), - ], - type=gql.Ref('Feed'), -) diff --git a/backend/dataall/api/Objects/Feed/resolvers.py b/backend/dataall/api/Objects/Feed/resolvers.py deleted file mode 100644 index a6c0535de..000000000 --- a/backend/dataall/api/Objects/Feed/resolvers.py +++ /dev/null @@ -1,106 +0,0 @@ -from sqlalchemy import or_ - -from ....api.context import Context -from ....db import paginate, models - - -class Feed: - def __init__(self, targetUri: str = None, targetType: str = None): - self._targetUri = targetUri - self._targetType = targetType - - @property - def targetUri(self): - return self._targetUri - - @property - def targetType(self): - return self._targetType - - -def resolve_feed_target_type(obj, *_): - if isinstance(obj, models.DatasetTableColumn): - return 'DatasetTableColumn' - elif isinstance(obj, models.Worksheet): - return 'Worksheet' - elif isinstance(obj, models.DataPipeline): - return 'DataPipeline' - elif isinstance(obj, models.DatasetTable): - return 'DatasetTable' - elif isinstance(obj, models.Dataset): - return 'Dataset' - elif isinstance(obj, models.DatasetStorageLocation): - return 'DatasetStorageLocation' - elif isinstance(obj, models.Dashboard): - return 'Dashboard' - else: - return None - - -def resolve_target(context: Context, source: Feed, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - model = { - 'Dataset': models.Dataset, - 'DatasetTable': models.DatasetTable, - 'DatasetTableColumn': models.DatasetTableColumn, - 'DatasetStorageLocation': models.DatasetStorageLocation, - 'Dashboard': models.Dashboard, - 'DataPipeline': models.DataPipeline, - 'Worksheet': models.Worksheet, - }[source.targetType] - target = session.query(model).get(source.targetUri) - return target - - -def get_feed( - context: Context, - source, - targetUri: str = None, - targetType: str = None, - filter: dict = None, -) -> Feed: - return Feed(targetUri=targetUri, targetType=targetType) - - -def post_message( - context: Context, - source, - targetUri: str = None, - targetType: str = None, - input: dict = None, -): - with context.engine.scoped_session() as session: - m = models.FeedMessage( - targetUri=targetUri, - targetType=targetType, - creator=context.username, - content=input.get('content'), - ) - session.add(m) - return m - - -def resolve_messages(context: Context, source: Feed, filter: dict = None): - if not source: - return None - if not filter: - filter = {} - with context.engine.scoped_session() as session: - q = session.query(models.FeedMessage).filter( - models.FeedMessage.targetUri == source.targetUri - ) - term = filter.get('term') - if term: - q = q.filter( - or_( - 
models.FeedMessage.content.ilike('%' + term + '%'), - models.FeedMessage.creator.ilike('%' + term + '%'), - ) - ) - q = q.order_by(models.FeedMessage.created.desc()) - - return paginate( - q, page=filter.get('page', 1), page_size=filter.get('pageSize', 10) - ).to_dict() diff --git a/backend/dataall/api/Objects/Feed/schema.py b/backend/dataall/api/Objects/Feed/schema.py deleted file mode 100644 index d58918716..000000000 --- a/backend/dataall/api/Objects/Feed/schema.py +++ /dev/null @@ -1,56 +0,0 @@ -from ... import gql -from .resolvers import * - - -FeedTarget = gql.Union( - name='FeedTarget', - types=[ - gql.Ref('Dataset'), - gql.Ref('DatasetTable'), - gql.Ref('DatasetTableColumn'), - gql.Ref('DatasetStorageLocation'), - gql.Ref('DataPipeline'), - gql.Ref('Worksheet'), - gql.Ref('Dashboard'), - ], - resolver=resolve_feed_target_type, -) - -Feed = gql.ObjectType( - name='Feed', - fields=[ - gql.Field(name='feedTargetUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='feedTargetType', type=gql.NonNullableType(gql.String)), - gql.Field(name='target', resolver=resolve_target, type=gql.Ref('FeedTarget')), - gql.Field( - name='messages', - args=[gql.Argument(name='filter', type=gql.Ref('FeedMessageFilter'))], - resolver=resolve_messages, - type=gql.Ref('FeedMessages'), - ), - ], -) - - -FeedMessage = gql.ObjectType( - name='FeedMessage', - fields=[ - gql.Field(name='feedMessageUri', type=gql.ID), - gql.Field(name='creator', type=gql.NonNullableType(gql.String)), - gql.Field(name='content', type=gql.String), - gql.Field(name='created', type=gql.String), - ], -) - - -FeedMessages = gql.ObjectType( - name='FeedMessages', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('FeedMessage'))), - ], -) diff --git a/backend/dataall/api/Objects/Glossary/__init__.py b/backend/dataall/api/Objects/Glossary/__init__.py deleted file mode 100644 index 0c4ec6166..000000000 --- a/backend/dataall/api/Objects/Glossary/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - queries, - mutations, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/Glossary/input_types.py b/backend/dataall/api/Objects/Glossary/input_types.py deleted file mode 100644 index b60b82bd2..000000000 --- a/backend/dataall/api/Objects/Glossary/input_types.py +++ /dev/null @@ -1,110 +0,0 @@ -from ... 
import gql - -CreateGlossaryInput = gql.InputType( - name='CreateGlossaryInput', - arguments=[ - gql.Argument(name='label', type=gql.NonNullableType(gql.String)), - gql.Argument(name='readme', type=gql.NonNullableType(gql.String)), - gql.Argument(name='status', type=gql.String), - gql.Argument(name='admin', type=gql.String), - ], -) - -UpdateGlossaryInput = gql.InputType( - name='UpdateGlossaryInput', - arguments=[ - gql.Argument(name='label', type=gql.String), - gql.Argument(name='readme', type=gql.String), - gql.Argument(name='status', type=gql.String), - gql.Argument(name='admin', type=gql.String), - ], -) - - -CreateCategoryInput = gql.InputType( - name='CreateCategoryInput', - arguments=[ - gql.Argument(name='label', type=gql.NonNullableType(gql.String)), - gql.Argument(name='readme', type=gql.NonNullableType(gql.String)), - gql.Argument(name='status', type=gql.String), - ], -) - -UpdateCategoryInput = gql.InputType( - name='UpdateCategoryInput', - arguments=[ - gql.Argument(name='label', type=gql.String), - gql.Argument(name='readme', type=gql.String), - gql.Argument(name='status', type=gql.String), - ], -) - -CreateTermInput = gql.InputType( - name='CreateTermInput', - arguments=[ - gql.Argument(name='label', type=gql.NonNullableType(gql.String)), - gql.Argument(name='readme', type=gql.NonNullableType(gql.String)), - gql.Argument(name='status', type=gql.String), - ], -) - - -UpdateTermInput = gql.InputType( - name='UpdateTermInput', - arguments=[ - gql.Argument(name='label', type=gql.String), - gql.Argument(name='readme', type=gql.String), - gql.Argument(name='status', type=gql.String), - ], -) - - -GlossaryFilter = gql.InputType( - name='GlossaryFilter', - arguments=[ - gql.Argument(name='term', type=gql.String), - gql.Argument(name='status', type=gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) - -CategoryFilter = gql.InputType( - name='CategoryFilter', - arguments=[ - gql.Argument(name='term', type=gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='status', type=gql.String), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) - - -TermFilter = gql.InputType( - name='TermFilter', - arguments=[ - gql.Argument(name='status', type=gql.String), - gql.Argument(name='term', type=gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) - -GlossaryTermTargetFilter = gql.InputType( - name='GlossaryTermTargetFilter', - arguments=[ - gql.Argument(name='term', type=gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) - -GlossaryNodeSearchFilter = gql.InputType( - name='GlossaryNodeSearchFilter', - arguments=[ - gql.Argument(name='term', type=gql.String), - gql.Argument(name='nodeType', type=gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) diff --git a/backend/dataall/api/Objects/Glossary/mutations.py b/backend/dataall/api/Objects/Glossary/mutations.py deleted file mode 100644 index 3bc3c5e38..000000000 --- a/backend/dataall/api/Objects/Glossary/mutations.py +++ /dev/null @@ -1,125 +0,0 @@ -from ... 
import gql -from .resolvers import * - -createGlossary = gql.MutationField( - name='createGlossary', - args=[gql.Argument(name='input', type=gql.Ref('CreateGlossaryInput'))], - resolver=create_glossary, - type=gql.Ref('Glossary'), -) - - -UpdateGlossary = gql.MutationField( - name='updateGlossary', - resolver=update_node, - args=[ - gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=gql.Ref('UpdateGlossaryInput')), - ], - type=gql.Ref('Glossary'), -) - -deleteGlossary = gql.MutationField( - name='deleteGlossary', - resolver=delete_node, - args=[ - gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String)), - ], - type=gql.Integer, -) - - -CreateCategory = gql.MutationField( - name='createCategory', - args=[ - gql.Argument(name='parentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=gql.Ref('CreateCategoryInput')), - ], - resolver=create_category, - type=gql.Ref('Category'), -) - -updateCategory = gql.MutationField( - name='updateCategory', - resolver=update_node, - args=[ - gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=gql.Ref('UpdateCategoryInput')), - ], - type=gql.Ref('Category'), -) - -deleteCategory = gql.MutationField( - name='deleteCategory', - resolver=delete_node, - args=[ - gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String)), - ], - type=gql.Integer, -) - - -linkTerm = gql.MutationField( - name='linkTerm', - resolver=link_term, - args=[ - gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), - ], - type=gql.Ref('GlossaryTermLink'), -) - -requestLink = gql.MutationField( - name='requestLink', - resolver=request_link, - args=[ - gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), - ], - type=gql.Ref('GlossaryTermLink'), -) - - -createTerm = gql.MutationField( - name='createTerm', - type=gql.Ref('Term'), - resolver=create_term, - args=[ - gql.Argument(name='parentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=gql.Ref('CreateTermInput')), - ], -) - -updateTerm = gql.MutationField( - name='updateTerm', - type=gql.Ref('Term'), - resolver=update_node, - args=[ - gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=gql.Ref('UpdateTermInput')), - ], -) - -deleteTerm = gql.MutationField( - name='deleteTerm', - type=gql.Integer, - resolver=delete_node, - args=[gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String))], -) - - -approveTermAssociation = gql.MutationField( - name='approveTermAssociation', - type=gql.Boolean, - resolver=approve_term_association, - args=[gql.Argument(name='linkUri', type=gql.NonNullableType(gql.String))], -) - -dismissTermAssociation = gql.MutationField( - name='dismissTermAssociation', - type=gql.Boolean, - resolver=dismiss_term_association, - args=[gql.Argument(name='linkUri', type=gql.NonNullableType(gql.String))], -) diff --git a/backend/dataall/api/Objects/Glossary/queries.py b/backend/dataall/api/Objects/Glossary/queries.py deleted file mode 100644 index d200de484..000000000 --- a/backend/dataall/api/Objects/Glossary/queries.py +++ /dev/null @@ -1,79 +0,0 @@ -from ... 
import gql -from .resolvers import * - -getGlossary = gql.QueryField( - name='getGlossary', - args=[gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String))], - resolver=get_node, - type=gql.Ref('Glossary'), -) - - -getCategory = gql.QueryField( - name='getCategory', - resolver=get_node, - args=[gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('Category'), -) - - -getTerm = gql.QueryField( - name='getTerm', - resolver=get_node, - args=[gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('Term'), -) - -listGlossaries = gql.QueryField( - name='listGlossaries', - type=gql.Ref('GlossarySearchResult'), - args=[gql.Argument(name='filter', type=gql.Ref('GlossaryFilter'))], - resolver=list_glossaries, -) - - -SearchTerms = gql.QueryField( - name='searchTerms', - doc='Search glossary terms', - type=gql.Ref('TermSearchResult'), - args=[gql.Argument(name='filter', type=gql.Ref('TermFilter'))], - resolver=search_terms, -) - - -searchGlossaryHierarchy = gql.QueryField( - name='searchGlossaryHierarchy', - doc='Search glossary terms in the hierarchy', - type=gql.Ref('GlossaryChildrenSearchResult'), - args=[gql.Argument(name='filter', type=gql.Ref('TermFilter'))], - resolver=hierarchical_search, -) - - -SearchGlossary = gql.QueryField( - name='searchGlossary', - doc='Search glossary ', - type=gql.Ref('GlossaryChildrenSearchResult'), - args=[gql.Argument(name='filter', type=gql.Ref('GlossaryNodeSearchFilter'))], - resolver=search_terms, -) - - -getGlossaryTermLink = gql.QueryField( - name='getGlossaryTermLink', - doc='Returns a TermLink from its linkUri', - type=gql.Ref('GlossaryTermLink'), - resolver=get_link, - args=[gql.Argument(name='linkUri', type=gql.NonNullableType(gql.String))], -) - -listAssetLinkedTerms = gql.QueryField( - name='listAssetLinkedTerms', - doc='return all terms associated with a data asset', - args=[ - gql.Argument(name='uri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('GlossaryTermTargetFilter')), - ], - resolver=list_asset_linked_terms, - type=gql.Ref('TermLinkSearchResults'), -) diff --git a/backend/dataall/api/Objects/Glossary/resolvers.py b/backend/dataall/api/Objects/Glossary/resolvers.py deleted file mode 100644 index 801bd27dc..000000000 --- a/backend/dataall/api/Objects/Glossary/resolvers.py +++ /dev/null @@ -1,494 +0,0 @@ -from datetime import datetime - -from sqlalchemy import and_, or_, asc - -from .... 
import db -from ....api.context import Context -from ....db import paginate, exceptions, models -from ....searchproxy import upsert_dataset -from ....searchproxy import upsert_table -from ....searchproxy.indexers import upsert_folder, upsert_dashboard -from ....api.constants import ( - GlossaryRole -) - - -def resolve_glossary_node(obj: models.GlossaryNode, *_): - if obj.nodeType == 'G': - return 'Glossary' - elif obj.nodeType == 'C': - return 'Category' - elif obj.nodeType == 'T': - return 'Term' - else: - return None - - -def create_glossary( - context: Context, source, input: dict = None -) -> models.GlossaryNode: - with context.engine.scoped_session() as session: - return db.api.Glossary.create_glossary( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=input, - check_perm=True, - ) - - -def tree(context: Context, source: models.GlossaryNode): - if not source: - return None - adjency_list = {} - with context.engine.scoped_session() as session: - q = session.query(models.GlossaryNode).filter( - models.GlossaryNode.path.startswith(f'{source.path}/') - ) - for node in q: - if not adjency_list.get(node.parentUri): - adjency_list[node.parentUri] = [] - - -def node_tree(context: Context, source: models.GlossaryNode, filter: dict = None): - if not source: - return None - if not filter: - filter = {} - with context.engine.scoped_session() as session: - q = ( - session.query(models.GlossaryNode) - .filter(models.GlossaryNode.path.startswith(source.path)) - .filter(models.GlossaryNode.deleted.is_(None)) - .order_by(asc(models.GlossaryNode.path)) - ) - term = filter.get('term') - nodeType = filter.get('nodeType') - if term: - q = q.filter( - or_( - models.GlossaryNode.label.ilike(term), - models.GlossaryNode.readme.ilike(term), - ) - ) - if nodeType: - q = q.filter(models.GlossaryNode.nodeType == nodeType) - - return paginate( - q, page_size=filter.get('pageSize', 10), page=filter.get('page', 1) - ).to_dict() - - -def list_node_children( - context: Context, source: models.GlossaryNode, filter: dict = None -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Glossary.list_node_children(session, source, filter) - - -def create_category( - context: Context, source, parentUri: str = None, input: dict = None -): - with context.engine.scoped_session() as session: - return db.api.Glossary.create_category( - session=session, - username=context.username, - groups=context.groups, - uri=parentUri, - data=input, - check_perm=True, - ) - - -def create_term(context: Context, source, parentUri: str = None, input: dict = None): - with context.engine.scoped_session() as session: - return db.api.Glossary.create_term( - session=session, - username=context.username, - groups=context.groups, - uri=parentUri, - data=input, - check_perm=True, - ) - - -def list_glossaries(context: Context, source, filter: dict = None): - if filter is None: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Glossary.list_glossaries( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=filter, - check_perm=True, - ) - - -def resolve_categories( - context: Context, source: models.GlossaryNode, filter: dict = None -): - if not source: - return None - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Glossary.list_categories( - session=session, - username=context.username, - groups=context.groups, - uri=source.nodeUri, - data=filter, - 
check_perm=True, - ) - - -def resolve_terms(context: Context, source: models.GlossaryNode, filter: dict = None): - if not source: - return None - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Glossary.list_terms( - session=session, - username=context.username, - groups=context.groups, - uri=source.nodeUri, - data=filter, - check_perm=True, - ) - - -def update_node( - context: Context, source, nodeUri: str = None, input: dict = None -) -> models.GlossaryNode: - with context.engine.scoped_session() as session: - return db.api.Glossary.update_node( - session, - username=context.username, - groups=context.groups, - uri=nodeUri, - data=input, - check_perm=True, - ) - - -def get_node(context: Context, source, nodeUri: str = None): - with context.engine.scoped_session() as session: - node: models.GlossaryNode = session.query(models.GlossaryNode).get(nodeUri) - if not node: - raise exceptions.ObjectNotFound('Node', nodeUri) - return node - - -def resolve_user_role(context: Context, source: models.GlossaryNode, **kwargs): - if not source: - return None - if source.admin in context.groups: - return GlossaryRole.Admin.value - return GlossaryRole.NoPermission.value - - -def delete_node(context: Context, source, nodeUri: str = None) -> bool: - with context.engine.scoped_session() as session: - return db.api.Glossary.delete_node( - session, - username=context.username, - groups=context.groups, - uri=nodeUri, - data=None, - check_perm=True, - ) - - -def hierarchical_search(context: Context, source, filter: dict = None): - if not filter: - filter = {} - - with context.engine.scoped_session() as session: - return db.api.Glossary.hierarchical_search( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=filter, - check_perm=True, - ) - - -def resolve_link(context, source, targetUri: str = None): - if not source: - return None - with context.engine.scoped_session() as session: - link = ( - session.query(models.TermLink) - .filter( - and_( - models.TermLink.nodeUri == source.nodeUri, - models.TermLink.targetUri == targetUri, - ) - ) - .first() - ) - if not link: - link = { - 'nodeUri': source.nodeUri, - 'targetUri': targetUri, - 'created': datetime.now().isoformat(), - 'owner': context.username, - 'approvedByOwner': False, - 'approvedBySteward': False, - } - - return link - - -def search_terms(context: Context, source, filter: dict = None): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Glossary.search_terms( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=filter, - check_perm=True, - ) - - -def request_link( - context: Context, - source, - nodeUri: str = None, - targetUri: str = None, - targetType: str = None, -): - with context.engine.scoped_session() as session: - return db.api.Glossary.link_term( - session=session, - username=context.username, - groups=context.groups, - uri=nodeUri, - data={ - 'targetUri': targetUri, - 'targetType': targetType, - 'approvedByOwner': True, - 'approvedBySteward': False, - }, - check_perm=True, - ) - - -def link_term( - context: Context, - source, - nodeUri: str = None, - targetUri: str = None, - targetType: str = None, -): - with context.engine.scoped_session() as session: - return db.api.Glossary.link_term( - session=session, - username=context.username, - groups=context.groups, - uri=nodeUri, - data={ - 'targetUri': targetUri, - 'targetType': targetType, - 'approvedByOwner': True, - 
'approvedBySteward': True, - }, - check_perm=True, - ) - - -def resolve_term_glossary(context, source: models.GlossaryNode, **kwargs): - with context.engine.scoped_session() as session: - parentUri = source.path.split('/')[1] - return session.query(models.GlossaryNode).get(parentUri) - - -def get_link(context: Context, source, linkUri: str = None): - with context.engine.scoped_session() as session: - link = session.query(models.TermLink).get(linkUri) - if not link: - raise exceptions.ObjectNotFound('Link', linkUri) - return link - - -def target_union_resolver(obj, *_): - if isinstance(obj, models.DatasetTableColumn): - return 'DatasetTableColumn' - elif isinstance(obj, models.DatasetTable): - return 'DatasetTable' - elif isinstance(obj, models.Dataset): - return 'Dataset' - elif isinstance(obj, models.DatasetStorageLocation): - return 'DatasetStorageLocation' - elif isinstance(obj, models.Dashboard): - return 'Dashboard' - else: - return None - - -def resolve_link_target(context, source, **kwargs): - with context.engine.scoped_session() as session: - model = { - 'Dataset': models.Dataset, - 'DatasetTable': models.DatasetTable, - 'Column': models.DatasetTableColumn, - 'DatasetStorageLocation': models.DatasetStorageLocation, - 'Dashboard': models.Dashboard, - }[source.targetType] - target = session.query(model).get(source.targetUri) - return target - - -def resolve_term_associations( - context, source: models.GlossaryNode, filter: dict = None -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Glossary.list_term_associations( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data={'source': source, 'filter': filter}, - check_perm=True, - ) - - -def resolve_stats(context, source: models.GlossaryNode, **kwargs): - - with context.engine.scoped_session() as session: - categories = ( - session.query(models.GlossaryNode) - .filter( - and_( - models.GlossaryNode.path.startswith(source.path), - models.GlossaryNode.nodeType == 'C', - models.GlossaryNode.deleted.is_(None), - ) - ) - .count() - ) - terms = ( - session.query(models.GlossaryNode) - .filter( - and_( - models.GlossaryNode.path.startswith(source.path), - models.GlossaryNode.nodeType == 'T', - models.GlossaryNode.deleted.is_(None), - ) - ) - .count() - ) - - associations = ( - session.query(models.TermLink) - .join( - models.GlossaryNode, - models.GlossaryNode.nodeType == models.TermLink.nodeUri, - ) - .filter(models.GlossaryNode.path.startswith(source.path)) - .count() - ) - - return {'categories': categories, 'terms': terms, 'associations': associations} - - -def list_asset_linked_terms( - context: Context, source, uri: str = None, filter: dict = None -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - q = ( - session.query(models.TermLink) - .join( - models.GlossaryNode, - models.GlossaryNode.nodeUri == models.TermLink.nodeUri, - ) - .filter(models.TermLink.targetUri == uri) - ) - term = filter.get('term') - if term: - q = q.filter( - or_( - models.GlossaryNode.label.ilike(term), - models.GlossaryNode.readme.ilike(term), - ) - ) - return paginate( - q, page=filter.get('page', 1), page_size=filter.get('pageSize', 10) - ).to_dict() - - -def resolve_link_node(context: Context, source: models.TermLink, **kwargs): - with context.engine.scoped_session() as session: - term = session.query(models.GlossaryNode).get(source.nodeUri) - return term - - -def approve_term_association(context: Context, source, linkUri: str = None): - 
updated = False - with context.engine.scoped_session() as session: - link: models.TermLink = session.query(models.TermLink).get(linkUri) - if not link: - raise exceptions.ObjectNotFound('Link', linkUri) - verify_term_association_approver_role( - session, context.username, context.groups, link - ) - if not link.approvedBySteward: - link.approvedBySteward = True - updated = True - reindex(context, linkUri=linkUri) - return updated - - -def dismiss_term_association(context: Context, source, linkUri: str = None): - updated = False - with context.engine.scoped_session() as session: - link: models.TermLink = session.query(models.TermLink).get(linkUri) - if not link: - raise exceptions.ObjectNotFound('Link', linkUri) - verify_term_association_approver_role( - session, context.username, context.groups, link - ) - if link.approvedBySteward: - link.approvedBySteward = False - updated = True - reindex(context, linkUri=linkUri) - return updated - - -def verify_term_association_approver_role(session, username, groups, link): - glossary_node = session.query(models.GlossaryNode).get(link.nodeUri) - if glossary_node.owner != username and glossary_node.admin not in groups: - raise exceptions.UnauthorizedOperation( - 'ASSOCIATE_GLOSSARY_TERM', - f'User: {username} is not allowed to manage glossary term associations', - ) - - -def reindex(context, linkUri): - with context.engine.scoped_session() as session: - link: models.TermLink = session.query(models.TermLink).get(linkUri) - if not link: - return - target = resolve_link_target(context, source=link) - if isinstance(target, models.Dataset): - upsert_dataset(session=session, es=context.es, datasetUri=link.targetUri) - elif isinstance(target, models.DatasetTable): - upsert_table(session=session, es=context.es, tableUri=link.targetUri) - elif isinstance(target, models.DatasetStorageLocation): - upsert_folder(session=session, es=context.es, locationUri=link.targetUri) - elif isinstance(target, models.Dashboard): - upsert_dashboard(session=session, es=context.es, dashboardUri=link.targetUri) diff --git a/backend/dataall/api/Objects/Glossary/schema.py b/backend/dataall/api/Objects/Glossary/schema.py deleted file mode 100644 index 36fd1b758..000000000 --- a/backend/dataall/api/Objects/Glossary/schema.py +++ /dev/null @@ -1,289 +0,0 @@ -from ... 
import gql -from .resolvers import * -from ...constants import GlossaryRole - -GlossaryNode = gql.Union( - name='GlossaryNode', - types=[ - gql.Ref('Glossary'), - gql.Ref('Category'), - gql.Ref('Term'), - ], - resolver=resolve_glossary_node, -) - -GlossaryChildrenSearchResult = gql.ObjectType( - name='GlossaryChildrenSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('GlossaryNode'))), - ], -) - -Glossary = gql.ObjectType( - name='Glossary', - fields=[ - gql.Field(name='nodeUri', type=gql.ID), - gql.Field(name='parentUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='status', type=gql.String), - gql.Field(name='owner', type=gql.NonNullableType(gql.String)), - gql.Field(name='path', type=gql.NonNullableType(gql.String)), - gql.Field(name='label', type=gql.NonNullableType(gql.String)), - gql.Field(name='name', type=gql.NonNullableType(gql.String)), - gql.Field(name='admin', type=gql.String), - gql.Field( - name='userRoleForGlossary', - type=GlossaryRole.toGraphQLEnum(), - resolver=resolve_user_role, - ), - gql.Field(name='readme', type=gql.String), - gql.Field(name='created', type=gql.NonNullableType(gql.String)), - gql.Field(name='updated', type=gql.String), - gql.Field(name='deleted', type=gql.String), - gql.Field(name='isMatch', type=gql.Boolean), - gql.Field( - name='assetLink', - args=[gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String))], - resolver=resolve_link, - type=gql.Ref('GlossaryTermLink'), - ), - gql.Field( - name='stats', resolver=resolve_stats, type=gql.Ref('GlossaryNodeStatistics') - ), - gql.Field( - resolver=node_tree, - args=[ - gql.Argument(name='filter', type=gql.Ref('GlossaryNodeSearchFilter')) - ], - name='tree', - type=gql.Ref('GlossaryChildrenSearchResult'), - ), - gql.Field( - resolver=list_node_children, - args=[ - gql.Argument(name='filter', type=gql.Ref('GlossaryNodeSearchFilter')) - ], - name='children', - type=gql.Ref('GlossaryChildrenSearchResult'), - ), - gql.Field( - name='categories', - args=[gql.Argument(name='filter', type=gql.Ref('CategoryFilter'))], - resolver=resolve_categories, - type=gql.Ref('CategorySearchResult'), - ), - gql.Field( - name='associations', - args=[ - gql.Argument(name='filter', type=gql.Ref('GlossaryTermTargetFilter')) - ], - resolver=resolve_term_associations, - type=gql.Ref('TermLinkSearchResults'), - ), - ], -) - - -Category = gql.ObjectType( - name='Category', - fields=[ - gql.Field(name='nodeUri', type=gql.ID), - gql.Field(name='parentUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='owner', type=gql.NonNullableType(gql.String)), - gql.Field(name='path', type=gql.NonNullableType(gql.String)), - gql.Field(name='label', type=gql.NonNullableType(gql.String)), - gql.Field(name='status', type=gql.NonNullableType(gql.String)), - gql.Field(name='name', type=gql.NonNullableType(gql.String)), - gql.Field(name='readme', type=gql.String), - gql.Field(name='created', type=gql.NonNullableType(gql.String)), - gql.Field(name='updated', type=gql.String), - gql.Field(name='deleted', type=gql.String), - gql.Field(name='isMatch', type=gql.Boolean), - gql.Field( - name='assetLink', - args=[gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String))], - resolver=resolve_link, - type=gql.Ref('GlossaryTermLink'), - ), - gql.Field( - 
name='stats', resolver=resolve_stats, type=gql.Ref('GlossaryNodeStatistics') - ), - gql.Field( - resolver=list_node_children, - args=[ - gql.Argument(name='filter', type=gql.Ref('GlossaryNodeSearchFilter')) - ], - name='children', - type=gql.Ref('GlossaryChildrenSearchResult'), - ), - gql.Field( - name='categories', - resolver=resolve_categories, - args=[ - gql.Argument(name='filter', type=gql.Ref('CategoryFilter')), - ], - type=gql.Ref('CategorySearchResult'), - ), - gql.Field( - name='terms', - resolver=resolve_terms, - args=[ - gql.Argument(name='filter', type=gql.Ref('TermFilter')), - ], - type=gql.Ref('TermSearchResult'), - ), - gql.Field( - name='associations', - args=[ - gql.Argument(name='filter', type=gql.Ref('GlossaryTermTargetFilter')) - ], - resolver=resolve_term_associations, - type=gql.Ref('TermLinkSearchResults'), - ), - ], -) - -Term = gql.ObjectType( - name='Term', - fields=[ - gql.Field(name='nodeUri', type=gql.ID), - gql.Field(name='parentUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='owner', type=gql.NonNullableType(gql.String)), - gql.Field(name='path', type=gql.NonNullableType(gql.String)), - gql.Field(name='label', type=gql.NonNullableType(gql.String)), - gql.Field(name='name', type=gql.NonNullableType(gql.String)), - gql.Field(name='status', type=gql.NonNullableType(gql.String)), - gql.Field(name='readme', type=gql.String), - gql.Field(name='created', type=gql.NonNullableType(gql.String)), - gql.Field(name='updated', type=gql.String), - gql.Field(name='deleted', type=gql.String), - gql.Field(name='isMatch', type=gql.Boolean), - gql.Field( - name='assetLink', - args=[gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String))], - resolver=resolve_link, - type=gql.Ref('GlossaryTermLink'), - ), - gql.Field( - resolver=list_node_children, - args=[ - gql.Argument(name='filter', type=gql.Ref('GlossaryNodeSearchFilter')) - ], - name='children', - type=gql.Ref('GlossaryChildrenSearchResult'), - ), - gql.Field( - name='stats', resolver=resolve_stats, type=gql.Ref('GlossaryNodeStatistics') - ), - gql.Field( - name='glossary', type=gql.Ref('Glossary'), resolver=resolve_term_glossary - ), - gql.Field( - name='associations', - args=[ - gql.Argument(name='filter', type=gql.Ref('GlossaryTermTargetFilter')) - ], - resolver=resolve_term_associations, - type=gql.Ref('TermLinkSearchResults'), - ), - ], -) - -TermLinkSearchResults = gql.ObjectType( - name='TermLinkSearchResults', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('GlossaryTermLink'))), - ], -) - - -TermSearchResult = gql.ObjectType( - name='TermSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('Term'))), - ], -) - - -CategorySearchResult = gql.ObjectType( - name='CategorySearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('Category'))), - ], -) 
- - -GlossarySearchResult = gql.ObjectType( - name='GlossarySearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('Glossary'))), - ], -) - -GlossaryTermLinkTarget = gql.Union( - name='GlossaryTermLinkTarget', - types=[ - gql.Ref('Dataset'), - gql.Ref('DatasetTable'), - gql.Ref('DatasetStorageLocation'), - gql.Ref('DatasetTableColumn'), - gql.Ref('Dashboard'), - ], - resolver=target_union_resolver, -) - -GlossaryTermLink = gql.ObjectType( - 'GlossaryTermLink', - fields=[ - gql.Field(name='linkUri', type=gql.ID), - gql.Field(name='created', type=gql.NonNullableType(gql.String)), - gql.Field(name='updated', type=gql.String), - gql.Field(name='deleted', type=gql.String), - gql.Field(name='owner', type=gql.String), - gql.Field(name='nodeUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='targetUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='targetType', type=gql.NonNullableType(gql.String)), - gql.Field(name='approvedByOwner', type=gql.NonNullableType(gql.Boolean)), - gql.Field(name='approvedBySteward', type=gql.NonNullableType(gql.Boolean)), - gql.Field(name='term', resolver=resolve_link_node, type=gql.Ref('Term')), - gql.Field( - name='target', - resolver=resolve_link_target, - type=gql.Ref('GlossaryTermLinkTarget'), - ), - ], -) - - -GlossaryNodeStatistics = gql.ObjectType( - name='GlossaryNodeStatistics', - fields=[ - gql.Field(name='categories', type=gql.Integer), - gql.Field(name='terms', type=gql.Integer), - gql.Field(name='associations', type=gql.Integer), - ], -) diff --git a/backend/dataall/api/Objects/Group/__init__.py b/backend/dataall/api/Objects/Group/__init__.py deleted file mode 100644 index b26d14d51..000000000 --- a/backend/dataall/api/Objects/Group/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from . import input_types, mutations, queries, resolvers, schema - -__all__ = ['resolvers', 'input_types', 'schema', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/Group/input_types.py b/backend/dataall/api/Objects/Group/input_types.py deleted file mode 100644 index 9cccb014c..000000000 --- a/backend/dataall/api/Objects/Group/input_types.py +++ /dev/null @@ -1,18 +0,0 @@ -from ... import gql - -GroupFilter = gql.InputType( - name='GroupFilter', - arguments=[ - gql.Argument('term', gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) - -CognitoGroupFilter = gql.InputType( - name='CognitoGroupFilter', - arguments=[ - gql.Argument(name='type', type=gql.String), - gql.Argument(name='uri', type=gql.String), - ], -) diff --git a/backend/dataall/api/Objects/Group/queries.py b/backend/dataall/api/Objects/Group/queries.py deleted file mode 100644 index 5cbf484ff..000000000 --- a/backend/dataall/api/Objects/Group/queries.py +++ /dev/null @@ -1,44 +0,0 @@ -from ... 
import gql -from .resolvers import get_group, list_datasets_owned_by_env_group, list_data_items_shared_with_env_group, list_cognito_groups - -getGroup = gql.QueryField( - name='getGroup', - args=[gql.Argument(name='groupUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('Group'), - resolver=get_group, -) - - -listDatasetsOwnedByEnvGroup = gql.QueryField( - name='listDatasetsOwnedByEnvGroup', - type=gql.Ref('DatasetSearchResult'), - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='groupUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('DatasetFilter')), - ], - resolver=list_datasets_owned_by_env_group, - test_scope='Dataset', -) - - -listDataItemsSharedWithEnvGroup = gql.QueryField( - name='listDataItemsSharedWithEnvGroup', - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='groupUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('EnvironmentDataItemFilter')), - ], - resolver=list_data_items_shared_with_env_group, - type=gql.Ref('EnvironmentPublishedItemSearchResults'), - test_scope='Dataset', -) - -listCognitoGroups = gql.QueryField( - name='listCognitoGroups', - args=[ - gql.Argument(name='filter', type=gql.Ref('CognitoGroupFilter')), - ], - type=gql.ArrayType(gql.Ref('CognitoGroup')), - resolver=list_cognito_groups -) diff --git a/backend/dataall/api/Objects/Group/resolvers.py b/backend/dataall/api/Objects/Group/resolvers.py deleted file mode 100644 index 9192b6b59..000000000 --- a/backend/dataall/api/Objects/Group/resolvers.py +++ /dev/null @@ -1,108 +0,0 @@ -import os -import logging -from .... import db -from ....db import exceptions -from ....db.models import Group -from ....aws.handlers.cognito import Cognito - - -log = logging.getLogger() - - -def resolve_group_environment_permissions(context, source, environmentUri): - if not source: - return None - with context.engine.scoped_session() as session: - return db.api.Environment.list_group_permissions( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data={'groupUri': source.groupUri}, - check_perm=True, - ) - - -def resolve_group_tenant_permissions(context, source): - if not source: - return None - with context.engine.scoped_session() as session: - return db.api.TenantPolicy.list_group_tenant_permissions( - session=session, - username=context.username, - groups=context.groups, - uri=source.groupUri, - data=None, - check_perm=True, - ) - - -def get_group(context, source, groupUri): - if not groupUri: - exceptions.RequiredParameter('groupUri') - return Group(groupUri=groupUri, name=groupUri, label=groupUri) - - -def list_datasets_owned_by_env_group( - context, source, environmentUri: str = None, groupUri: str = None, filter: dict = None -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Environment.paginated_environment_group_datasets( - session=session, - username=context.username, - groups=context.groups, - envUri=environmentUri, - groupUri=groupUri, - data=filter, - check_perm=True, - ) - - -def list_data_items_shared_with_env_group( - context, source, environmentUri: str = None, groupUri: str = None, filter: dict = None -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Environment.paginated_shared_with_environment_group_datasets( - session=session, - username=context.username, - 
groups=context.groups, - envUri=environmentUri, - groupUri=groupUri, - data=filter, - check_perm=True, - ) - - -def list_cognito_groups(context, source, filter: dict = None): - envname = os.getenv('envname', 'local') - if envname in ['dkrcompose']: - return [{"groupName": 'Engineers'}, {"groupName": 'Scientists'}, {"groupName": 'Requesters'}, {"groupName": 'Producers'}, {"groupName": 'Consumers'}] - current_region = os.getenv('AWS_REGION', 'eu-west-1') - groups = Cognito.list_cognito_groups(envname=envname, region=current_region) - category, category_uri = filter.get("type"), filter.get("uri") - if category and category_uri: - if category == 'environment': - with context.engine.scoped_session() as session: - invited_groups = db.api.Environment.query_all_environment_groups( - session=session, - uri=category_uri, - filter=None, - ).all() - if category == 'organization': - with context.engine.scoped_session() as session: - organization = db.api.Organization.get_organization_by_uri(session, category_uri) - invited_groups = db.api.Organization.query_organization_groups( - session=session, - uri=organization.organizationUri, - filter=None, - ).all() - invited_group_uris = [item.groupUri for item in invited_groups] - res = [] - for group in groups: - if group['GroupName'] not in invited_group_uris: - res.append({"groupName": group['GroupName']}) - return res diff --git a/backend/dataall/api/Objects/Group/schema.py b/backend/dataall/api/Objects/Group/schema.py deleted file mode 100644 index 75f5350a5..000000000 --- a/backend/dataall/api/Objects/Group/schema.py +++ /dev/null @@ -1,55 +0,0 @@ -from .resolvers import * -from ...constants import * - - -Group = gql.ObjectType( - name='Group', - fields=[ - gql.Field(name='groupUri', type=gql.String), - gql.Field(name='invitedBy', type=gql.String), - gql.Field(name='owner', type=gql.String), - gql.Field(name='label', type=gql.String), - gql.Field(name='name', type=gql.String), - gql.Field(name='description', type=gql.String), - gql.Field(name='tags', type=gql.ArrayType(gql.String)), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='environmentIAMRoleArn', type=gql.String), - gql.Field(name='environmentIAMRoleName', type=gql.String), - gql.Field(name='environmentAthenaWorkGroup', type=gql.String), - gql.Field( - name='environmentPermissions', - args=[ - gql.Argument( - name='environmentUri', type=gql.NonNullableType(gql.String) - ) - ], - type=gql.ArrayType(gql.Ref('Permission')), - resolver=resolve_group_environment_permissions, - ), - gql.Field( - name='tenantPermissions', - type=gql.ArrayType(gql.Ref('Permission')), - resolver=resolve_group_tenant_permissions, - ), - ], -) - -GroupSearchResult = gql.ObjectType( - name='GroupSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(Group)), - ], -) - -CognitoGroup = gql.ObjectType( - name='CognitoGroup', - fields=[ - gql.Field(name='groupName', type=gql.String), - ], -) diff --git a/backend/dataall/api/Objects/KeyValueTag/__init__.py b/backend/dataall/api/Objects/KeyValueTag/__init__.py deleted file mode 100644 index 7a595b458..000000000 --- a/backend/dataall/api/Objects/KeyValueTag/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . 
import ( - input_types, - queries, - resolvers, - schema, - mutations, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/KeyValueTag/input_types.py b/backend/dataall/api/Objects/KeyValueTag/input_types.py deleted file mode 100644 index 48627fe25..000000000 --- a/backend/dataall/api/Objects/KeyValueTag/input_types.py +++ /dev/null @@ -1,19 +0,0 @@ -from ... import gql - -KeyValueTagInput = gql.InputType( - name='KeyValueTagInput', - arguments=[ - gql.Argument(name='key', type=gql.NonNullableType(gql.String)), - gql.Argument(name='value', type=gql.NonNullableType(gql.String)), - gql.Argument(name='cascade', type=gql.NonNullableType(gql.Boolean)), - ], -) - -UpdateKeyValueTagsInput = gql.InputType( - name='UpdateKeyValueTagsInput', - arguments=[ - gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), - gql.Argument(name='tags', type=gql.ArrayType(gql.Ref('KeyValueTagInput'))), - ], -) diff --git a/backend/dataall/api/Objects/KeyValueTag/mutations.py b/backend/dataall/api/Objects/KeyValueTag/mutations.py deleted file mode 100644 index 683a98a3b..000000000 --- a/backend/dataall/api/Objects/KeyValueTag/mutations.py +++ /dev/null @@ -1,14 +0,0 @@ -from ... import gql -from .resolvers import * - - -updateKeyValueTags = gql.MutationField( - name='updateKeyValueTags', - type=gql.ArrayType(gql.Ref('KeyValueTag')), - args=[ - gql.Argument( - name='input', type=gql.NonNullableType(gql.Ref('UpdateKeyValueTagsInput')) - ), - ], - resolver=update_key_value_tags, -) diff --git a/backend/dataall/api/Objects/KeyValueTag/queries.py b/backend/dataall/api/Objects/KeyValueTag/queries.py deleted file mode 100644 index 97afce6f0..000000000 --- a/backend/dataall/api/Objects/KeyValueTag/queries.py +++ /dev/null @@ -1,13 +0,0 @@ -from ... import gql -from .resolvers import * - - -listKeyValueTags = gql.QueryField( - name='listKeyValueTags', - type=gql.ArrayType(gql.Ref('KeyValueTag')), - args=[ - gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), - ], - resolver=list_key_value_tags, -) diff --git a/backend/dataall/api/Objects/KeyValueTag/resolvers.py b/backend/dataall/api/Objects/KeyValueTag/resolvers.py deleted file mode 100644 index f2df4730d..000000000 --- a/backend/dataall/api/Objects/KeyValueTag/resolvers.py +++ /dev/null @@ -1,31 +0,0 @@ -from .... 
import db -from ..Stack import stack_helper -from ...context import Context - - -def list_key_value_tags( - context: Context, source, targetUri: str = None, targetType: str = None -): - with context.engine.scoped_session() as session: - return db.api.KeyValueTag.list_key_value_tags( - session=session, - username=context.username, - groups=context.groups, - uri=targetUri, - data={'targetType': targetType}, - check_perm=True, - ) - - -def update_key_value_tags(context: Context, source, input=None): - with context.engine.scoped_session() as session: - kv_tags = db.api.KeyValueTag.update_key_value_tags( - session=session, - username=context.username, - groups=context.groups, - uri=input['targetUri'], - data=input, - check_perm=True, - ) - stack_helper.deploy_stack(context=context, targetUri=input['targetUri']) - return kv_tags diff --git a/backend/dataall/api/Objects/KeyValueTag/schema.py b/backend/dataall/api/Objects/KeyValueTag/schema.py deleted file mode 100644 index 5d5a30a9c..000000000 --- a/backend/dataall/api/Objects/KeyValueTag/schema.py +++ /dev/null @@ -1,13 +0,0 @@ -from ... import gql - -KeyValueTag = gql.ObjectType( - name='KeyValueTag', - fields=[ - gql.Field(name='tagUri', type=gql.ID), - gql.Field(name='targetType', type=gql.String), - gql.Field(name='targetUri', type=gql.String), - gql.Field(name='key', type=gql.String), - gql.Field(name='value', type=gql.String), - gql.Field(name='cascade', type=gql.Boolean), - ], -) diff --git a/backend/dataall/api/Objects/Notification/__init__.py b/backend/dataall/api/Objects/Notification/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/Notification/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/Notification/input_types.py b/backend/dataall/api/Objects/Notification/input_types.py deleted file mode 100644 index 0959a2d2f..000000000 --- a/backend/dataall/api/Objects/Notification/input_types.py +++ /dev/null @@ -1,14 +0,0 @@ -from ... import gql - -NotificationFilter = gql.InputType( - name='NotificationFilter', - arguments=[ - gql.Argument(name='term', type=gql.String), - gql.Argument(name='read', type=gql.Boolean), - gql.Argument(name='unread', type=gql.Boolean), - gql.Argument(name='archived', type=gql.Boolean), - gql.Argument(name='type', type=gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) diff --git a/backend/dataall/api/Objects/Notification/mutations.py b/backend/dataall/api/Objects/Notification/mutations.py deleted file mode 100644 index bb73fbbfb..000000000 --- a/backend/dataall/api/Objects/Notification/mutations.py +++ /dev/null @@ -1,19 +0,0 @@ -from ... 
import gql -from .resolvers import * - - -markNotificationAsRead = gql.MutationField( - name='markNotificationAsRead', - args=[ - gql.Argument(name='notificationUri', type=gql.String), - ], - type=gql.Boolean, - resolver=mark_as_read, -) - -deleteNotification = gql.MutationField( - name='deleteNotification', - args=[gql.Argument(name='notificationUri', type=gql.String)], - type=gql.Boolean, - resolver=delete, -) diff --git a/backend/dataall/api/Objects/Notification/queries.py b/backend/dataall/api/Objects/Notification/queries.py deleted file mode 100644 index 4e4a70185..000000000 --- a/backend/dataall/api/Objects/Notification/queries.py +++ /dev/null @@ -1,30 +0,0 @@ -from ... import gql -from .resolvers import * - - -listNotifications = gql.QueryField( - name='listNotifications', - args=[ - gql.Argument(name='filter', type=gql.Ref('NotificationFilter')), - ], - type=gql.Ref('NotificationSearchResult'), - resolver=list_my_notifications, -) - -countUnreadNotifications = gql.QueryField( - name='countUnreadNotifications', - type=gql.Integer, - resolver=count_unread_notifications, -) - -countReadNotifications = gql.QueryField( - name='countReadNotifications', - type=gql.Integer, - resolver=count_read_notifications, -) - -countDeletedNotifications = gql.QueryField( - name='countDeletedNotifications', - type=gql.Integer, - resolver=count_deleted_notifications, -) diff --git a/backend/dataall/api/Objects/Notification/resolvers.py b/backend/dataall/api/Objects/Notification/resolvers.py deleted file mode 100644 index 5e5ba1422..000000000 --- a/backend/dataall/api/Objects/Notification/resolvers.py +++ /dev/null @@ -1,48 +0,0 @@ -import logging - -from .... import db -from ....api.context import Context - -log = logging.getLogger(__name__) - - -def list_my_notifications( - context: Context, - source, - filter: dict = None, -): - with context.engine.scoped_session() as session: - return db.api.Notification.paginated_notifications( - session=session, username=context.username, filter=filter - ) - - -def mark_as_read( - context: Context, - source, - notificationUri: str = None, -): - with context.engine.scoped_session() as session: - return db.api.Notification.read_notification(session, notificationUri) - - -def count_unread_notifications(context: Context, source): - with context.engine.scoped_session() as session: - return db.api.Notification.count_unread_notifications(session, context.username) - - -def count_deleted_notifications(context: Context, source): - with context.engine.scoped_session() as session: - return db.api.Notification.count_deleted_notifications( - session, context.username - ) - - -def count_read_notifications(context: Context, source): - with context.engine.scoped_session() as session: - return db.api.Notification.count_read_notifications(session, context.username) - - -def delete(context: Context, source, notificationUri): - with context.engine.scoped_session() as session: - return db.api.Notification.delete_notification(session, notificationUri) diff --git a/backend/dataall/api/Objects/Notification/schema.py b/backend/dataall/api/Objects/Notification/schema.py deleted file mode 100644 index 1379c4ba9..000000000 --- a/backend/dataall/api/Objects/Notification/schema.py +++ /dev/null @@ -1,34 +0,0 @@ -from ... 
import gql -from ....db import models - - -def resolve_enum(context, source: models.Notification): - return source.type.name - - -Notification = gql.ObjectType( - name='Notification', - fields=[ - gql.Field(name='notificationUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='type', type=gql.String, resolver=resolve_enum), - gql.Field(name='message', type=gql.String), - gql.Field(name='username', type=gql.NonNullableType(gql.String)), - gql.Field(name='target_uri', type=gql.NonNullableType(gql.String)), - gql.Field(name='is_read', type=gql.Boolean), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - ], -) - - -NotificationSearchResult = gql.ObjectType( - name='NotificationSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(Notification)), - ], -) diff --git a/backend/dataall/api/Objects/Organization/__init__.py b/backend/dataall/api/Objects/Organization/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/Organization/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/Organization/input_types.py b/backend/dataall/api/Objects/Organization/input_types.py deleted file mode 100644 index c882a1f75..000000000 --- a/backend/dataall/api/Objects/Organization/input_types.py +++ /dev/null @@ -1,80 +0,0 @@ -from ....api.constants import * - -NewOrganizationInput = gql.InputType( - name='NewOrganizationInput', - arguments=[ - gql.Argument(name='label', type=gql.String), - gql.Argument(name='description', type=gql.String), - gql.Argument(name='tags', type=gql.ArrayType(gql.String)), - gql.Argument(name='SamlGroupName', type=gql.String), - ], -) - -ModifyOrganizationInput = gql.InputType( - name='ModifyOrganizationInput', - arguments=[ - gql.Argument('label', gql.String), - gql.Argument(name='description', type=gql.String), - gql.Argument(name='SamlGroupName', type=gql.String), - gql.Argument(name='tags', type=gql.ArrayType(gql.String)), - ], -) - - -class OrganizationSortField(GraphQLEnumMapper): - created = 'created' - updated = 'updated' - label = 'label' - - -OrganizationSortCriteria = gql.InputType( - name='OrganizationSortCriteria', - arguments=[ - gql.Argument( - name='field', - type=gql.NonNullableType(OrganizationSortField.toGraphQLEnum()), - ), - gql.Argument( - name='direction', type=gql.NonNullableType(SortDirection.toGraphQLEnum()) - ), - ], -) - -OrganizationFilter = gql.InputType( - name='OrganizationFilter', - arguments=[ - gql.Argument('term', gql.String), - gql.Argument('displayArchived', gql.Boolean), - gql.Argument('sort', gql.ArrayType(OrganizationSortCriteria)), - gql.Argument('page', gql.Integer), - gql.Argument('pageSize', gql.Integer), - gql.Argument('roles', gql.ArrayType(OrganisationUserRole.toGraphQLEnum())), - gql.Argument('tags', gql.ArrayType(gql.String)), - ], -) - - -OrganizationTopicFilter = gql.InputType( - name='OrganizationTopicFilter', - arguments=[ - gql.Argument(name='term', type=gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) - -OrganizationTopicInput = 
gql.InputType( - name='OrganizationTopicInput', - arguments=[ - gql.Argument(name='label', type=gql.String), - gql.Argument(name='description', type=gql.String), - ], -) - -InviteGroupToOrganizationInput = gql.InputType( - name='InviteGroupToOrganizationInput', - arguments=[ - gql.Argument('organizationUri', gql.NonNullableType(gql.String)), - gql.Argument('groupUri', gql.NonNullableType(gql.String)), - ], -) diff --git a/backend/dataall/api/Objects/Organization/mutations.py b/backend/dataall/api/Objects/Organization/mutations.py deleted file mode 100644 index 878cff36d..000000000 --- a/backend/dataall/api/Objects/Organization/mutations.py +++ /dev/null @@ -1,55 +0,0 @@ -from ... import gql -from .input_types import ( - ModifyOrganizationInput, - NewOrganizationInput, - InviteGroupToOrganizationInput, -) -from .resolvers import * -from .schema import Organization - -createOrganization = gql.MutationField( - name='createOrganization', - args=[gql.Argument(name='input', type=NewOrganizationInput)], - type=gql.Thunk(lambda: Organization), - resolver=create_organization, - test_scope='Organization', -) - -updateOrganization = gql.MutationField( - name='updateOrganization', - args=[ - gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=gql.NonNullableType(ModifyOrganizationInput)), - ], - type=gql.Thunk(lambda: Organization), - resolver=update_organization, - test_scope='Organization', -) - -archiveOrganization = gql.MutationField( - name='archiveOrganization', - args=[gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String))], - resolver=archive_organization, - type=gql.Boolean, -) - -inviteGroupToOrganization = gql.MutationField( - name='inviteGroupToOrganization', - args=[ - gql.Argument( - name='input', type=gql.NonNullableType(InviteGroupToOrganizationInput) - ) - ], - type=gql.Ref('Organization'), - resolver=invite_group, -) - -removeGroupFromOrganization = gql.MutationField( - name='removeGroupFromOrganization', - args=[ - gql.Argument('organizationUri', type=gql.NonNullableType(gql.String)), - gql.Argument('groupUri', type=gql.NonNullableType(gql.String)), - ], - type=gql.Ref('Organization'), - resolver=remove_group, -) diff --git a/backend/dataall/api/Objects/Organization/queries.py b/backend/dataall/api/Objects/Organization/queries.py deleted file mode 100644 index 3f47e88b0..000000000 --- a/backend/dataall/api/Objects/Organization/queries.py +++ /dev/null @@ -1,44 +0,0 @@ -from ... 
import gql -from .input_types import OrganizationFilter -from .resolvers import * -from .schema import ( - Organization, - OrganizationSearchResult, -) - -getOrganization = gql.QueryField( - name='getOrganization', - args=[gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String))], - type=gql.Thunk(lambda: Organization), - resolver=get_organization, - test_scope='Organization', -) - - -listOrganizations = gql.QueryField( - name='listOrganizations', - args=[gql.Argument('filter', OrganizationFilter)], - type=OrganizationSearchResult, - resolver=list_organizations, - test_scope='Organization', -) - -listOrganizationInvitedGroups = gql.QueryField( - name='listOrganizationInvitedGroups', - type=gql.Ref('GroupSearchResult'), - args=[ - gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('GroupFilter')), - ], - resolver=list_organization_invited_groups, -) - -listOrganizationGroups = gql.QueryField( - name='listOrganizationGroups', - type=gql.Ref('GroupSearchResult'), - args=[ - gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('GroupFilter')), - ], - resolver=list_organization_groups, -) diff --git a/backend/dataall/api/Objects/Organization/resolvers.py b/backend/dataall/api/Objects/Organization/resolvers.py deleted file mode 100644 index f97f2849c..000000000 --- a/backend/dataall/api/Objects/Organization/resolvers.py +++ /dev/null @@ -1,177 +0,0 @@ -from .... import db -from ....api.constants import OrganisationUserRole -from ....api.context import Context -from ....db.api.organization import Organization -from ....db import models - - -def create_organization(context: Context, source, input=None): - with context.engine.scoped_session() as session: - organization = Organization.create_organization( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=input, - check_perm=True, - ) - return organization - - -def update_organization(context, source, organizationUri=None, input=None): - with context.engine.scoped_session() as session: - return Organization.update_organization( - session=session, - username=context.username, - groups=context.groups, - uri=organizationUri, - data=input, - check_perm=True, - ) - - -def get_organization(context: Context, source, organizationUri=None): - with context.engine.scoped_session() as session: - return Organization.get_organization_by_uri( - session=session, uri=organizationUri - ) - - -def list_organizations(context: Context, source, filter=None): - if not filter: - filter = {'page': 1, 'pageSize': 5} - - with context.engine.scoped_session() as session: - return Organization.paginated_user_organizations( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=filter, - check_perm=True, - ) - - -def list_groups(context, source: models.Organization, filter=None): - if not filter: - filter = {'page': 1, 'pageSize': 5} - with context.engine.scoped_session() as session: - return Organization.paginated_organization_groups( - session=session, - username=context.username, - groups=context.groups, - uri=source.organizationUri, - data=filter, - check_perm=True, - ) - - -def list_organization_environments(context, source, filter=None): - if not filter: - filter = {'page': 1, 'pageSize': 5} - with context.engine.scoped_session() as session: - return Organization.paginated_organization_environments( - session=session, - username=context.username, - 
groups=context.groups, - uri=source.organizationUri, - data=filter, - check_perm=True, - ) - - -def stats(context, source: models.Organization, **kwargs): - with context.engine.scoped_session() as session: - environments = db.api.Organization.count_organization_environments( - session=session, uri=source.organizationUri - ) - - groups = db.api.Organization.count_organization_invited_groups( - session=session, uri=source.organizationUri, group=source.SamlGroupName - ) - - return {'environments': environments, 'groups': groups, 'users': 0} - - -def resolve_user_role(context: Context, source: models.Organization): - if source.owner == context.username: - return OrganisationUserRole.Owner.value - elif source.SamlGroupName in context.groups: - return OrganisationUserRole.Admin.value - else: - with context.engine.scoped_session() as session: - if Organization.find_organization_membership( - session=session, uri=source.organizationUri, groups=context.groups - ): - return OrganisationUserRole.Invited.value - return OrganisationUserRole.NoPermission.value - - -def archive_organization(context: Context, source, organizationUri: str = None): - with context.engine.scoped_session() as session: - return Organization.archive_organization( - session=session, - username=context.username, - groups=context.groups, - uri=organizationUri, - data=None, - check_perm=True, - ) - - -def invite_group(context: Context, source, input): - with context.engine.scoped_session() as session: - organization, organization_group = db.api.Organization.invite_group( - session=session, - username=context.username, - groups=context.groups, - uri=input['organizationUri'], - data=input, - check_perm=True, - ) - return organization - - -def remove_group(context: Context, source, organizationUri=None, groupUri=None): - with context.engine.scoped_session() as session: - organization = db.api.Organization.remove_group( - session=session, - username=context.username, - groups=context.groups, - uri=organizationUri, - data={'groupUri': groupUri}, - check_perm=True, - ) - return organization - - -def list_organization_invited_groups( - context: Context, source, organizationUri=None, filter=None -): - if filter is None: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Organization.paginated_organization_invited_groups( - session=session, - username=context.username, - groups=context.groups, - uri=organizationUri, - data=filter, - check_perm=True, - ) - - -def list_organization_groups( - context: Context, source, organizationUri=None, filter=None -): - if filter is None: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Organization.paginated_organization_groups( - session=session, - username=context.username, - groups=context.groups, - uri=organizationUri, - data=filter, - check_perm=True, - ) diff --git a/backend/dataall/api/Objects/Organization/schema.py b/backend/dataall/api/Objects/Organization/schema.py deleted file mode 100644 index 6031ff222..000000000 --- a/backend/dataall/api/Objects/Organization/schema.py +++ /dev/null @@ -1,54 +0,0 @@ -from .input_types import * -from .resolvers import * -from ...constants import OrganisationUserRole - -OrganizationStats = gql.ObjectType( - name='OrganizationStats', - fields=[ - gql.Field(name='groups', type=gql.Integer), - gql.Field(name='users', type=gql.Integer), - gql.Field(name='environments', type=gql.Integer), - ], -) -Organization = gql.ObjectType( - name='Organization', - fields=[ - gql.Field(name='organizationUri', 
type=gql.ID), - gql.Field(name='label', type=gql.String), - gql.Field(name='name', type=gql.String), - gql.Field(name='description', type=gql.String), - gql.Field(name='tags', type=gql.ArrayType(gql.String)), - gql.Field(name='owner', type=gql.String), - gql.Field(name='SamlGroupName', type=gql.String), - gql.Field( - name='userRoleInOrganization', - type=OrganisationUserRole.toGraphQLEnum(), - resolver=resolve_user_role, - ), - gql.Field( - name='environments', - args=[gql.Argument(name='filter', type=gql.Ref('EnvironmentFilter'))], - type=gql.Ref('EnvironmentSearchResult'), - resolver=list_organization_environments, - ), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='stats', type=OrganizationStats, resolver=stats), - ], -) - - -OrganizationSearchResult = gql.ObjectType( - name='OrganizationSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='pageSize', type=gql.Integer), - gql.Field(name='nextPage', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='previousPage', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(Organization)), - ], -) diff --git a/backend/dataall/api/Objects/Permission/__init__.py b/backend/dataall/api/Objects/Permission/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/Permission/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/Permission/input_types.py b/backend/dataall/api/Objects/Permission/input_types.py deleted file mode 100644 index e02c9156e..000000000 --- a/backend/dataall/api/Objects/Permission/input_types.py +++ /dev/null @@ -1,19 +0,0 @@ -from ... import gql - -TenantPermissionFilter = gql.InputType( - name='TenantPermissionFilter', - arguments=[ - gql.Argument(name='term', type=gql.Boolean), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) - -ResourcePermissionFilter = gql.InputType( - name='ResourcePermissionFilter', - arguments=[ - gql.Argument(name='term', type=gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) diff --git a/backend/dataall/api/Objects/Permission/queries.py b/backend/dataall/api/Objects/Permission/queries.py deleted file mode 100644 index e2eaaa757..000000000 --- a/backend/dataall/api/Objects/Permission/queries.py +++ /dev/null @@ -1,21 +0,0 @@ -from ... 
import gql -from .resolvers import * - - -listTenantPermissions = gql.QueryField( - name='listTenantPermissions', - args=[ - gql.Argument(name='filter', type=gql.Ref('TenantPermissionFilter')), - ], - type=gql.Ref('PermissionSearchResult'), - resolver=list_tenant_permissions, -) - -listResourcePermissions = gql.QueryField( - name='listResourcePermissions', - args=[ - gql.Argument(name='filter', type=gql.Ref('ResourcePermissionFilter')), - ], - type=gql.Ref('PermissionSearchResult'), - resolver=list_resource_permissions, -) diff --git a/backend/dataall/api/Objects/Permission/resolvers.py b/backend/dataall/api/Objects/Permission/resolvers.py deleted file mode 100644 index 0b09c8b77..000000000 --- a/backend/dataall/api/Objects/Permission/resolvers.py +++ /dev/null @@ -1,32 +0,0 @@ -import logging - -from .... import db -from ....api.context import Context - -log = logging.getLogger(__name__) - - -def list_tenant_permissions( - context: Context, - source, - filter: dict = None, -): - with context.engine.scoped_session() as session: - if not filter: - filter = {} - return db.api.Permission.paginated_tenant_permissions( - session=session, data=filter - ) - - -def list_resource_permissions( - context: Context, - source, - filter: dict = None, -): - with context.engine.scoped_session() as session: - if not filter: - filter = {} - return db.api.Permission.paginated_resource_permissions( - session=session, data=filter - ) diff --git a/backend/dataall/api/Objects/Permission/schema.py b/backend/dataall/api/Objects/Permission/schema.py deleted file mode 100644 index 3054f97e2..000000000 --- a/backend/dataall/api/Objects/Permission/schema.py +++ /dev/null @@ -1,30 +0,0 @@ -from .... import db -from ... import gql - - -def resolve_enum(context, source: db.models.Notification): - return source.type.name if source.type else db.models.PermissionType.TENANT.name - - -Permission = gql.ObjectType( - name='Permission', - fields=[ - gql.Field(name='permissionUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='type', type=gql.String, resolver=resolve_enum), - gql.Field(name='name', type=gql.NonNullableType(gql.String)), - gql.Field(name='description', type=gql.NonNullableType(gql.String)), - ], -) - - -PermissionSearchResult = gql.ObjectType( - name='PermissionSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(Permission)), - ], -) diff --git a/backend/dataall/api/Objects/Principal/__init__.py b/backend/dataall/api/Objects/Principal/__init__.py deleted file mode 100644 index eb9bec0be..000000000 --- a/backend/dataall/api/Objects/Principal/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from . import input_types, queries, resolvers, schema - -__all__ = ['resolvers', 'schema', 'input_types', 'queries'] diff --git a/backend/dataall/api/Objects/Principal/input_types.py b/backend/dataall/api/Objects/Principal/input_types.py deleted file mode 100644 index 09708cf0f..000000000 --- a/backend/dataall/api/Objects/Principal/input_types.py +++ /dev/null @@ -1,11 +0,0 @@ -from ... 
import gql - -PrincipalFilter = gql.InputType( - name='PrincipalFilter', - arguments=[ - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - gql.Argument(name='principalType', type=gql.Ref('PrincipalType')), - gql.Argument(name='term', type=gql.String), - ], -) diff --git a/backend/dataall/api/Objects/Principal/resolvers.py b/backend/dataall/api/Objects/Principal/resolvers.py deleted file mode 100644 index 0f2ce7aa6..000000000 --- a/backend/dataall/api/Objects/Principal/resolvers.py +++ /dev/null @@ -1,29 +0,0 @@ -from .... import db - - -def get_principal(session, principalId, principalType=None, principalIAMRoleName=None, environmentUri=None, groupUri=None): - if principalType in ['Group', 'ConsumptionRole']: - environment = db.api.Environment.get_environment_by_uri(session, environmentUri) - organization = db.api.Organization.get_organization_by_uri( - session, environment.organizationUri - ) - if principalType in ['ConsumptionRole']: - principal = db.api.Environment.get_environment_consumption_role(session, principalId, environmentUri) - principalName = f"{principal.consumptionRoleName} [{principal.IAMRoleArn}]" - else: - principal = db.api.Environment.get_environment_group(session, groupUri, environmentUri) - principalName = f"{groupUri} [{principal.environmentIAMRoleArn}]" - - return { - 'principalId': principalId, - 'principalType': principalType, - 'principalName': principalName, - 'principalIAMRoleName': principalIAMRoleName, - 'SamlGroupName': groupUri, - 'environmentUri': environment.environmentUri, - 'environmentName': environment.label, - 'AwsAccountId': environment.AwsAccountId, - 'region': environment.region, - 'organizationUri': organization.organizationUri, - 'organizationName': organization.label, - } diff --git a/backend/dataall/api/Objects/Principal/schema.py b/backend/dataall/api/Objects/Principal/schema.py deleted file mode 100644 index 4237bf590..000000000 --- a/backend/dataall/api/Objects/Principal/schema.py +++ /dev/null @@ -1,36 +0,0 @@ -from ... 
import gql -from ....api.constants import PrincipalType - - -Principal = gql.ObjectType( - name='Principal', - fields=[ - gql.Field(name='principalId', type=gql.ID), - gql.Field(name='principalType', type=PrincipalType.toGraphQLEnum()), - gql.Field(name='principalName', type=gql.String), - gql.Field(name='principalIAMRoleName', type=gql.String), - gql.Field(name='SamlGroupName', type=gql.String), - gql.Field(name='environmentName', type=gql.String), - gql.Field(name='environmentUri', type=gql.String), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='region', type=gql.String), - gql.Field(name='organizationName', type=gql.String), - gql.Field(name='organizationUri', type=gql.String), - ], -) - - -PrincipalSearchResult = gql.ObjectType( - name='PrincipalSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='nodes', type=gql.ArrayType(Principal)), - gql.Field(name='pageSize', type=gql.Integer), - gql.Field(name='nextPage', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='previousPage', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - ], -) diff --git a/backend/dataall/api/Objects/RedshiftCluster/__init__.py b/backend/dataall/api/Objects/RedshiftCluster/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/RedshiftCluster/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/RedshiftCluster/input_types.py b/backend/dataall/api/Objects/RedshiftCluster/input_types.py deleted file mode 100644 index 9a96b1740..000000000 --- a/backend/dataall/api/Objects/RedshiftCluster/input_types.py +++ /dev/null @@ -1,40 +0,0 @@ -from ... 
import gql - -NewClusterInput = gql.InputType( - name='NewClusterInput', - arguments=[ - gql.Argument(name='label', type=gql.NonNullableType(gql.String)), - gql.Argument(name='description', type=gql.String), - gql.Argument(name='nodeType', type=gql.NonNullableType(gql.String)), - gql.Argument(name='numberOfNodes', type=gql.NonNullableType(gql.Integer)), - gql.Argument(name='masterDatabaseName', type=gql.NonNullableType(gql.String)), - gql.Argument(name='masterUsername', type=gql.NonNullableType(gql.String)), - gql.Argument(name='databaseName', type=gql.String), - gql.Argument(name='vpc', type=gql.NonNullableType(gql.String)), - gql.Argument(name='subnetIds', type=gql.ArrayType(gql.String)), - gql.Argument(name='securityGroupIds', type=gql.ArrayType(gql.String)), - gql.Argument(name='tags', type=gql.ArrayType(gql.String)), - gql.Argument(name='SamlGroupName', type=gql.String), - ], -) - -ImportClusterInput = gql.InputType( - name='ImportClusterInput', - arguments=[ - gql.Argument(name='label', type=gql.NonNullableType(gql.String)), - gql.Argument(name='clusterIdentifier', type=gql.NonNullableType(gql.String)), - gql.Argument(name='description', type=gql.String), - gql.Argument(name='tags', type=gql.ArrayType(gql.String)), - gql.Argument(name='databaseName', type=gql.String), - gql.Argument(name='SamlGroupName', type=gql.String), - ], -) - -RedshiftClusterDatasetFilter = gql.InputType( - name='RedshiftClusterDatasetFilter', - arguments=[ - gql.Argument('term', gql.String), - gql.Argument('page', gql.Integer), - gql.Argument('pageSize', gql.Integer), - ], -) diff --git a/backend/dataall/api/Objects/RedshiftCluster/mutations.py b/backend/dataall/api/Objects/RedshiftCluster/mutations.py deleted file mode 100644 index e4586b9fe..000000000 --- a/backend/dataall/api/Objects/RedshiftCluster/mutations.py +++ /dev/null @@ -1,100 +0,0 @@ -from ... 
import gql -from .resolvers import * - -createRedshiftCluster = gql.MutationField( - name='createRedshiftCluster', - args=[ - gql.Argument(name='environmentUri', type=gql.String), - gql.Argument(name='clusterInput', type=gql.Ref('NewClusterInput')), - ], - type=gql.Ref('RedshiftCluster'), - resolver=create, -) - -deleteRedshiftCluster = gql.MutationField( - name='deleteRedshiftCluster', - args=[ - gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='deleteFromAWS', type=gql.Boolean), - ], - type=gql.Boolean, - resolver=delete, -) - -rebootRedshiftCluster = gql.MutationField( - name='rebootRedshiftCluster', - args=[gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String))], - type=gql.Boolean, - resolver=reboot_cluster, -) - -resumeRedshiftCluster = gql.MutationField( - name='resumeRedshiftCluster', - args=[gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String))], - type=gql.Boolean, - resolver=resume_cluster, -) - -pauseRedshiftCluster = gql.MutationField( - name='pauseRedshiftCluster', - args=[gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String))], - type=gql.Boolean, - resolver=pause_cluster, -) - -importRedshiftCluster = gql.MutationField( - name='importRedshiftCluster', - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument( - name='clusterInput', type=gql.NonNullableType(gql.Ref('ImportClusterInput')) - ), - ], - type=gql.Ref('RedshiftCluster'), - resolver=import_cluster, -) - -addDatasetToRedshiftCluster = gql.MutationField( - name='addDatasetToRedshiftCluster', - args=[ - gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - ], - type=gql.Boolean, - resolver=add_dataset_to_cluster, -) - - -removeDatasetFromRedshiftCluster = gql.MutationField( - name='removeDatasetFromRedshiftCluster', - args=[ - gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - ], - type=gql.Boolean, - resolver=remove_dataset_from_cluster, -) - -enableRedshiftClusterDatasetTableCopy = gql.MutationField( - name='enableRedshiftClusterDatasetTableCopy', - args=[ - gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='schema', type=gql.String), - gql.Argument(name='dataLocation', type=gql.String), - ], - type=gql.Boolean, - resolver=enable_dataset_table_copy, -) - -disableRedshiftClusterDatasetTableCopy = gql.MutationField( - name='disableRedshiftClusterDatasetTableCopy', - args=[ - gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String)), - ], - type=gql.Boolean, - resolver=disable_dataset_table_copy, -) diff --git a/backend/dataall/api/Objects/RedshiftCluster/queries.py b/backend/dataall/api/Objects/RedshiftCluster/queries.py deleted file mode 100644 index 69cab7331..000000000 --- a/backend/dataall/api/Objects/RedshiftCluster/queries.py +++ /dev/null @@ -1,64 +0,0 @@ -from ... 
import gql -from .resolvers import * - -getRedshiftCluster = gql.QueryField( - name='getRedshiftCluster', - args=[gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('RedshiftCluster'), - resolver=get_cluster, -) - - -getRedshiftClusterConsoleAccess = gql.QueryField( - name='getRedshiftClusterConsoleAccess', - args=[gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String))], - type=gql.String, - resolver=get_console_access, -) - -listRedshiftClusterAvailableDatasets = gql.QueryField( - name='listRedshiftClusterAvailableDatasets', - args=[ - gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('RedshiftClusterDatasetFilter')), - ], - resolver=list_cluster_available_datasets, - type=gql.Ref('DatasetSearchResult'), -) - -listRedshiftClusterDatasets = gql.QueryField( - name='listRedshiftClusterDatasets', - args=[ - gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('RedshiftClusterDatasetFilter')), - ], - resolver=list_cluster_datasets, - type=gql.Ref('DatasetSearchResult'), -) - -listRedshiftClusterAvailableDatasetTables = gql.QueryField( - name='listRedshiftClusterAvailableDatasetTables', - type=gql.Ref('DatasetTableSearchResult'), - args=[ - gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('DatasetTableFilter')), - ], - resolver=list_available_cluster_dataset_tables, -) - -listRedshiftClusterCopiedDatasetTables = gql.QueryField( - name='listRedshiftClusterCopyEnabledTables', - type=gql.Ref('DatasetTableSearchResult'), - args=[ - gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='filter', type=gql.Ref('DatasetTableFilter')), - ], - resolver=list_copy_enabled_dataset_tables, -) - -getRedshiftClusterDatabaseCredentials = gql.QueryField( - name='getRedshiftClusterDatabaseCredentials', - args=[gql.Argument(name='clusterUri', type=gql.NonNullableType(gql.String))], - resolver=get_datahubdb_credentials, - type=gql.Ref('RedshiftClusterCredentials'), -) diff --git a/backend/dataall/api/Objects/RedshiftCluster/resolvers.py b/backend/dataall/api/Objects/RedshiftCluster/resolvers.py deleted file mode 100644 index 3ee0f17df..000000000 --- a/backend/dataall/api/Objects/RedshiftCluster/resolvers.py +++ /dev/null @@ -1,596 +0,0 @@ -import json -import logging - -from botocore.exceptions import ClientError - -from .... 
import db -from ...constants import RedshiftClusterRole -from ..Stack import stack_helper -from ....api.context import Context -from ....aws.handlers.redshift import Redshift -from ....aws.handlers.service_handlers import Worker -from ....aws.handlers.sts import SessionHelper -from ....db import permissions, models -from ....db.api import ResourcePolicy, KeyValueTag, Stack - -log = logging.getLogger(__name__) - - -def create( - context: Context, source, environmentUri: str = None, clusterInput: dict = None -): - - with context.engine.scoped_session() as session: - - cluster = db.api.RedshiftCluster.create( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data=clusterInput, - check_perm=True, - ) - - log.debug(f'Create Redshift Cluster Stack: {cluster}') - - stack = Stack.create_stack( - session=session, - environment_uri=cluster.environmentUri, - target_type='redshift', - target_uri=cluster.clusterUri, - target_label=cluster.label, - ) - cluster.CFNStackName = stack.name if stack else None - - stack_helper.deploy_stack(context=context, targetUri=cluster.clusterUri) - cluster.userRoleForCluster = RedshiftClusterRole.Creator.value - return cluster - - -def import_cluster(context: Context, source, environmentUri: str, clusterInput: dict): - - with context.engine.scoped_session() as session: - - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=environmentUri, - permission_name=permissions.CREATE_REDSHIFT_CLUSTER, - ) - db.api.Environment.check_group_environment_permission( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - group=clusterInput['SamlGroupName'], - permission_name=permissions.CREATE_REDSHIFT_CLUSTER, - ) - environment = db.api.Environment.get_environment_by_uri(session, environmentUri) - - aws_cluster_details = Redshift.describe_clusters( - **{ - 'accountid': environment.AwsAccountId, - 'region': environment.region, - 'cluster_id': clusterInput['clusterIdentifier'], - } - ) - - if not aws_cluster_details: - raise db.exceptions.AWSResourceNotFound( - action='IMPORT_REDSHIFT_CLUSTER', - message=f"{clusterInput['clusterIdentifier']} " - f'not found on AWS {environment.AwsAccountId}//{environment.region}', - ) - - cluster = models.RedshiftCluster( - environmentUri=environment.environmentUri, - organizationUri=environment.organizationUri, - owner=context.username, - label=clusterInput['label'], - description=clusterInput.get('description'), - tags=clusterInput.get('tags'), - region=environment.region, - AwsAccountId=environment.AwsAccountId, - imported=True, - SamlGroupName=clusterInput.get('SamlGroupName', environment.SamlGroupName), - ) - cluster = map_aws_details_to_model( - aws_cluster_details=aws_cluster_details, cluster=cluster - ) - session.add(cluster) - session.commit() - - stack = models.Stack( - targetUri=cluster.clusterUri, - accountid=cluster.AwsAccountId, - region=cluster.region, - stack='redshift', - ) - session.add(stack) - cluster.CFNStackName = f'stack-{stack.stackUri}' if stack else None - session.commit() - - redshift_assign_role_task = models.Task( - targetUri=cluster.clusterUri, - action='redshift.iam_roles.update', - ) - session.add(redshift_assign_role_task) - session.commit() - - log.info('Updating imported cluster iam_roles') - Worker.queue(engine=context.engine, task_ids=[redshift_assign_role_task.taskUri]) - - stack_helper.deploy_stack(context=context, targetUri=cluster.clusterUri) 
- - return cluster - - -def get_cluster(context: Context, source, clusterUri: str = None): - with context.engine.scoped_session() as session: - return db.api.RedshiftCluster.get_cluster( - session=session, - username=context.username, - groups=context.groups, - uri=clusterUri, - data=None, - check_perm=True, - ) - - -def resolve_user_role(context: Context, source: models.RedshiftCluster): - if not source: - return None - if context.username and source.owner == context.username: - return RedshiftClusterRole.Creator.value - elif context.groups and source.SamlGroupName in context.groups: - return RedshiftClusterRole.Admin.value - return RedshiftClusterRole.NoPermission.value - - -def get_cluster_status(context: Context, source: models.RedshiftCluster): - if not source: - return None - with context.engine.scoped_session() as session: - try: - aws_cluster = Redshift.describe_clusters( - **{ - 'accountid': source.AwsAccountId, - 'region': source.region, - 'cluster_id': source.name, - } - ) - if aws_cluster: - map_aws_details_to_model(aws_cluster, source) - if not source.external_schema_created: - task_init_db = models.Task( - targetUri=source.clusterUri, - action='redshift.cluster.init_database', - ) - session.add(task_init_db) - session.commit() - Worker.queue(engine=context.engine, task_ids=[task_init_db.taskUri]) - - return source.status - except ClientError as e: - log.error(f'Failed to retrieve cluster status due to: {e}') - - -def map_aws_details_to_model(aws_cluster_details, cluster): - cluster.name = aws_cluster_details.get('ClusterIdentifier') - cluster.status = aws_cluster_details.get('ClusterStatus') - cluster.numberOfNodes = aws_cluster_details.get('NumberOfNodes') - cluster.masterUsername = aws_cluster_details.get('MasterUsername') - cluster.masterDatabaseName = aws_cluster_details.get('DBName') - cluster.endpoint = ( - aws_cluster_details.get('Endpoint').get('Address') - if aws_cluster_details.get('Endpoint') - else None - ) - cluster.port = ( - aws_cluster_details.get('Endpoint').get('Port') - if aws_cluster_details.get('Endpoint') - else None - ) - cluster.subnetGroupName = aws_cluster_details.get('ClusterSubnetGroupName') - cluster.IAMRoles = ( - [role.get('IamRoleArn') for role in aws_cluster_details.get('IamRoles')] - if aws_cluster_details.get('IamRoles') - else None - ) - cluster.nodeType = aws_cluster_details.get('NodeType') - cluster.securityGroupIds = ( - [ - vpc.get('VpcSecurityGroupId') - for vpc in aws_cluster_details.get('VpcSecurityGroups') - ] - if aws_cluster_details.get('VpcSecurityGroups') - else None - ) - cluster.vpc = aws_cluster_details.get('VpcId') - cluster.tags = ( - [{tag.get('Key'), tag.get('Value')} for tag in aws_cluster_details.get('tags')] - if aws_cluster_details.get('tags') - else None - ) - return cluster - - -def get_cluster_organization(context: Context, source: models.RedshiftCluster): - if not source: - return None - with context.engine.scoped_session() as session: - org = session.query(models.Organization).get(source.organizationUri) - return org - - -def get_cluster_environment(context: Context, source: models.RedshiftCluster): - if not source: - return None - with context.engine.scoped_session() as session: - return db.api.Environment.get_environment_by_uri(session, source.environmentUri) - - -def delete( - context: Context, source, clusterUri: str = None, deleteFromAWS: bool = False -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - resource_uri=clusterUri, - 
username=context.username, - groups=context.groups, - permission_name=permissions.DELETE_REDSHIFT_CLUSTER, - ) - cluster = db.api.RedshiftCluster.get_redshift_cluster_by_uri( - session, clusterUri - ) - env: models.Environment = db.api.Environment.get_environment_by_uri( - session, cluster.environmentUri - ) - db.api.RedshiftCluster.delete_all_cluster_linked_objects(session, clusterUri) - - KeyValueTag.delete_key_value_tags(session, cluster.clusterUri, 'redshift') - - session.delete(cluster) - - ResourcePolicy.delete_resource_policy( - session=session, - resource_uri=clusterUri, - group=cluster.SamlGroupName, - ) - - if deleteFromAWS: - stack_helper.delete_stack( - context=context, - target_uri=clusterUri, - accountid=env.AwsAccountId, - cdk_role_arn=env.CDKRoleArn, - region=env.region, - target_type='redshiftcluster', - ) - - return True - - -def pause_cluster(context: Context, source, clusterUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - resource_uri=clusterUri, - username=context.username, - groups=context.groups, - permission_name=permissions.PAUSE_REDSHIFT_CLUSTER, - ) - cluster = db.api.RedshiftCluster.get_redshift_cluster_by_uri( - session, clusterUri - ) - Redshift.pause_cluster( - **{ - 'accountid': cluster.AwsAccountId, - 'region': cluster.region, - 'cluster_id': cluster.name, - } - ) - return True - - -def resume_cluster(context: Context, source, clusterUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - resource_uri=clusterUri, - username=context.username, - groups=context.groups, - permission_name=permissions.RESUME_REDSHIFT_CLUSTER, - ) - cluster = db.api.RedshiftCluster.get_redshift_cluster_by_uri( - session, clusterUri - ) - Redshift.resume_cluster( - **{ - 'accountid': cluster.AwsAccountId, - 'region': cluster.region, - 'cluster_id': cluster.name, - } - ) - return True - - -def reboot_cluster(context: Context, source, clusterUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - resource_uri=clusterUri, - username=context.username, - groups=context.groups, - permission_name=permissions.REBOOT_REDSHIFT_CLUSTER, - ) - cluster = db.api.RedshiftCluster.get_redshift_cluster_by_uri( - session, clusterUri - ) - Redshift.reboot_cluster( - **{ - 'accountid': cluster.AwsAccountId, - 'region': cluster.region, - 'cluster_id': cluster.name, - } - ) - return True - - -def get_console_access(context: Context, source, clusterUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - resource_uri=clusterUri, - username=context.username, - groups=context.groups, - permission_name=permissions.GET_REDSHIFT_CLUSTER_CREDENTIALS, - ) - cluster = db.api.RedshiftCluster.get_redshift_cluster_by_uri( - session, clusterUri - ) - environment = db.api.Environment.get_environment_by_uri( - session, cluster.environmentUri - ) - pivot_session = SessionHelper.remote_session(environment.AwsAccountId) - aws_session = SessionHelper.get_session( - base_session=pivot_session, - role_arn=environment.EnvironmentDefaultIAMRoleArn, - ) - url = SessionHelper.get_console_access_url( - aws_session, region=cluster.region, redshiftcluster=cluster.name - ) - return url - - -def add_dataset_to_cluster( - context: Context, source, clusterUri: str = None, datasetUri: str = None -): - 
with context.engine.scoped_session() as session: - cluster = db.api.RedshiftCluster.get_redshift_cluster_by_uri( - session, clusterUri - ) - aws_cluster = Redshift.describe_clusters( - **{ - 'accountid': cluster.AwsAccountId, - 'region': cluster.region, - 'cluster_id': cluster.name, - } - ) - if aws_cluster: - map_aws_details_to_model(aws_cluster, cluster) - cluster, dataset = db.api.RedshiftCluster.add_dataset( - session=session, - username=context.username, - groups=context.groups, - uri=clusterUri, - data={'datasetUri': datasetUri}, - check_perm=True, - ) - task = models.Task( - targetUri=cluster.clusterUri, - action='redshift.cluster.create_external_schema', - ) - session.add(task) - session.commit() - - Worker.queue(context.engine, [task.taskUri]) - return True - - -def remove_dataset_from_cluster( - context: Context, source, clusterUri: str = None, datasetUri: str = None -): - with context.engine.scoped_session() as session: - cluster, dataset = db.api.RedshiftCluster.remove_dataset_from_cluster( - session=session, - username=context.username, - groups=context.groups, - uri=clusterUri, - data={'datasetUri': datasetUri}, - check_perm=True, - ) - if dataset.environmentUri != cluster.environmentUri: - database = f'{dataset.GlueDatabaseName}shared' - else: - database = dataset.GlueDatabaseName - task = models.Task( - targetUri=cluster.clusterUri, - action='redshift.cluster.drop_external_schema', - payload={'database': database}, - ) - session.add(task) - session.commit() - - Worker.queue(context.engine, [task.taskUri]) - return True - - -def list_cluster_available_datasets( - context: Context, source, clusterUri: str = None, filter: dict = None -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.RedshiftCluster.list_available_datasets( - session, - username=context.username, - groups=context.groups, - uri=clusterUri, - data=filter, - check_perm=True, - ) - - -def list_cluster_datasets( - context: Context, source, clusterUri: str = None, filter: dict = None -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.RedshiftCluster.list_cluster_datasets( - session=session, - username=context.username, - groups=context.groups, - uri=clusterUri, - data=filter, - check_perm=True, - ) - - -def list_available_cluster_dataset_tables( - context: Context, source, clusterUri: str = None, filter: dict = None -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.RedshiftCluster.list_available_cluster_tables( - session, - username=context.username, - groups=context.groups, - uri=clusterUri, - data=filter, - check_perm=True, - ) - - -def list_copy_enabled_dataset_tables( - context: Context, source, clusterUri: str = None, filter: dict = None -): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.RedshiftCluster.list_copy_enabled_tables( - session, - username=context.username, - groups=context.groups, - uri=clusterUri, - data=filter, - check_perm=True, - ) - - -def get_datahubdb_credentials(context: Context, source, clusterUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - resource_uri=clusterUri, - username=context.username, - groups=context.groups, - permission_name=permissions.GET_REDSHIFT_CLUSTER_CREDENTIALS, - ) - cluster = db.api.RedshiftCluster.get_redshift_cluster_by_uri( - session, clusterUri - ) - creds = 
Redshift.get_cluster_credentials( - **{ - 'accountid': cluster.AwsAccountId, - 'region': cluster.region, - 'cluster_id': cluster.name, - 'secret_name': cluster.datahubSecret, - } - ) - return { - 'clusterUri': clusterUri, - 'endpoint': cluster.endpoint, - 'port': cluster.port, - 'database': cluster.databaseName, - 'user': cluster.databaseUser, - 'password': creds, - } - - -def resolve_stack(context: Context, source: models.RedshiftCluster, **kwargs): - if not source: - return None - return stack_helper.get_stack_with_cfn_resources( - context=context, - targetUri=source.clusterUri, - environmentUri=source.environmentUri, - ) - - -def enable_dataset_table_copy( - context: Context, - source, - clusterUri: str = None, - datasetUri: str = None, - tableUri: str = None, - schema: str = None, - dataLocation: str = None, -): - with context.engine.scoped_session() as session: - cluster = db.api.RedshiftCluster.get_redshift_cluster_by_uri( - session, clusterUri - ) - db.api.RedshiftCluster.enable_copy_table( - session, - username=context.username, - groups=context.groups, - uri=clusterUri, - data={ - 'datasetUri': datasetUri, - 'tableUri': tableUri, - 'schema': schema, - 'dataLocation': dataLocation, - }, - check_perm=True, - ) - log.info( - f'Redshift copy tableUri {tableUri} starting for cluster' - f'{cluster.name} in account {cluster.AwsAccountId}' - ) - task = models.Task( - action='redshift.subscriptions.copy', - targetUri=cluster.environmentUri, - payload={ - 'datasetUri': datasetUri, - 'message': json.dumps({'clusterUri': clusterUri}), - 'tableUri': tableUri, - }, - ) - session.add(task) - session.commit() - - Worker.queue(context.engine, [task.taskUri]) - return True - - -def disable_dataset_table_copy( - context: Context, - source, - clusterUri: str = None, - datasetUri: str = None, - tableUri: str = None, -): - with context.engine.scoped_session() as session: - return db.api.RedshiftCluster.disable_copy_table( - session, - username=context.username, - groups=context.groups, - uri=clusterUri, - data={'datasetUri': datasetUri, 'tableUri': tableUri}, - check_perm=True, - ) diff --git a/backend/dataall/api/Objects/RedshiftCluster/schema.py b/backend/dataall/api/Objects/RedshiftCluster/schema.py deleted file mode 100644 index 4852caa88..000000000 --- a/backend/dataall/api/Objects/RedshiftCluster/schema.py +++ /dev/null @@ -1,96 +0,0 @@ -from ... 
import gql -from .resolvers import * -from ....api.constants import RedshiftClusterRole - -RedshiftCluster = gql.ObjectType( - name='RedshiftCluster', - fields=[ - gql.Field(name='clusterUri', type=gql.ID), - gql.Field(name='environmentUri', type=gql.String), - gql.Field(name='name', type=gql.String), - gql.Field(name='label', type=gql.String), - gql.Field(name='description', type=gql.String), - gql.Field(name='tags', type=gql.ArrayType(gql.String)), - gql.Field(name='owner', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='region', type=gql.String), - gql.Field(name='clusterArn', type=gql.String), - gql.Field(name='clusterName', type=gql.String), - gql.Field(name='databaseName', type=gql.String), - gql.Field(name='databaseUser', type=gql.String), - gql.Field(name='datahubSecret', type=gql.String), - gql.Field(name='masterUsername', type=gql.String), - gql.Field(name='masterDatabaseName', type=gql.String), - gql.Field(name='masterSecret', type=gql.String), - gql.Field(name='nodeType', type=gql.String), - gql.Field(name='numberOfNodes', type=gql.Integer), - gql.Field(name='kmsAlias', type=gql.String), - gql.Field(name='subnetGroupName', type=gql.String), - gql.Field(name='CFNStackName', type=gql.String), - gql.Field(name='CFNStackStatus', type=gql.String), - gql.Field(name='CFNStackArn', type=gql.String), - gql.Field(name='port', type=gql.String), - gql.Field(name='endpoint', type=gql.String), - gql.Field(name='SamlGroupName', type=gql.String), - gql.Field(name='imported', type=gql.Boolean), - gql.Field(name='IAMRoles', type=gql.ArrayType(gql.String)), - gql.Field(name='vpc', type=gql.String), - gql.Field(name='subnetIds', type=gql.ArrayType(gql.String)), - gql.Field(name='securityGroupIds', type=gql.ArrayType(gql.String)), - gql.Field( - name='userRoleForCluster', - type=RedshiftClusterRole.toGraphQLEnum(), - resolver=resolve_user_role, - ), - gql.Field( - name='userRoleInEnvironment', type=RedshiftClusterRole.toGraphQLEnum() - ), - gql.Field( - 'organization', - type=gql.Ref('Organization'), - resolver=get_cluster_organization, - ), - gql.Field( - 'environment', type=gql.Ref('Environment'), resolver=get_cluster_environment - ), - gql.Field('status', type=gql.String, resolver=get_cluster_status), - gql.Field(name='stack', type=gql.Ref('Stack'), resolver=resolve_stack), - ], -) - - -RedshiftClusterSearchResult = gql.ObjectType( - name='RedshiftClusterSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(RedshiftCluster)), - ], -) - -RedshiftClusterFilter = gql.InputType( - name='RedshiftClusterFilter', - arguments=[ - gql.Argument('term', gql.String), - gql.Argument('roles', gql.ArrayType(gql.Ref('RedshiftClusterRole'))), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) - -RedshiftClusterCredentials = gql.ObjectType( - name='RedshiftClusterCredentials', - fields=[ - gql.Field(name='clusterUri', type=gql.ID), - gql.Field('endpoint', gql.String), - gql.Field('database', gql.String), - gql.Field('port', gql.Integer), - gql.Field('password', gql.String), - gql.Field('user', gql.String), - ], -) diff --git a/backend/dataall/api/Objects/SagemakerNotebook/__init__.py 
b/backend/dataall/api/Objects/SagemakerNotebook/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/SagemakerNotebook/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/SagemakerNotebook/input_types.py b/backend/dataall/api/Objects/SagemakerNotebook/input_types.py deleted file mode 100644 index 7db8bfa24..000000000 --- a/backend/dataall/api/Objects/SagemakerNotebook/input_types.py +++ /dev/null @@ -1,38 +0,0 @@ -from ... import gql - -NewSagemakerNotebookInput = gql.InputType( - name='NewSagemakerNotebookInput ', - arguments=[ - gql.Argument('label', gql.NonNullableType(gql.String)), - gql.Argument('description', gql.String), - gql.Argument('environmentUri', gql.NonNullableType(gql.String)), - gql.Argument('SamlAdminGroupName', gql.NonNullableType(gql.String)), - gql.Argument('tags', gql.ArrayType(gql.String)), - gql.Argument('topics', gql.String), - gql.Argument('VpcId', gql.String), - gql.Argument('SubnetId', gql.String), - gql.Argument('VolumeSizeInGB', gql.Integer), - gql.Argument('InstanceType', gql.String), - ], -) - -ModifySagemakerNotebookInput = gql.InputType( - name='ModifySagemakerNotebookInput', - arguments=[ - gql.Argument('label', gql.String), - gql.Argument('tags', gql.ArrayType(gql.String)), - gql.Argument('description', gql.String), - ], -) - -SagemakerNotebookFilter = gql.InputType( - name='SagemakerNotebookFilter', - arguments=[ - gql.Argument('term', gql.String), - gql.Argument('page', gql.Integer), - gql.Argument('pageSize', gql.Integer), - gql.Argument('sort', gql.String), - gql.Argument('limit', gql.Integer), - gql.Argument('offset', gql.Integer), - ], -) diff --git a/backend/dataall/api/Objects/SagemakerNotebook/mutations.py b/backend/dataall/api/Objects/SagemakerNotebook/mutations.py deleted file mode 100644 index 895239797..000000000 --- a/backend/dataall/api/Objects/SagemakerNotebook/mutations.py +++ /dev/null @@ -1,33 +0,0 @@ -from ... import gql -from .resolvers import * - -createSagemakerNotebook = gql.MutationField( - name='createSagemakerNotebook', - args=[gql.Argument(name='input', type=gql.Ref('NewSagemakerNotebookInput'))], - type=gql.Ref('SagemakerNotebook'), - resolver=create_notebook, -) - -startSagemakerNotebook = gql.MutationField( - name='startSagemakerNotebook', - args=[gql.Argument(name='notebookUri', type=gql.NonNullableType(gql.String))], - type=gql.String, - resolver=start_notebook, -) - -stopSagemakerNotebook = gql.MutationField( - name='stopSagemakerNotebook', - args=[gql.Argument(name='notebookUri', type=gql.NonNullableType(gql.String))], - type=gql.String, - resolver=stop_notebook, -) - -deleteSagemakerNotebook = gql.MutationField( - name='deleteSagemakerNotebook', - args=[ - gql.Argument(name='notebookUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='deleteFromAWS', type=gql.Boolean), - ], - type=gql.String, - resolver=delete_notebook, -) diff --git a/backend/dataall/api/Objects/SagemakerNotebook/queries.py b/backend/dataall/api/Objects/SagemakerNotebook/queries.py deleted file mode 100644 index 54cc54c50..000000000 --- a/backend/dataall/api/Objects/SagemakerNotebook/queries.py +++ /dev/null @@ -1,24 +0,0 @@ -from ... 
import gql -from .resolvers import * - -getSagemakerNotebook = gql.QueryField( - name='getSagemakerNotebook', - args=[gql.Argument(name='notebookUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('SagemakerNotebook'), - resolver=get_notebook, -) - - -listSagemakerNotebooks = gql.QueryField( - name='listSagemakerNotebooks', - args=[gql.Argument('filter', gql.Ref('SagemakerNotebookFilter'))], - type=gql.Ref('SagemakerNotebookSearchResult'), - resolver=list_notebooks, -) - -getSagemakerNotebookPresignedUrl = gql.QueryField( - name='getSagemakerNotebookPresignedUrl', - args=[gql.Argument(name='notebookUri', type=gql.NonNullableType(gql.String))], - type=gql.String, - resolver=get_notebook_presigned_url, -) diff --git a/backend/dataall/api/Objects/SagemakerNotebook/resolvers.py b/backend/dataall/api/Objects/SagemakerNotebook/resolvers.py deleted file mode 100644 index eb5f2c32f..000000000 --- a/backend/dataall/api/Objects/SagemakerNotebook/resolvers.py +++ /dev/null @@ -1,218 +0,0 @@ -from .... import db -from ..Stack import stack_helper -from ....api.constants import SagemakerNotebookRole -from ....api.context import Context -from ....aws.handlers.sagemaker import Sagemaker -from ....db import permissions, models -from ....db.api import ResourcePolicy, Notebook, KeyValueTag, Stack - - -def create_notebook(context: Context, source, input: dict = None): - with context.engine.scoped_session() as session: - - notebook = Notebook.create_notebook( - session=session, - username=context.username, - groups=context.groups, - uri=input['environmentUri'], - data=input, - check_perm=True, - ) - - Stack.create_stack( - session=session, - environment_uri=notebook.environmentUri, - target_type='notebook', - target_uri=notebook.notebookUri, - target_label=notebook.label, - ) - - stack_helper.deploy_stack(context=context, targetUri=notebook.notebookUri) - - return notebook - - -def list_notebooks(context, source, filter: dict = None): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return Notebook.paginated_user_notebooks( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=filter, - check_perm=True, - ) - - -def get_notebook(context, source, notebookUri: str = None): - with context.engine.scoped_session() as session: - return Notebook.get_notebook( - session=session, - username=context.username, - groups=context.groups, - uri=notebookUri, - data=None, - check_perm=True, - ) - - -def resolve_status(context, source: models.SagemakerNotebook, **kwargs): - if not source: - return None - return Sagemaker.get_notebook_instance_status( - AwsAccountId=source.AWSAccountId, - region=source.region, - NotebookInstanceName=source.NotebookInstanceName, - ) - - -def start_notebook(context, source: models.SagemakerNotebook, notebookUri: str = None): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=notebookUri, - permission_name=permissions.UPDATE_NOTEBOOK, - ) - notebook = Notebook.get_notebook( - session=session, - username=context.username, - groups=context.groups, - uri=notebookUri, - data=None, - check_perm=True, - ) - Sagemaker.start_instance( - notebook.AWSAccountId, notebook.region, notebook.NotebookInstanceName - ) - return 'Starting' - - -def stop_notebook(context, source: models.SagemakerNotebook, notebookUri: str = None): - with context.engine.scoped_session() as session: - 
ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=notebookUri, - permission_name=permissions.UPDATE_NOTEBOOK, - ) - notebook = Notebook.get_notebook( - session=session, - username=context.username, - groups=context.groups, - uri=notebookUri, - data=None, - check_perm=True, - ) - Sagemaker.stop_instance( - notebook.AWSAccountId, notebook.region, notebook.NotebookInstanceName - ) - return 'Stopping' - - -def get_notebook_presigned_url( - context, source: models.SagemakerNotebook, notebookUri: str = None -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=notebookUri, - permission_name=permissions.GET_NOTEBOOK, - ) - notebook = Notebook.get_notebook( - session=session, - username=context.username, - groups=context.groups, - uri=notebookUri, - data=None, - check_perm=True, - ) - url = Sagemaker.presigned_url( - notebook.AWSAccountId, notebook.region, notebook.NotebookInstanceName - ) - return url - - -def delete_notebook( - context, - source: models.SagemakerNotebook, - notebookUri: str = None, - deleteFromAWS: bool = None, -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - resource_uri=notebookUri, - permission_name=permissions.DELETE_NOTEBOOK, - groups=context.groups, - username=context.username, - ) - notebook = Notebook.get_notebook_by_uri(session, notebookUri) - env: models.Environment = db.api.Environment.get_environment_by_uri( - session, notebook.environmentUri - ) - - KeyValueTag.delete_key_value_tags(session, notebook.notebookUri, 'notebook') - - session.delete(notebook) - - ResourcePolicy.delete_resource_policy( - session=session, - resource_uri=notebook.notebookUri, - group=notebook.SamlAdminGroupName, - ) - - if deleteFromAWS: - stack_helper.delete_stack( - context=context, - target_uri=notebookUri, - accountid=env.AwsAccountId, - cdk_role_arn=env.CDKRoleArn, - region=env.region, - target_type='notebook', - ) - - return True - - -def resolve_environment(context, source, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - return session.query(models.Environment).get(source.environmentUri) - - -def resolve_organization(context, source, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - env: models.Environment = session.query(models.Environment).get( - source.environmentUri - ) - return session.query(models.Organization).get(env.organizationUri) - - -def resolve_user_role(context: Context, source: models.SagemakerNotebook): - if not source: - return None - if source.owner == context.username: - return SagemakerNotebookRole.Creator.value - elif context.groups and source.SamlAdminGroupName in context.groups: - return SagemakerNotebookRole.Admin.value - return SagemakerNotebookRole.NoPermission.value - - -def resolve_stack(context: Context, source: models.SagemakerNotebook, **kwargs): - if not source: - return None - return stack_helper.get_stack_with_cfn_resources( - context=context, - targetUri=source.notebookUri, - environmentUri=source.environmentUri, - ) diff --git a/backend/dataall/api/Objects/SagemakerNotebook/schema.py b/backend/dataall/api/Objects/SagemakerNotebook/schema.py deleted file mode 100644 index 61e5c6bb5..000000000 --- a/backend/dataall/api/Objects/SagemakerNotebook/schema.py +++ 
/dev/null @@ -1,54 +0,0 @@ -from ... import gql -from .resolvers import * - -SagemakerNotebook = gql.ObjectType( - name='SagemakerNotebook', - fields=[ - gql.Field(name='notebookUri', type=gql.ID), - gql.Field(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='label', type=gql.String), - gql.Field(name='description', type=gql.String), - gql.Field(name='tags', type=gql.ArrayType(gql.String)), - gql.Field(name='name', type=gql.String), - gql.Field(name='owner', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='SamlAdminGroupName', type=gql.String), - gql.Field(name='VpcId', type=gql.String), - gql.Field(name='SubnetId', type=gql.String), - gql.Field(name='InstanceType', type=gql.String), - gql.Field(name='RoleArn', type=gql.String), - gql.Field(name='VolumeSizeInGB', type=gql.Integer), - gql.Field( - name='userRoleForNotebook', - type=SagemakerNotebookRole.toGraphQLEnum(), - resolver=resolve_user_role, - ), - gql.Field( - name='NotebookInstanceStatus', type=gql.String, resolver=resolve_status - ), - gql.Field( - name='environment', - type=gql.Ref('Environment'), - resolver=resolve_environment, - ), - gql.Field( - name='organization', - type=gql.Ref('Organization'), - resolver=resolve_organization, - ), - gql.Field(name='stack', type=gql.Ref('Stack'), resolver=resolve_stack), - ], -) - -SagemakerNotebookSearchResult = gql.ObjectType( - name='SagemakerNotebookSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(SagemakerNotebook)), - ], -) diff --git a/backend/dataall/api/Objects/SagemakerStudio/__init__.py b/backend/dataall/api/Objects/SagemakerStudio/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/SagemakerStudio/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/SagemakerStudio/input_types.py b/backend/dataall/api/Objects/SagemakerStudio/input_types.py deleted file mode 100644 index b5af0c3c8..000000000 --- a/backend/dataall/api/Objects/SagemakerStudio/input_types.py +++ /dev/null @@ -1,34 +0,0 @@ -from ... 
import gql - -NewSagemakerStudioUserProfileInput = gql.InputType( - name='NewSagemakerStudioUserProfileInput', - arguments=[ - gql.Argument('label', gql.NonNullableType(gql.String)), - gql.Argument('description', gql.String), - gql.Argument('environmentUri', gql.NonNullableType(gql.String)), - gql.Argument('tags', gql.ArrayType(gql.String)), - gql.Argument('topics', gql.String), - gql.Argument('SamlAdminGroupName', gql.NonNullableType(gql.String)), - ], -) - -ModifySagemakerStudioUserProfileInput = gql.InputType( - name='ModifySagemakerStudioUserProfileInput', - arguments=[ - gql.Argument('label', gql.String), - gql.Argument('tags', gql.ArrayType(gql.String)), - gql.Argument('description', gql.String), - ], -) - -SagemakerStudioUserProfileFilter = gql.InputType( - name='SagemakerStudioUserProfileFilter', - arguments=[ - gql.Argument('term', gql.String), - gql.Argument('page', gql.Integer), - gql.Argument('pageSize', gql.Integer), - gql.Argument('sort', gql.String), - gql.Argument('limit', gql.Integer), - gql.Argument('offset', gql.Integer), - ], -) diff --git a/backend/dataall/api/Objects/SagemakerStudio/mutations.py b/backend/dataall/api/Objects/SagemakerStudio/mutations.py deleted file mode 100644 index c3859a62a..000000000 --- a/backend/dataall/api/Objects/SagemakerStudio/mutations.py +++ /dev/null @@ -1,27 +0,0 @@ -from ... import gql -from .resolvers import * - -createSagemakerStudioUserProfile = gql.MutationField( - name='createSagemakerStudioUserProfile', - args=[ - gql.Argument( - name='input', - type=gql.NonNullableType(gql.Ref('NewSagemakerStudioUserProfileInput')), - ) - ], - type=gql.Ref('SagemakerStudioUserProfile'), - resolver=create_sagemaker_studio_user_profile, -) - -deleteSagemakerStudioUserProfile = gql.MutationField( - name='deleteSagemakerStudioUserProfile', - args=[ - gql.Argument( - name='sagemakerStudioUserProfileUri', - type=gql.NonNullableType(gql.String), - ), - gql.Argument(name='deleteFromAWS', type=gql.Boolean), - ], - type=gql.String, - resolver=delete_sagemaker_studio_user_profile, -) diff --git a/backend/dataall/api/Objects/SagemakerStudio/queries.py b/backend/dataall/api/Objects/SagemakerStudio/queries.py deleted file mode 100644 index dacbc3f85..000000000 --- a/backend/dataall/api/Objects/SagemakerStudio/queries.py +++ /dev/null @@ -1,42 +0,0 @@ -from ... 
import gql -from .resolvers import * - -getSagemakerStudioUserProfile = gql.QueryField( - name='getSagemakerStudioUserProfile', - args=[ - gql.Argument( - name='sagemakerStudioUserProfileUri', type=gql.NonNullableType(gql.String) - ) - ], - type=gql.Ref('SagemakerStudioUserProfile'), - resolver=get_sagemaker_studio_user_profile, -) - -getSagemakerStudioUserProfileApps = gql.QueryField( - name='getSagemakerStudioUserProfileApps', - args=[ - gql.Argument( - name='sagemakerStudioUserProfileUri', type=gql.NonNullableType(gql.String) - ) - ], - type=gql.ArrayType(gql.Ref('SagemakerStudioUserProfileApps')), - resolver=get_user_profile_applications, -) - -listSagemakerStudioUserProfiles = gql.QueryField( - name='listSagemakerStudioUserProfiles', - args=[gql.Argument('filter', gql.Ref('SagemakerStudioUserProfileFilter'))], - type=gql.Ref('SagemakerStudioUserProfileSearchResult'), - resolver=list_sm_studio_user_profile, -) - -getSagemakerStudioUserProfilePresignedUrl = gql.QueryField( - name='getSagemakerStudioUserProfilePresignedUrl', - args=[ - gql.Argument( - name='sagemakerStudioUserProfileUri', type=gql.NonNullableType(gql.String) - ) - ], - type=gql.String, - resolver=get_sagemaker_studio_user_profile_presigned_url, -) diff --git a/backend/dataall/api/Objects/SagemakerStudio/resolvers.py b/backend/dataall/api/Objects/SagemakerStudio/resolvers.py deleted file mode 100644 index 32d6bffa2..000000000 --- a/backend/dataall/api/Objects/SagemakerStudio/resolvers.py +++ /dev/null @@ -1,249 +0,0 @@ -import logging - -from ..Stack import stack_helper -from .... import db -from ....api.constants import SagemakerStudioRole -from ....api.context import Context -from ....aws.handlers.sagemaker_studio import ( - SagemakerStudio, -) -from ....db import exceptions, permissions, models -from ....db.api import ResourcePolicy, Stack - -log = logging.getLogger(__name__) - - -def create_sagemaker_studio_user_profile(context: Context, source, input: dict = None): - with context.engine.scoped_session() as session: - if not input.get('environmentUri'): - raise exceptions.RequiredParameter('environmentUri') - if not input.get('label'): - raise exceptions.RequiredParameter('name') - - environment_uri = input.get('environmentUri') - - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=environment_uri, - permission_name=permissions.CREATE_SGMSTUDIO_NOTEBOOK, - ) - - env: models.Environment = db.api.Environment.get_environment_by_uri( - session, environment_uri - ) - - if not env.mlStudiosEnabled: - raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_SGMSTUDIO_NOTEBOOK, - message=f'ML Studio feature is disabled for the environment {env.label}', - ) - - existing_domain = SagemakerStudio.get_sagemaker_studio_domain( - env.AwsAccountId, env.region - ) - input['domain_id'] = existing_domain.get('DomainId', False) - - if not input['domain_id']: - raise exceptions.AWSResourceNotAvailable( - action='Sagemaker Studio domain', - message='Add a VPC to your environment and update the environment stack ' - 'or create a Sagemaker studio domain on your AWS account.', - ) - - sm_user_profile = db.api.SgmStudioNotebook.create_notebook( - session=session, - username=context.username, - groups=context.groups, - uri=env.environmentUri, - data=input, - check_perm=True, - ) - - Stack.create_stack( - session=session, - environment_uri=sm_user_profile.environmentUri, - target_type='sagemakerstudiouserprofile', - 
target_uri=sm_user_profile.sagemakerStudioUserProfileUri, - target_label=sm_user_profile.label, - ) - - stack_helper.deploy_stack( - context=context, targetUri=sm_user_profile.sagemakerStudioUserProfileUri - ) - - return sm_user_profile - - -def list_sm_studio_user_profile(context, source, filter: dict = None): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.SgmStudioNotebook.paginated_user_notebooks( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=filter, - check_perm=True, - ) - - -def get_sagemaker_studio_user_profile( - context, source, sagemakerStudioUserProfileUri: str = None -) -> models.SagemakerStudioUserProfile: - with context.engine.scoped_session() as session: - return db.api.SgmStudioNotebook.get_notebook( - session=session, - username=context.username, - groups=context.groups, - uri=sagemakerStudioUserProfileUri, - data=None, - check_perm=True, - ) - - -def resolve_user_role(context: Context, source: models.SagemakerStudioUserProfile): - if source.owner == context.username: - return SagemakerStudioRole.Creator.value - elif context.groups and source.SamlAdminGroupName in context.groups: - return SagemakerStudioRole.Admin.value - return SagemakerStudioRole.NoPermission.value - - -def resolve_status(context, source: models.SagemakerStudioUserProfile, **kwargs): - if not source: - return None - try: - user_profile_status = SagemakerStudio.get_user_profile_status( - AwsAccountId=source.AWSAccountId, - region=source.region, - sagemakerStudioDomainID=source.sagemakerStudioDomainID, - sagemakerStudioUserProfileNameSlugify=source.sagemakerStudioUserProfileNameSlugify, - ) - with context.engine.scoped_session() as session: - sm_user_profile = session.query(models.SagemakerStudioUserProfile).get( - source.sagemakerStudioUserProfileUri - ) - sm_user_profile.sagemakerStudioUserProfileStatus = user_profile_status - return user_profile_status - except Exception: - return 'NOT FOUND' - - -def get_sagemaker_studio_user_profile_presigned_url( - context, - source: models.SagemakerStudioUserProfile, - sagemakerStudioUserProfileUri: str, -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - resource_uri=sagemakerStudioUserProfileUri, - permission_name=permissions.SGMSTUDIO_NOTEBOOK_URL, - groups=context.groups, - username=context.username, - ) - sm_user_profile = db.api.SgmStudioNotebook.get_notebook_by_uri( - session, sagemakerStudioUserProfileUri - ) - - url = SagemakerStudio.presigned_url( - AwsAccountId=sm_user_profile.AWSAccountId, - region=sm_user_profile.region, - sagemakerStudioDomainID=sm_user_profile.sagemakerStudioDomainID, - sagemakerStudioUserProfileNameSlugify=sm_user_profile.sagemakerStudioUserProfileNameSlugify, - ) - return url - - -def get_user_profile_applications(context, source: models.SagemakerStudioUserProfile): - if not source: - return None - with context.engine.scoped_session() as session: - sm_user_profile = get_sagemaker_studio_user_profile( - context, - source=source, - sagemakerStudioUserProfileUri=source.sagemakerStudioUserProfileUri, - ) - - user_profiles_applications = SagemakerStudio.get_user_profile_applications( - AwsAccountId=sm_user_profile.AWSAccountId, - region=sm_user_profile.region, - sagemakerStudioDomainID=sm_user_profile.sagemakerStudioDomainID, - sagemakerStudioUserProfileNameSlugify=sm_user_profile.sagemakerStudioUserProfileNameSlugify, - ) - - return user_profiles_applications - - -def 
delete_sagemaker_studio_user_profile( - context, - source: models.SagemakerStudioUserProfile, - sagemakerStudioUserProfileUri: str = None, - deleteFromAWS: bool = None, -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - resource_uri=sagemakerStudioUserProfileUri, - permission_name=permissions.DELETE_SGMSTUDIO_NOTEBOOK, - groups=context.groups, - username=context.username, - ) - sm_user_profile = db.api.SgmStudioNotebook.get_notebook_by_uri( - session, sagemakerStudioUserProfileUri - ) - env: models.Environment = db.api.Environment.get_environment_by_uri( - session, sm_user_profile.environmentUri - ) - - session.delete(sm_user_profile) - - ResourcePolicy.delete_resource_policy( - session=session, - resource_uri=sm_user_profile.sagemakerStudioUserProfileUri, - group=sm_user_profile.SamlAdminGroupName, - ) - - if deleteFromAWS: - stack_helper.delete_stack( - context=context, - target_uri=sagemakerStudioUserProfileUri, - accountid=env.AwsAccountId, - cdk_role_arn=env.CDKRoleArn, - region=env.region, - target_type='notebook', - ) - - return True - - -def resolve_environment(context, source, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - return session.query(models.Environment).get(source.environmentUri) - - -def resolve_organization(context, source, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - env: models.Environment = session.query(models.Environment).get( - source.environmentUri - ) - return session.query(models.Organization).get(env.organizationUri) - - -def resolve_stack( - context: Context, source: models.SagemakerStudioUserProfile, **kwargs -): - if not source: - return None - return stack_helper.get_stack_with_cfn_resources( - context=context, - targetUri=source.sagemakerStudioUserProfileUri, - environmentUri=source.environmentUri, - ) diff --git a/backend/dataall/api/Objects/SagemakerStudio/schema.py b/backend/dataall/api/Objects/SagemakerStudio/schema.py deleted file mode 100644 index b19f1967b..000000000 --- a/backend/dataall/api/Objects/SagemakerStudio/schema.py +++ /dev/null @@ -1,117 +0,0 @@ -from ... 
import gql -from .resolvers import * -from ....api.constants import SagemakerStudioRole - -SagemakerStudio = gql.ObjectType( - name='SagemakerStudio', - fields=[ - gql.Field(name='sagemakerStudioUri', type=gql.ID), - gql.Field(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='label', type=gql.String), - gql.Field(name='description', type=gql.String), - gql.Field(name='tags', type=gql.ArrayType(gql.String)), - gql.Field(name='name', type=gql.String), - gql.Field(name='owner', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='SamlAdminGroupName', type=gql.String), - gql.Field( - name='userRoleForSagemakerStudio', - type=SagemakerStudioRole.toGraphQLEnum(), - resolver=resolve_user_role, - ), - gql.Field( - name='SagemakerStudioStatus', type=gql.String, resolver=resolve_status - ), - gql.Field( - name='environment', - type=gql.Ref('Environment'), - resolver=resolve_environment, - ), - gql.Field( - name='organization', - type=gql.Ref('Organization'), - resolver=resolve_organization, - ), - gql.Field(name='stack', type=gql.Ref('Stack'), resolver=resolve_stack), - ], -) - -SagemakerStudioSearchResult = gql.ObjectType( - name='SagemakerStudioSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(SagemakerStudio)), - ], -) - -SagemakerStudioUserProfileApps = gql.ArrayType( - gql.ObjectType( - name='SagemakerStudioUserProfileApps', - fields=[ - gql.Field(name='DomainId', type=gql.String), - gql.Field(name='UserProfileName', type=gql.String), - gql.Field(name='AppType', type=gql.String), - gql.Field(name='AppName', type=gql.String), - gql.Field(name='Status', type=gql.String), - ], - ) -) - -SagemakerStudioUserProfile = gql.ObjectType( - name='SagemakerStudioUserProfile', - fields=[ - gql.Field(name='sagemakerStudioUserProfileUri', type=gql.ID), - gql.Field(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='label', type=gql.String), - gql.Field(name='description', type=gql.String), - gql.Field(name='tags', type=gql.ArrayType(gql.String)), - gql.Field(name='name', type=gql.String), - gql.Field(name='owner', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='SamlAdminGroupName', type=gql.String), - gql.Field( - name='userRoleForSagemakerStudioUserProfile', - type=SagemakerStudioRole.toGraphQLEnum(), - resolver=resolve_user_role, - ), - gql.Field( - name='sagemakerStudioUserProfileStatus', - type=gql.String, - resolver=resolve_status, - ), - gql.Field( - name='sagemakerStudioUserProfileApps', - type=SagemakerStudioUserProfileApps, - resolver=get_user_profile_applications, - ), - gql.Field( - name='environment', - type=gql.Ref('Environment'), - resolver=resolve_environment, - ), - gql.Field( - name='organization', - type=gql.Ref('Organization'), - resolver=resolve_organization, - ), - gql.Field(name='stack', type=gql.Ref('Stack'), resolver=resolve_stack), - ], -) - -SagemakerStudioUserProfileSearchResult = gql.ObjectType( - name='SagemakerStudioUserProfileSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', 
type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(SagemakerStudioUserProfile)), - ], -) diff --git a/backend/dataall/api/Objects/ShareObject/__init__.py b/backend/dataall/api/Objects/ShareObject/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/ShareObject/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/ShareObject/input_types.py b/backend/dataall/api/Objects/ShareObject/input_types.py deleted file mode 100644 index 0b7828825..000000000 --- a/backend/dataall/api/Objects/ShareObject/input_types.py +++ /dev/null @@ -1,77 +0,0 @@ -from ....api.constants import * - - -NewShareObjectInput = gql.InputType( - name='NewShareObjectInput', - arguments=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='groupUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='principalId', type=gql.NonNullableType(gql.String)), - gql.Argument(name='principalType', type=gql.NonNullableType(gql.String)), - gql.Argument(name='requestPurpose', type=gql.String), - ], -) - - -AddSharedItemInput = gql.InputType( - name='AddSharedItemInput', - arguments=[ - gql.Argument(name='itemUri', type=gql.NonNullableType(gql.String)), - gql.Argument( - name='itemType', type=gql.NonNullableType(ShareableType.toGraphQLEnum()) - ), - ], -) - - -RevokeItemsInput = gql.InputType( - name='RevokeItemsInput', - arguments=[ - gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='revokedItemUris', type=gql.NonNullableType(gql.ArrayType(gql.String))), - ], -) - - -class ShareSortField(GraphQLEnumMapper): - created = 'created' - updated = 'updated' - label = 'label' - - -ShareSortCriteria = gql.InputType( - name='ShareSortCriteria', - arguments=[ - gql.Argument( - name='field', type=gql.NonNullableType(ShareSortField.toGraphQLEnum()) - ), - gql.Argument( - name='direction', type=gql.NonNullableType(SortDirection.toGraphQLEnum()) - ), - ], -) - -ShareObjectFilter = gql.InputType( - name='ShareObjectFilter', - arguments=[ - gql.Argument('term', gql.String), - gql.Argument('sort', gql.ArrayType(ShareSortCriteria)), - gql.Argument('page', gql.Integer), - gql.Argument('pageSize', gql.Integer), - gql.Argument('roles', gql.ArrayType(OrganisationUserRole.toGraphQLEnum())), - gql.Argument('tags', gql.ArrayType(gql.String)), - ], -) - - -ShareableObjectFilter = gql.InputType( - name='ShareableObjectFilter', - arguments=[ - gql.Argument(name='term', type=gql.String), - gql.Argument('tags', gql.ArrayType(gql.String)), - gql.Argument(name='isShared', type=gql.Boolean), - gql.Argument(name='isRevokable', type=gql.Boolean), - gql.Argument('page', gql.Integer), - gql.Argument('pageSize', gql.Integer), - ], -) diff --git a/backend/dataall/api/Objects/ShareObject/mutations.py b/backend/dataall/api/Objects/ShareObject/mutations.py deleted file mode 100644 index 68e7d18d8..000000000 --- a/backend/dataall/api/Objects/ShareObject/mutations.py +++ /dev/null @@ -1,93 +0,0 @@ -from .resolvers import * - -createShareObject = gql.MutationField( - name='createShareObject', - args=[ - gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='itemUri', type=gql.String), - gql.Argument(name='itemType', type=gql.String), - gql.Argument( - 
name='input', type=gql.NonNullableType(gql.Ref('NewShareObjectInput')) - ), - ], - type=gql.Ref('ShareObject'), - resolver=create_share_object, -) - -deleteShareObject = gql.MutationField( - name='deleteShareObject', - args=[gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String))], - resolver=delete_share_object, - type=gql.Boolean -) - -addSharedItem = gql.MutationField( - name='addSharedItem', - args=[ - gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=gql.Ref('AddSharedItemInput')), - ], - type=gql.Ref('ShareItem'), - resolver=add_shared_item, -) - - -removeSharedItem = gql.MutationField( - name='removeSharedItem', - args=[gql.Argument(name='shareItemUri', type=gql.NonNullableType(gql.String))], - resolver=remove_shared_item, - type=gql.Boolean, -) - -submitShareObject = gql.MutationField( - name='submitShareObject', - args=[gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('ShareObject'), - resolver=submit_share_object, -) - -approveShareObject = gql.MutationField( - name='approveShareObject', - args=[gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('ShareObject'), - resolver=approve_share_object, -) - - -rejectShareObject = gql.MutationField( - name='rejectShareObject', - args=[ - gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='rejectPurpose', type=gql.String), - ], - type=gql.Ref('ShareObject'), - resolver=reject_share_object, -) - -revokeItemsShareObject = gql.MutationField( - name='revokeItemsShareObject', - args=[gql.Argument(name='input', type=gql.Ref('RevokeItemsInput'))], - type=gql.Ref('ShareObject'), - resolver=revoke_items_share_object, -) - -updateShareRejectReason = gql.MutationField( - name='updateShareRejectReason', - args=[ - gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='rejectPurpose', type=gql.String) - ], - type=gql.Boolean, - resolver=update_share_reject_purpose, -) - - -updateShareRequestReason = gql.MutationField( - name='updateShareRequestReason', - args=[ - gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='requestPurpose', type=gql.String) - ], - type=gql.Boolean, - resolver=update_share_request_purpose, -) diff --git a/backend/dataall/api/Objects/ShareObject/queries.py b/backend/dataall/api/Objects/ShareObject/queries.py deleted file mode 100644 index e74be6b03..000000000 --- a/backend/dataall/api/Objects/ShareObject/queries.py +++ /dev/null @@ -1,23 +0,0 @@ -from .resolvers import * - -getShareObject = gql.QueryField( - name='getShareObject', - args=[gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('ShareObject'), - resolver=get_share_object, -) - - -getShareRequestsFromMe = gql.QueryField( - name='getShareRequestsFromMe', - args=[gql.Argument(name='filter', type=gql.Ref('ShareObjectFilter'))], - type=gql.Ref('ShareSearchResult'), - resolver=list_shares_in_my_outbox, -) - -getShareRequestsToMe = gql.QueryField( - name='getShareRequestsToMe', - args=[gql.Argument(name='filter', type=gql.Ref('ShareObjectFilter'))], - type=gql.Ref('ShareSearchResult'), - resolver=list_shares_in_my_inbox, -) diff --git a/backend/dataall/api/Objects/ShareObject/resolvers.py b/backend/dataall/api/Objects/ShareObject/resolvers.py deleted file mode 100644 index bdc03545d..000000000 --- a/backend/dataall/api/Objects/ShareObject/resolvers.py +++ /dev/null @@ -1,395 +0,0 @@ -import logging - - 
-from .... import db -from .... import utils -from ....api.constants import * -from ....api.context import Context -from ....aws.handlers.service_handlers import Worker -from ....db import models - -log = logging.getLogger(__name__) - - -def get_share_object_dataset(context, source, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - share: models.ShareObject = session.query(models.ShareObject).get( - source.shareUri - ) - return session.query(models.Dataset).get(share.datasetUri) - - -def create_share_object( - context: Context, - source, - datasetUri: str = None, - itemUri: str = None, - itemType: str = None, - input: dict = None, -): - - with context.engine.scoped_session() as session: - dataset: models.Dataset = db.api.Dataset.get_dataset_by_uri(session, datasetUri) - environment: models.Environment = db.api.Environment.get_environment_by_uri( - session, input['environmentUri'] - ) - input['dataset'] = dataset - input['environment'] = environment - input['itemUri'] = itemUri - input['itemType'] = itemType - input['datasetUri'] = datasetUri - return db.api.ShareObject.create_share_object( - session=session, - username=context.username, - groups=context.groups, - uri=environment.environmentUri, - data=input, - check_perm=True, - ) - - -def submit_share_object(context: Context, source, shareUri: str = None): - with context.engine.scoped_session() as session: - return db.api.ShareObject.submit_share_object( - session=session, - username=context.username, - groups=context.groups, - uri=shareUri, - data=None, - check_perm=True, - ) - - -def approve_share_object(context: Context, source, shareUri: str = None): - with context.engine.scoped_session() as session: - share = db.api.ShareObject.approve_share_object( - session=session, - username=context.username, - groups=context.groups, - uri=shareUri, - data=None, - check_perm=True, - ) - - approve_share_task: models.Task = models.Task( - action='ecs.share.approve', - targetUri=shareUri, - payload={'environmentUri': share.environmentUri}, - ) - session.add(approve_share_task) - - Worker.queue(engine=context.engine, task_ids=[approve_share_task.taskUri]) - - return share - - -def reject_share_object(context: Context, source, shareUri: str = None, rejectPurpose: str = None,): - with context.engine.scoped_session() as session: - return db.api.ShareObject.reject_share_object( - session=session, - username=context.username, - groups=context.groups, - uri=shareUri, - data={"rejectPurpose": rejectPurpose}, - check_perm=True, - ) - - -def revoke_items_share_object(context: Context, source, input): - with context.engine.scoped_session() as session: - share = db.api.ShareObject.revoke_items_share_object( - session=session, - username=context.username, - groups=context.groups, - uri=input.get("shareUri"), - data=input, - check_perm=True, - ) - - revoke_share_task: models.Task = models.Task( - action='ecs.share.revoke', - targetUri=input.get("shareUri"), - payload={'environmentUri': share.environmentUri}, - ) - session.add(revoke_share_task) - - Worker.queue(engine=context.engine, task_ids=[revoke_share_task.taskUri]) - - return share - - -def delete_share_object(context: Context, source, shareUri: str = None): - with context.engine.scoped_session() as session: - share = db.api.ShareObject.get_share_by_uri(session, shareUri) - if not share: - raise db.exceptions.ObjectNotFound('ShareObject', shareUri) - - db.api.ShareObject.delete_share_object( - session=session, - username=context.username, - groups=context.groups, 
- uri=shareUri, - check_perm=True, - ) - - return True - - -def add_shared_item(context, source, shareUri: str = None, input: dict = None): - with context.engine.scoped_session() as session: - share_item = db.api.ShareObject.add_share_object_item( - session=session, - username=context.username, - groups=context.groups, - uri=shareUri, - data=input, - check_perm=True, - ) - return share_item - - -def remove_shared_item(context, source, shareItemUri: str = None): - with context.engine.scoped_session() as session: - share_item: models.ShareObjectItem = session.query(models.ShareObjectItem).get( - shareItemUri - ) - if not share_item: - raise db.exceptions.ObjectNotFound('ShareObjectItem', shareItemUri) - share = db.api.ShareObject.get_share_by_uri(session, share_item.shareUri) - db.api.ShareObject.remove_share_object_item( - session=session, - username=context.username, - groups=context.groups, - uri=share.shareUri, - data={ - 'shareItemUri': shareItemUri, - 'share_item': share_item, - 'share': share, - }, - check_perm=True, - ) - return True - - -def list_shared_items( - context: Context, source: models.ShareObject, filter: dict = None -): - if not source: - return None - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.ShareObject.list_shared_items( - session=session, - username=context.username, - groups=context.groups, - uri=source.shareUri, - data=filter, - check_perm=True, - ) - - -def resolve_shared_item(context, source: models.ShareObjectItem, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - return db.api.ShareObject.get_share_item( - session=session, - username=context.username, - groups=context.groups, - uri=source.shareUri, - data={'share_item': source}, - check_perm=True, - ) - - -def get_share_object(context, source, shareUri: str = None): - with context.engine.scoped_session() as session: - return db.api.ShareObject.get_share_object( - session=session, - username=context.username, - groups=context.groups, - uri=shareUri, - data=None, - check_perm=True, - ) - - -def resolve_user_role(context: Context, source: models.ShareObject, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - dataset: models.Dataset = db.api.Dataset.get_dataset_by_uri(session, source.datasetUri) - if ( - dataset and ( - dataset.stewards in context.groups - or dataset.SamlAdminGroupName in context.groups - or dataset.owner == context.username - ) - ): - return ShareObjectPermission.Approvers.value - if ( - source.owner == context.username - or source.groupUri in context.groups - ): - return ShareObjectPermission.Requesters.value - else: - return ShareObjectPermission.NoPermission.value - - -def resolve_dataset(context: Context, source: models.ShareObject, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - ds: models.Dataset = db.api.Dataset.get_dataset_by_uri(session, source.datasetUri) - if ds: - env: models.Environment = db.api.Environment.get_environment_by_uri(session, ds.environmentUri) - return { - 'datasetUri': source.datasetUri, - 'datasetName': ds.name if ds else 'NotFound', - 'SamlAdminGroupName': ds.SamlAdminGroupName if ds else 'NotFound', - 'environmentName': env.label if env else 'NotFound', - 'AwsAccountId': env.AwsAccountId if env else 'NotFound', - 'region': env.region if env else 'NotFound', - 'exists': True if ds else False, - } - - -def union_resolver(object, *_): - if isinstance(object, models.DatasetTable): - 
return 'DatasetTable' - elif isinstance(object, models.DatasetStorageLocation): - return 'DatasetStorageLocation' - - -def resolve_principal(context: Context, source: models.ShareObject, **kwargs): - if not source: - return None - from ..Principal.resolvers import get_principal - - with context.engine.scoped_session() as session: - return get_principal( - session, source.principalId, source.principalType, source.principalIAMRoleName, source.environmentUri, source.groupUri - ) - - -def resolve_environment(context: Context, source: models.ShareObject, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - environment = db.api.Environment.get_environment_by_uri( - session, source.environmentUri - ) - return environment - - -def resolve_group(context: Context, source: models.ShareObject, **kwargs): - if not source: - return None - return source.groupUri - - -def resolve_consumption_data(context: Context, source: models.ShareObject, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - ds: models.Dataset = db.api.Dataset.get_dataset_by_uri(session, source.datasetUri) - if ds: - S3AccessPointName = utils.slugify( - source.datasetUri + '-' + source.principalId, - max_length=50, lowercase=True, regex_pattern='[^a-zA-Z0-9-]', separator='-' - ) - return { - 's3AccessPointName': S3AccessPointName, - 'sharedGlueDatabase': (ds.GlueDatabaseName + '_shared_' + source.shareUri)[:254] if ds else 'Not created', - } - - -def resolve_share_object_statistics(context: Context, source: models.ShareObject, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - return db.api.ShareObject.resolve_share_object_statistics( - session, source.shareUri - ) - - -def resolve_existing_shared_items(context: Context, source: models.ShareObject, **kwargs): - if not source: - return None - with context.engine.scoped_session() as session: - return db.api.ShareObject.check_existing_shared_items( - session, source.shareUri - ) - - -def list_shareable_objects( - context: Context, source: models.ShareObject, filter: dict = None -): - if not source: - return None - if not filter: - filter = {'page': 1, 'pageSize': 5} - with context.engine.scoped_session() as session: - return db.api.ShareObject.list_shareable_items( - session=session, - username=context.username, - groups=context.groups, - uri=source.shareUri, - data=filter, - check_perm=True, - ) - - -def list_shares_in_my_inbox(context: Context, source, filter: dict = None): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.ShareObject.list_user_received_share_requests( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=filter, - check_perm=None, - ) - - -def list_shares_in_my_outbox(context: Context, source, filter: dict = None): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.ShareObject.list_user_sent_share_requests( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=filter, - check_perm=None, - ) - - -def update_share_request_purpose(context: Context, source, shareUri: str = None, requestPurpose: str = None): - with context.engine.scoped_session() as session: - return db.api.ShareObject.update_share_request_purpose( - session=session, - username=context.username, - groups=context.groups, - uri=shareUri, - data={"requestPurpose": requestPurpose}, - check_perm=True, - ) - - -def 
update_share_reject_purpose(context: Context, source, shareUri: str = None, rejectPurpose: str = None): - with context.engine.scoped_session() as session: - return db.api.ShareObject.update_share_reject_purpose( - session=session, - username=context.username, - groups=context.groups, - uri=shareUri, - data={"rejectPurpose": rejectPurpose}, - check_perm=True, - ) diff --git a/backend/dataall/api/Objects/ShareObject/schema.py b/backend/dataall/api/Objects/ShareObject/schema.py deleted file mode 100644 index 4bcf806a9..000000000 --- a/backend/dataall/api/Objects/ShareObject/schema.py +++ /dev/null @@ -1,169 +0,0 @@ -from .resolvers import * - -ShareableObject = gql.Union( - name='ShareableObject', - types=[gql.Ref('DatasetTable'), gql.Ref('DatasetStorageLocation')], - resolver=union_resolver, -) - - -ShareItem = gql.ObjectType( - name='ShareItem', - fields=[ - gql.Field(name='shareUri', type=gql.String), - gql.Field(name='shareItemUri', type=gql.ID), - gql.Field('itemUri', gql.String), - gql.Field(name='status', type=gql.Ref('ShareItemStatus')), - gql.Field(name='action', type=gql.String), - gql.Field('itemType', ShareableType.toGraphQLEnum()), - gql.Field('itemName', gql.String), - gql.Field('description', gql.String), - gql.Field( - name='sharedObject', - type=gql.Ref('ShareableObject'), - resolver=resolve_shared_item, - ), - # gql.Field(name="permission", type=gql.String) - ], -) - -NotSharedItem = gql.ObjectType( - name='NotSharedItem', - fields=[ - gql.Field('itemUri', gql.String), - gql.Field('shareItemUri', gql.String), - gql.Field('itemType', ShareableType.toGraphQLEnum()), - gql.Field('label', gql.String), - # gql.Field("permission", DatasetRole.toGraphQLEnum()), - gql.Field('tags', gql.ArrayType(gql.String)), - gql.Field('created', gql.String), - ], -) - - -NotSharedItemsSearchResult = gql.ObjectType( - name='NotSharedItemsSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='pageSize', type=gql.Integer), - gql.Field(name='nextPage', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='previousPage', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(NotSharedItem)), - ], -) - - -SharedItemSearchResult = gql.ObjectType( - name='SharedItemSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='pageSize', type=gql.Integer), - gql.Field(name='nextPage', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='previousPage', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('ShareItem'))), - ], -) - -ShareObjectStatistic = gql.ObjectType( - name='ShareObjectStatistic', - fields=[ - gql.Field(name='locations', type=gql.Integer), - gql.Field(name='tables', type=gql.Integer), - gql.Field(name='sharedItems', type=gql.Integer), - gql.Field(name='revokedItems', type=gql.Integer), - gql.Field(name='failedItems', type=gql.Integer), - gql.Field(name='pendingItems', type=gql.Integer), - ], -) - -DatasetLink = gql.ObjectType( - name='DatasetLink', - fields=[ - gql.Field(name='datasetUri', type=gql.String), - gql.Field(name='datasetName', type=gql.String), - gql.Field(name='SamlAdminGroupName', type=gql.String), - gql.Field(name='environmentName', type=gql.String), 
- gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='region', type=gql.String), - gql.Field(name='exists', type=gql.Boolean), - ], -) - -ConsumptionData = gql.ObjectType( - name='ConsumptionData', - fields=[ - gql.Field(name='s3AccessPointName', type=gql.String), - gql.Field(name='sharedGlueDatabase', type=gql.String), - ], -) - -ShareObject = gql.ObjectType( - name='ShareObject', - fields=[ - gql.Field(name='shareUri', type=gql.ID), - gql.Field(name='status', type=gql.Ref('ShareObjectStatus')), - gql.Field(name='owner', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='deleted', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='datasetUri', type=gql.String), - gql.Field(name='requestPurpose', type=gql.String), - gql.Field(name='rejectPurpose', type=gql.String), - gql.Field(name='dataset', type=DatasetLink, resolver=resolve_dataset), - gql.Field(name='consumptionData', type=gql.Ref('ConsumptionData'), resolver=resolve_consumption_data), - gql.Field(name='existingSharedItems', type=gql.Boolean, resolver=resolve_existing_shared_items), - gql.Field( - name='statistics', - type=gql.Ref('ShareObjectStatistic'), - resolver=resolve_share_object_statistics, - ), - gql.Field( - name='principal', resolver=resolve_principal, type=gql.Ref('Principal') - ), - gql.Field( - name='environment', - resolver=resolve_environment, - type=gql.Ref('Environment'), - ), - gql.Field( - name='group', - resolver=resolve_group, - type=gql.String, - ), - gql.Field( - 'items', - args=[gql.Argument(name='filter', type=gql.Ref('ShareableObjectFilter'))], - type=gql.Ref('SharedItemSearchResult'), - resolver=list_shareable_objects, - ), - gql.Field( - name='userRoleForShareObject', - type=gql.Ref('ShareObjectPermission'), - resolver=resolve_user_role, - ), - ], -) - - -ShareSearchResult = gql.ObjectType( - name='ShareSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='pageSize', type=gql.Integer), - gql.Field(name='nextPage', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='previousPage', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('ShareObject'))), - ], -) diff --git a/backend/dataall/api/Objects/Stack/__init__.py b/backend/dataall/api/Objects/Stack/__init__.py deleted file mode 100644 index 9ddd214b4..000000000 --- a/backend/dataall/api/Objects/Stack/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, - stack_helper, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations', 'stack_helper'] diff --git a/backend/dataall/api/Objects/Stack/mutations.py b/backend/dataall/api/Objects/Stack/mutations.py deleted file mode 100644 index ee2615da7..000000000 --- a/backend/dataall/api/Objects/Stack/mutations.py +++ /dev/null @@ -1,13 +0,0 @@ -from ... 
import gql -from .resolvers import * - - -updateStack = gql.MutationField( - name='updateStack', - type=gql.Ref('Stack'), - args=[ - gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), - ], - resolver=update_stack, -) diff --git a/backend/dataall/api/Objects/Stack/queries.py b/backend/dataall/api/Objects/Stack/queries.py deleted file mode 100644 index 324d2e092..000000000 --- a/backend/dataall/api/Objects/Stack/queries.py +++ /dev/null @@ -1,22 +0,0 @@ -from ... import gql -from .resolvers import * - -getStack = gql.QueryField( - name='getStack', - type=gql.Ref('Stack'), - args=[ - gql.Argument(name='environmentUri', type=gql.String), - gql.Argument(name='stackUri', type=gql.NonNullableType(gql.String)), - ], - resolver=get_stack, -) - -getStackLogs = gql.QueryField( - name='getStackLogs', - type=gql.ArrayType(gql.Ref('StackLog')), - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='stackUri', type=gql.NonNullableType(gql.String)), - ], - resolver=get_stack_logs, -) diff --git a/backend/dataall/api/Objects/Stack/resolvers.py b/backend/dataall/api/Objects/Stack/resolvers.py deleted file mode 100644 index 52988f163..000000000 --- a/backend/dataall/api/Objects/Stack/resolvers.py +++ /dev/null @@ -1,117 +0,0 @@ -import json -import logging -import os - -from . import stack_helper -from ...context import Context -from .... import db -from ....aws.handlers.cloudformation import CloudFormation -from ....aws.handlers.cloudwatch import CloudWatch -from ....db import exceptions -from ....db import models -from ....utils import Parameter - -log = logging.getLogger(__name__) - - -def get_stack( - context: Context, source, environmentUri: str = None, stackUri: str = None -): - with context.engine.scoped_session() as session: - env: models.Environment = session.query(models.Environment).get(environmentUri) - stack: models.Stack = session.query(models.Stack).get(stackUri) - cfn_task = stack_helper.save_describe_stack_task(session, env, stack, None) - CloudFormation.describe_stack_resources(engine=context.engine, task=cfn_task) - return db.api.Environment.get_stack( - session=session, - username=context.username, - groups=context.groups, - uri=environmentUri, - data={'stackUri': stackUri}, - check_perm=True, - ) - - -def resolve_link(context, source, **kwargs): - if not source: - return None - return f'https://{source.region}.console.aws.amazon.com/cloudformation/home?region={source.region}#/stacks/stackinfo?stackId={source.stackid}' - - -def resolve_outputs(context, source: models.Stack, **kwargs): - if not source: - return None - return json.dumps(source.outputs or {}) - - -def resolve_resources(context, source: models.Stack, **kwargs): - if not source: - return None - return json.dumps(source.resources or {}) - - -def resolve_error(context, source: models.Stack, **kwargs): - if not source: - return None - return json.dumps(source.error or {}) - - -def resolve_events(context, source: models.Stack, **kwargs): - if not source: - return None - return json.dumps(source.events or {}) - - -def resolve_task_id(context, source: models.Stack, **kwargs): - if not source: - return None - if source.EcsTaskArn: - return source.EcsTaskArn.split('/')[-1] - - -def get_stack_logs( - context: Context, source, environmentUri: str = None, stackUri: str = None -): - with context.engine.scoped_session() as session: - stack = db.api.Environment.get_stack( - session=session, - 
username=context.username, - groups=context.groups, - uri=environmentUri, - data={'stackUri': stackUri}, - check_perm=True, - ) - if not stack.EcsTaskArn: - raise exceptions.AWSResourceNotFound( - action='GET_STACK_LOGS', - message='Logs could not be found for this stack', - ) - - query = f"""fields @timestamp, @message, @logStream, @log as @logGroup - | sort @timestamp asc - | filter @logStream like "{stack.EcsTaskArn.split('/')[-1]}" - """ - envname = os.getenv('envname', 'local') - results = CloudWatch.run_query( - query=query, - log_group_name=f"/{Parameter().get_parameter(env=envname, path='resourcePrefix')}/{envname}/ecs/cdkproxy", - days=1, - ) - log.info(f'Running Logs query {query}') - return results - - -def update_stack( - context: Context, source, targetUri: str = None, targetType: str = None -): - with context.engine.scoped_session() as session: - stack = db.api.Stack.update_stack( - session=session, - username=context.username, - groups=context.groups, - uri=targetUri, - data={'targetType': targetType}, - check_perm=True, - ) - stack_helper.deploy_stack(context, stack.targetUri) - return stack diff --git a/backend/dataall/api/Objects/Stack/schema.py b/backend/dataall/api/Objects/Stack/schema.py deleted file mode 100644 index cc633a69d..000000000 --- a/backend/dataall/api/Objects/Stack/schema.py +++ /dev/null @@ -1,34 +0,0 @@ -from ... import gql -from .resolvers import * - -Stack = gql.ObjectType( - name='Stack', - fields=[ - gql.Field(name='stackUri', type=gql.ID), - gql.Field(name='targetUri', type=gql.NonNullableType(gql.String)), - gql.Field(name='stack', type=gql.NonNullableType(gql.String)), - gql.Field(name='environmentUri', type=gql.String), - gql.Field(name='name', type=gql.String), - gql.Field(name='accountid', type=gql.NonNullableType(gql.String)), - gql.Field(name='region', type=gql.NonNullableType(gql.String)), - gql.Field(name='status', type=gql.String), - gql.Field(name='stackid', type=gql.String), - gql.Field(name='link', type=gql.String, resolver=resolve_link), - gql.Field(name='outputs', type=gql.String, resolver=resolve_outputs), - gql.Field(name='resources', type=gql.String, resolver=resolve_resources), - gql.Field(name='error', type=gql.String, resolver=resolve_error), - gql.Field(name='events', type=gql.String, resolver=resolve_events), - gql.Field(name='EcsTaskArn', type=gql.String), - gql.Field(name='EcsTaskId', type=gql.String, resolver=resolve_task_id), - ], -) - -StackLog = gql.ObjectType( - name='StackLog', - fields=[ - gql.Field(name='logStream', type=gql.String), - gql.Field(name='logGroup', type=gql.String), - gql.Field(name='timestamp', type=gql.String), - gql.Field(name='message', type=gql.String), - ], -) diff --git a/backend/dataall/api/Objects/Stack/stack_helper.py b/backend/dataall/api/Objects/Stack/stack_helper.py deleted file mode 100644 index ea2857ba9..000000000 --- a/backend/dataall/api/Objects/Stack/stack_helper.py +++ /dev/null @@ -1,132 +0,0 @@ -import os - -import requests - -from .... 
import db -from ....api.context import Context -from ....aws.handlers.service_handlers import Worker -from ....aws.handlers.ecs import Ecs -from ....db import models -from ....utils import Parameter - - -def get_stack_with_cfn_resources(context: Context, targetUri: str, environmentUri: str): - with context.engine.scoped_session() as session: - env: models.Environment = session.query(models.Environment).get(environmentUri) - stack: models.Stack = db.api.Stack.find_stack_by_target_uri( - session, target_uri=targetUri - ) - if not stack: - stack = models.Stack( - stack='environment', - payload={}, - targetUri=targetUri, - accountid=env.AwsAccountId if env else 'UNKNOWN', - region=env.region if env else 'UNKNOWN', - resources=str({}), - error=str({}), - outputs=str({}), - ) - return stack - - cfn_task = save_describe_stack_task(session, env, stack, targetUri) - Worker.queue(engine=context.engine, task_ids=[cfn_task.taskUri]) - return stack - - -def save_describe_stack_task(session, environment, stack, target_uri): - cfn_task = models.Task( - targetUri=stack.stackUri, - action='cloudformation.stack.describe_resources', - payload={ - 'accountid': environment.AwsAccountId, - 'region': environment.region, - 'role_arn': environment.CDKRoleArn, - 'stack_name': stack.name, - 'stackUri': stack.stackUri, - 'targetUri': target_uri, - }, - ) - session.add(cfn_task) - session.commit() - return cfn_task - - -def deploy_stack(context, targetUri): - with context.engine.scoped_session() as session: - stack: models.Stack = db.api.Stack.get_stack_by_target_uri( - session, target_uri=targetUri - ) - envname = os.getenv('envname', 'local') - - if envname in ['local', 'pytest', 'dkrcompose']: - requests.post(f'{context.cdkproxyurl}/stack/{stack.stackUri}') - - else: - cluster_name = Parameter().get_parameter( - env=envname, path='ecs/cluster/name' - ) - if not Ecs.is_task_running(cluster_name, f'awsworker-{stack.stackUri}'): - stack.EcsTaskArn = Ecs.run_cdkproxy_task(stack.stackUri) - else: - task: models.Task = models.Task( - action='ecs.cdkproxy.deploy', targetUri=stack.stackUri - ) - session.add(task) - session.commit() - Worker.queue(engine=context.engine, task_ids=[task.taskUri]) - - return stack - - -def deploy_dataset_stack(context, dataset: models.Dataset): - """ - Each dataset stack deployment triggers environment stack update - to rebuild teams IAM roles data access policies - """ - deploy_stack(context, dataset.datasetUri) - deploy_stack(context, dataset.environmentUri) - - -def delete_stack( - context, target_uri, accountid, cdk_role_arn, region, target_type=None -): - with context.engine.scoped_session() as session: - stack: models.Stack = db.api.Stack.find_stack_by_target_uri( - session, target_uri=target_uri - ) - if not stack: - return - task = models.Task( - targetUri=target_uri, - action='cloudformation.stack.delete', - payload={ - 'accountid': accountid, - 'region': region, - 'cdk_role_arn': cdk_role_arn, - 'stack_name': stack.name, - }, - ) - session.add(task) - - Worker.queue(context.engine, [task.taskUri]) - return True - - -def delete_repository( - context, target_uri, accountid, cdk_role_arn, region, repo_name -): - with context.engine.scoped_session() as session: - task = models.Task( - targetUri=target_uri, - action='repo.datapipeline.delete', - payload={ - 'accountid': accountid, - 'region': region, - 'cdk_role_arn': cdk_role_arn, - 'repo_name': repo_name, - }, - ) - session.add(task) - Worker.queue(context.engine, [task.taskUri]) - return True diff --git 
a/backend/dataall/api/Objects/Tenant/__init__.py b/backend/dataall/api/Objects/Tenant/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/Tenant/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/Tenant/input_types.py b/backend/dataall/api/Objects/Tenant/input_types.py deleted file mode 100644 index be980e807..000000000 --- a/backend/dataall/api/Objects/Tenant/input_types.py +++ /dev/null @@ -1,9 +0,0 @@ -from ... import gql - -UpdateGroupTenantPermissionsInput = gql.InputType( - name='UpdateGroupTenantPermissionsInput', - arguments=[ - gql.Argument('permissions', gql.ArrayType(gql.String)), - gql.Argument('groupUri', gql.NonNullableType(gql.String)), - ], -) diff --git a/backend/dataall/api/Objects/Tenant/mutations.py b/backend/dataall/api/Objects/Tenant/mutations.py deleted file mode 100644 index 7f57a5050..000000000 --- a/backend/dataall/api/Objects/Tenant/mutations.py +++ /dev/null @@ -1,33 +0,0 @@ -from ... import gql -from .input_types import UpdateGroupTenantPermissionsInput -from .resolvers import * - -updateGroupPermission = gql.MutationField( - name='updateGroupTenantPermissions', - args=[ - gql.Argument( - name='input', type=gql.NonNullableType(UpdateGroupTenantPermissionsInput) - ) - ], - type=gql.Boolean, - resolver=update_group_permissions, -) - -createQuicksightDataSourceSet = gql.MutationField( - name='createQuicksightDataSourceSet', - args=[ - gql.Argument(name='vpcConnectionId', type=gql.NonNullableType(gql.String)) - ], - type=gql.String, - resolver=create_quicksight_data_source_set, -) - -updateSSMParameter = gql.MutationField( - name='updateSSMParameter', - args=[ - gql.Argument(name='name', type=gql.NonNullableType(gql.String)), - gql.Argument(name='value', type=gql.NonNullableType(gql.String)) - ], - type=gql.String, - resolver=update_ssm_parameter, -) diff --git a/backend/dataall/api/Objects/Tenant/queries.py b/backend/dataall/api/Objects/Tenant/queries.py deleted file mode 100644 index 62cac727f..000000000 --- a/backend/dataall/api/Objects/Tenant/queries.py +++ /dev/null @@ -1,48 +0,0 @@ -from ... 
import gql -from .resolvers import * - - -listTenantPermissions = gql.QueryField( - name='listTenantPermissions', - type=gql.ArrayType(gql.Ref('Permission')), - resolver=list_tenant_permissions, -) - -listTenantGroups = gql.QueryField( - name='listTenantGroups', - args=[ - gql.Argument(name='filter', type=gql.Ref('GroupFilter')), - ], - type=gql.Ref('GroupSearchResult'), - resolver=list_tenant_groups, -) - -getMonitoringDashboardId = gql.QueryField( - name='getMonitoringDashboardId', - type=gql.String, - resolver=get_monitoring_dashboard_id, -) - -getMonitoringVpcConnectionId = gql.QueryField( - name='getMonitoringVPCConnectionId', - type=gql.String, - resolver=get_monitoring_vpc_connection_id, -) - -getPlatformAuthorSession = gql.QueryField( - name='getPlatformAuthorSession', - args=[ - gql.Argument(name='awsAccount', type=gql.NonNullableType(gql.String)), - ], - type=gql.String, - resolver=get_quicksight_author_session, -) - -getPlatformReaderSession = gql.QueryField( - name='getPlatformReaderSession', - args=[ - gql.Argument(name='dashboardId', type=gql.NonNullableType(gql.String)), - ], - type=gql.String, - resolver=get_quicksight_reader_session, -) diff --git a/backend/dataall/api/Objects/Tenant/resolvers.py b/backend/dataall/api/Objects/Tenant/resolvers.py deleted file mode 100644 index 8bc57be62..000000000 --- a/backend/dataall/api/Objects/Tenant/resolvers.py +++ /dev/null @@ -1,132 +0,0 @@ -import os - -from .... import db -from ....aws.handlers.sts import SessionHelper -from ....aws.handlers.parameter_store import ParameterStoreManager -from ....aws.handlers.quicksight import Quicksight -from ....db import exceptions - - -def update_group_permissions(context, source, input=None): - with context.engine.scoped_session() as session: - return db.api.TenantPolicy.update_group_permissions( - session=session, - username=context.username, - groups=context.groups, - uri=input['groupUri'], - data=input, - check_perm=True, - ) - - -def list_tenant_permissions(context, source): - with context.engine.scoped_session() as session: - return db.api.TenantPolicy.list_tenant_permissions( - session=session, username=context.username, groups=context.groups - ) - - -def list_tenant_groups(context, source, filter=None): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.TenantPolicy.list_tenant_groups( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=filter, - check_perm=True, - ) - - -def update_ssm_parameter(context, source, name: str = None, value: str = None): - current_account = SessionHelper.get_account() - region = os.getenv('AWS_REGION', 'eu-west-1') - print(value) - print(name) - response = ParameterStoreManager.update_parameter(AwsAccountId=current_account, region=region, parameter_name=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/{name}', parameter_value=value) - return response - - -def get_monitoring_dashboard_id(context, source): - current_account = SessionHelper.get_account() - region = os.getenv('AWS_REGION', 'eu-west-1') - dashboard_id = ParameterStoreManager.get_parameter_value(AwsAccountId=current_account, region=region, parameter_path=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/DashboardId') - if not dashboard_id: - raise exceptions.AWSResourceNotFound( - action='GET_DASHBOARD_ID', - message='Dashboard Id could not be found on AWS Parameter Store', - ) - return dashboard_id - - -def get_monitoring_vpc_connection_id(context, source): - current_account = 
SessionHelper.get_account() - region = os.getenv('AWS_REGION', 'eu-west-1') - vpc_connection_id = ParameterStoreManager.get_parameter_value(AwsAccountId=current_account, region=region, parameter_path=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/VPCConnectionId') - if not vpc_connection_id: - raise exceptions.AWSResourceNotFound( - action='GET_VPC_CONNECTION_ID', - message='Dashboard Id could not be found on AWS Parameter Store', - ) - return vpc_connection_id - - -def create_quicksight_data_source_set(context, source, vpcConnectionId: str = None): - current_account = SessionHelper.get_account() - region = os.getenv('AWS_REGION', 'eu-west-1') - user = Quicksight.register_user_in_group(AwsAccountId=current_account, UserName=context.username, GroupName='dataall', UserRole='AUTHOR') - - datasourceId = Quicksight.create_data_source_vpc(AwsAccountId=current_account, region=region, UserName=context.username, vpcConnectionId=vpcConnectionId) - # Data sets are not created programmatically. Too much overhead for the value added. However, an example API is provided: - # datasets = Quicksight.create_data_set_from_source(AwsAccountId=current_account, region=region, UserName='dataallTenantUser', dataSourceId=datasourceId, tablesToImport=['organization', 'environment', 'dataset', 'datapipeline', 'dashboard', 'share_object']) - - return datasourceId - - -def get_quicksight_author_session(context, source, awsAccount: str = None): - with context.engine.scoped_session() as session: - admin = db.api.TenantPolicy.is_tenant_admin(context.groups) - - if not admin: - raise db.exceptions.TenantUnauthorized( - username=context.username, - action=db.permissions.TENANT_ALL, - tenant_name=context.username, - ) - region = os.getenv('AWS_REGION', 'eu-west-1') - - url = Quicksight.get_author_session( - AwsAccountId=awsAccount, - region=region, - UserName=context.username, - UserRole='AUTHOR', - ) - - return url - - -def get_quicksight_reader_session(context, source, dashboardId: str = None): - with context.engine.scoped_session() as session: - admin = db.api.TenantPolicy.is_tenant_admin(context.groups) - - if not admin: - raise db.exceptions.TenantUnauthorized( - username=context.username, - action=db.permissions.TENANT_ALL, - tenant_name=context.username, - ) - - region = os.getenv('AWS_REGION', 'eu-west-1') - current_account = SessionHelper.get_account() - - url = Quicksight.get_reader_session( - AwsAccountId=current_account, - region=region, - UserName=context.username, - UserRole='READER', - DashboardId=dashboardId - ) - - return url diff --git a/backend/dataall/api/Objects/Tenant/schema.py b/backend/dataall/api/Objects/Tenant/schema.py deleted file mode 100644 index 833995243..000000000 --- a/backend/dataall/api/Objects/Tenant/schema.py +++ /dev/null @@ -1,10 +0,0 @@ -from ... import gql - -Tenant = gql.ObjectType( - name='Tenant', - fields=[ - gql.Field(name='tenantUri', type=gql.ID), - gql.Field(name='name', type=gql.String), - gql.Field(name='created', type=gql.String), - ], -) diff --git a/backend/dataall/api/Objects/Test/__init__.py b/backend/dataall/api/Objects/Test/__init__.py deleted file mode 100644 index af75bab94..000000000 --- a/backend/dataall/api/Objects/Test/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -from datetime import datetime - -from ... 
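
The `ParameterStoreManager` used by the monitoring resolvers above is likewise defined elsewhere in the codebase. Stripped down to plain boto3, the two calls it wraps would look approximately like this — the function names and simplified signatures are illustrative assumptions, not the real helper's interface:

```python
import boto3


def get_parameter_value(region: str, parameter_path: str) -> str:
    """Illustrative sketch only: read a single SSM parameter value."""
    ssm = boto3.client('ssm', region_name=region)
    return ssm.get_parameter(Name=parameter_path)['Parameter']['Value']


def update_parameter(region: str, parameter_name: str, parameter_value: str) -> str:
    """Illustrative sketch only: overwrite an existing String parameter."""
    ssm = boto3.client('ssm', region_name=region)
    ssm.put_parameter(Name=parameter_name, Value=parameter_value, Type='String', Overwrite=True)
    return parameter_value
```
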
import gql - -TestType = gql.ObjectType( - name='TestType', - fields=[ - gql.Field(name='_ts', type=gql.String), - gql.Field(name='message', type=gql.String), - gql.Field(name='arg', type=gql.String), - gql.Field(name='username', type=gql.String), - gql.Field(name='groups', type=gql.ArrayType(gql.String)), - ], -) - - -def test_resolver(context, source, arg: str = None): - return { - '_ts': datetime.now().isoformat(), - 'message': 'server is up', - 'username': context.username, - 'groups': context.groups or [], - 'arg': arg or '', - } - - -test_field = gql.QueryField( - name='up', - args=[gql.Argument(name='arg', type=gql.String)], - type=TestType, - resolver=test_resolver, -) diff --git a/backend/dataall/api/Objects/Vote/__init__.py b/backend/dataall/api/Objects/Vote/__init__.py deleted file mode 100644 index 7a595b458..000000000 --- a/backend/dataall/api/Objects/Vote/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - queries, - resolvers, - schema, - mutations, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/Vote/input_types.py b/backend/dataall/api/Objects/Vote/input_types.py deleted file mode 100644 index 1943fcebb..000000000 --- a/backend/dataall/api/Objects/Vote/input_types.py +++ /dev/null @@ -1,10 +0,0 @@ -from ... import gql - -VoteInput = gql.InputType( - name='VoteInput', - arguments=[ - gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), - gql.Argument(name='upvote', type=gql.NonNullableType(gql.Boolean)), - ], -) diff --git a/backend/dataall/api/Objects/Vote/mutations.py b/backend/dataall/api/Objects/Vote/mutations.py deleted file mode 100644 index 5fc10debd..000000000 --- a/backend/dataall/api/Objects/Vote/mutations.py +++ /dev/null @@ -1,12 +0,0 @@ -from ... import gql -from .resolvers import * - - -upVote = gql.MutationField( - name='upVote', - type=gql.Ref('Vote'), - args=[ - gql.Argument(name='input', type=gql.NonNullableType(gql.Ref('VoteInput'))), - ], - resolver=upvote, -) diff --git a/backend/dataall/api/Objects/Vote/queries.py b/backend/dataall/api/Objects/Vote/queries.py deleted file mode 100644 index 04bbceb3a..000000000 --- a/backend/dataall/api/Objects/Vote/queries.py +++ /dev/null @@ -1,24 +0,0 @@ -from ... import gql -from .resolvers import * - - -countUpVotes = gql.QueryField( - name='countUpVotes', - type=gql.Integer, - args=[ - gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), - ], - resolver=count_upvotes, -) - - -getVote = gql.QueryField( - name='getVote', - type=gql.Ref('Vote'), - args=[ - gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), - ], - resolver=get_vote, -) diff --git a/backend/dataall/api/Objects/Vote/resolvers.py b/backend/dataall/api/Objects/Vote/resolvers.py deleted file mode 100644 index da41462cd..000000000 --- a/backend/dataall/api/Objects/Vote/resolvers.py +++ /dev/null @@ -1,51 +0,0 @@ -from .... 
import db -from ....api.context import Context -from ....searchproxy.indexers import upsert_dashboard -from ....searchproxy.indexers import upsert_dataset - - -def count_upvotes( - context: Context, source, targetUri: str = None, targetType: str = None -): - with context.engine.scoped_session() as session: - return db.api.Vote.count_upvotes( - session=session, - username=context.username, - groups=context.groups, - uri=targetUri, - data={'targetType': targetType}, - check_perm=True, - ) - - -def upvote(context: Context, source, input=None): - with context.engine.scoped_session() as session: - vote = db.api.Vote.upvote( - session=session, - username=context.username, - groups=context.groups, - uri=input['targetUri'], - data=input, - check_perm=True, - ) - reindex(session, context.es, vote) - return vote - - -def reindex(session, es, vote): - if vote.targetType == 'dataset': - upsert_dataset(session=session, es=es, datasetUri=vote.targetUri) - elif vote.targetType == 'dashboard': - upsert_dashboard(session=session, es=es, dashboardUri=vote.targetUri) - - -def get_vote(context: Context, source, targetUri: str = None, targetType: str = None): - with context.engine.scoped_session() as session: - return db.api.Vote.get_vote( - session=session, - username=context.username, - groups=context.groups, - uri=targetUri, - data={'targetType': targetType}, - check_perm=True, - ) diff --git a/backend/dataall/api/Objects/Vote/schema.py b/backend/dataall/api/Objects/Vote/schema.py deleted file mode 100644 index 347daec28..000000000 --- a/backend/dataall/api/Objects/Vote/schema.py +++ /dev/null @@ -1,12 +0,0 @@ -from ... import gql - -Vote = gql.ObjectType( - name='Vote', - fields=[ - gql.Field(name='voteUri', type=gql.ID), - gql.Field(name='targetType', type=gql.String), - gql.Field(name='targetUri', type=gql.String), - gql.Field(name='upvote', type=gql.Boolean), - gql.Field(name='created', type=gql.String), - ], -) diff --git a/backend/dataall/api/Objects/Vpc/__init__.py b/backend/dataall/api/Objects/Vpc/__init__.py deleted file mode 100644 index 357cbaa49..000000000 --- a/backend/dataall/api/Objects/Vpc/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from . import input_types, queries, mutations, resolvers, schema - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/Vpc/input_types.py b/backend/dataall/api/Objects/Vpc/input_types.py deleted file mode 100644 index 13fde2d1c..000000000 --- a/backend/dataall/api/Objects/Vpc/input_types.py +++ /dev/null @@ -1,24 +0,0 @@ -from ... 
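
The deleted Vote schema and resolvers above translate into client operations of the following shape. The GraphQL document below is derived from the types shown in this hunk; the variable values are purely illustrative placeholders:

```python
# Derived from the deleted Vote schema above. The variable values are example
# placeholders; any GraphQL client could send this document to the API.
UPVOTE_MUTATION = """
mutation upVote($input: VoteInput!) {
  upVote(input: $input) {
    voteUri
    targetUri
    targetType
    upvote
  }
}
"""

example_variables = {
    'input': {
        'targetUri': 'example-dataset-uri',  # placeholder, not a real URI
        'targetType': 'dataset',             # 'dataset' and 'dashboard' trigger search reindexing
        'upvote': True,
    }
}
```
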
import gql - -VpcFilter = gql.InputType( - name='VpcFilter', - arguments=[ - gql.Argument('term', gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) - -NewVpcInput = gql.InputType( - name='NewVpcInput', - arguments=[ - gql.Argument(name='label', type=gql.NonNullableType(gql.String)), - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='vpcId', type=gql.NonNullableType(gql.String)), - gql.Argument(name='publicSubnetIds', type=gql.ArrayType(gql.String)), - gql.Argument(name='privateSubnetIds', type=gql.ArrayType(gql.String)), - gql.Argument(name='description', type=gql.String), - gql.Argument(name='SamlGroupName', type=gql.NonNullableType(gql.String)), - gql.Argument(name='tags', type=gql.ArrayType(gql.String)), - ], -) diff --git a/backend/dataall/api/Objects/Vpc/mutations.py b/backend/dataall/api/Objects/Vpc/mutations.py deleted file mode 100644 index 6c2a0339c..000000000 --- a/backend/dataall/api/Objects/Vpc/mutations.py +++ /dev/null @@ -1,16 +0,0 @@ -from ... import gql -from .resolvers import * - -createNetwork = gql.MutationField( - name='createNetwork', - type=gql.Ref('Vpc'), - args=[gql.Argument(name='input', type=gql.NonNullableType(gql.Ref('NewVpcInput')))], - resolver=create_network, -) - -deleteNetwork = gql.MutationField( - name='deleteNetwork', - type=gql.Boolean, - args=[gql.Argument(name='vpcUri', type=gql.NonNullableType(gql.String))], - resolver=delete_network, -) diff --git a/backend/dataall/api/Objects/Vpc/queries.py b/backend/dataall/api/Objects/Vpc/queries.py deleted file mode 100644 index 0975975c5..000000000 --- a/backend/dataall/api/Objects/Vpc/queries.py +++ /dev/null @@ -1,9 +0,0 @@ -from ... import gql -from .resolvers import * - -getNetwork = gql.QueryField( - name='getNetwork', - args=[gql.Argument(name='vpcUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('Vpc'), - resolver=get_network, -) diff --git a/backend/dataall/api/Objects/Vpc/resolvers.py b/backend/dataall/api/Objects/Vpc/resolvers.py deleted file mode 100644 index 830ec58b2..000000000 --- a/backend/dataall/api/Objects/Vpc/resolvers.py +++ /dev/null @@ -1,43 +0,0 @@ -import logging - -from ....api.context import Context -from ....db.api import Vpc - -log = logging.getLogger(__name__) - - -def create_network(context: Context, source, input): - with context.engine.scoped_session() as session: - vpc = Vpc.create_network( - session=session, - username=context.username, - groups=context.groups, - uri=input['environmentUri'], - data=input, - check_perm=True, - ) - return vpc - - -def get_network(context: Context, source, vpcUri: str = None): - with context.engine.scoped_session() as session: - return Vpc.get_network( - session=session, - username=context.username, - groups=context.groups, - uri=vpcUri, - data=None, - check_perm=True, - ) - - -def delete_network(context: Context, source, vpcUri=None): - with context.engine.scoped_session() as session: - return Vpc.delete( - session=session, - username=context.username, - groups=context.groups, - uri=vpcUri, - data=None, - check_perm=True, - ) diff --git a/backend/dataall/api/Objects/Vpc/schema.py b/backend/dataall/api/Objects/Vpc/schema.py deleted file mode 100644 index 5e93110f6..000000000 --- a/backend/dataall/api/Objects/Vpc/schema.py +++ /dev/null @@ -1,35 +0,0 @@ -from ... 
import gql - -Vpc = gql.ObjectType( - name='Vpc', - fields=[ - gql.Field(name='VpcId', type=gql.NonNullableType(gql.String)), - gql.Field(name='vpcUri', type=gql.NonNullableType(gql.ID)), - gql.Field(name='environment', type=gql.Ref('Environment')), - gql.Field(name='label', type=gql.String), - gql.Field(name='owner', type=gql.String), - gql.Field(name='name', type=gql.String), - gql.Field(name='description', type=gql.String), - gql.Field(name='tags', type=gql.ArrayType(gql.String)), - gql.Field(name='AwsAccountId', type=gql.NonNullableType(gql.String)), - gql.Field(name='region', type=gql.NonNullableType(gql.String)), - gql.Field(name='privateSubnetIds', type=gql.ArrayType(gql.String)), - gql.Field(name='publicSubnetIds', type=gql.ArrayType(gql.String)), - gql.Field(name='SamlGroupName', type=gql.String), - gql.Field(name='default', type=gql.Boolean), - ], -) -VpcSearchResult = gql.ObjectType( - name='VpcSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='pageSize', type=gql.Integer), - gql.Field(name='nextPage', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='previousPage', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('Vpc'))), - ], -) diff --git a/backend/dataall/api/Objects/Worksheet/__init__.py b/backend/dataall/api/Objects/Worksheet/__init__.py deleted file mode 100644 index dfa46b264..000000000 --- a/backend/dataall/api/Objects/Worksheet/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import ( - input_types, - mutations, - queries, - resolvers, - schema, -) - -__all__ = ['resolvers', 'schema', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/api/Objects/Worksheet/input_types.py b/backend/dataall/api/Objects/Worksheet/input_types.py deleted file mode 100644 index 50724028f..000000000 --- a/backend/dataall/api/Objects/Worksheet/input_types.py +++ /dev/null @@ -1,90 +0,0 @@ -from ... 
import gql - -NewWorksheetInput = gql.InputType( - name='NewWorksheetInput', - arguments=[ - gql.Argument(name='label', type=gql.String), - gql.Argument(name='description', type=gql.String), - gql.Argument(name='tags', type=gql.ArrayType(gql.String)), - gql.Argument(name='SamlAdminGroupName', type=gql.NonNullableType(gql.String)), - ], -) - -UpdateWorksheetInput = gql.InputType( - name='UpdateWorksheetInput', - arguments=[ - gql.Argument(name='label', type=gql.String), - gql.Argument(name='description', type=gql.String), - gql.Argument(name='tags', type=gql.ArrayType(gql.String)), - gql.Argument(name='sqlBody', type=gql.String), - gql.Argument(name='chartConfig', type=gql.Ref('WorksheetChartConfigInput')), - ], -) - - -WorksheetChartInput = gql.InputType( - name='WorksheetChartInput', - arguments=[ - gql.Argument(name='chartConfig', type=gql.String), - gql.Argument(name='label', type=gql.String), - gql.Argument(name='description', type=gql.String), - ], -) - -WorksheetQueryInput = gql.InputType( - name='WorksheetQueryInput', - arguments=[ - gql.Argument(name='sqlBody', type=gql.String), - gql.Argument(name='AthenaQueryId', type=gql.String), - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - ], -) - - -WorksheetFilter = gql.InputType( - name='WorksheetFilter', - arguments=[ - gql.Argument(name='term', type=gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) - -WorksheetShareInput = gql.InputType( - name='WorksheetShareInput', - arguments=[ - gql.Argument(name='principalId', type=gql.NonNullableType(gql.String)), - gql.Argument(name='principalType', type=gql.NonNullableType(gql.String)), - gql.Argument(name='canEdit', type=gql.NonNullableType(gql.Boolean)), - ], -) - - -WorksheetDimensionInput = gql.InputType( - name='WorksheetDimensionInput', - arguments=[ - gql.Argument(name='columnName', type=gql.String), - ], -) - -WorksheetMeasureInput = gql.InputType( - name='WorksheetMeasureInput', - arguments=[ - gql.Argument(name='columnName', type=gql.String), - gql.Argument(name='aggregationName', type=gql.String), - ], -) - - -WorksheetChartConfigInput = gql.InputType( - name='WorksheetChartConfigInput', - arguments=[ - gql.Argument(name='chartType', type=gql.String), - gql.Argument( - name='dimensions', type=gql.ArrayType(gql.Ref('WorksheetDimensionInput')) - ), - gql.Argument( - name='measures', type=gql.ArrayType(gql.Ref('WorksheetMeasureInput')) - ), - ], -) diff --git a/backend/dataall/api/Objects/Worksheet/mutations.py b/backend/dataall/api/Objects/Worksheet/mutations.py deleted file mode 100644 index 62f6d2010..000000000 --- a/backend/dataall/api/Objects/Worksheet/mutations.py +++ /dev/null @@ -1,59 +0,0 @@ -from ... 
import gql -from .resolvers import * - - -createWorksheet = gql.MutationField( - name='createWorksheet', - args=[gql.Argument(name='input', type=gql.Ref('NewWorksheetInput'))], - type=gql.Ref('Worksheet'), - resolver=create_worksheet, -) - -updateWorksheet = gql.MutationField( - name='updateWorksheet', - resolver=update_worksheet, - args=[ - gql.Argument(name='worksheetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=gql.Ref('UpdateWorksheetInput')), - ], - type=gql.Ref('Worksheet'), -) - -shareWorksheet = gql.MutationField( - name='shareWorksheet', - resolver=share_worksheet, - args=[ - gql.Argument(name='worksheetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='input', type=gql.Ref('WorksheetShareInput')), - ], - type=gql.Ref('WorksheetShare'), -) - -updateShareWorksheet = gql.MutationField( - name='updateShareWorksheet', - resolver=update_worksheet_share, - args=[ - gql.Argument(name='worksheetShareUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='canEdit', type=gql.NonNullableType(gql.Boolean)), - ], - type=gql.Ref('WorksheetShare'), -) - -deleteShareWorksheet = gql.MutationField( - name='deleteShareWorksheet', - resolver=remove_worksheet_share, - args=[ - gql.Argument(name='worksheetShareUri', type=gql.NonNullableType(gql.String)), - ], - type=gql.Boolean, -) - - -deleteWorksheet = gql.MutationField( - name='deleteWorksheet', - resolver=delete_worksheet, - args=[ - gql.Argument(name='worksheetUri', type=gql.NonNullableType(gql.String)), - ], - type=gql.Boolean, -) diff --git a/backend/dataall/api/Objects/Worksheet/queries.py b/backend/dataall/api/Objects/Worksheet/queries.py deleted file mode 100644 index 488f5d3cb..000000000 --- a/backend/dataall/api/Objects/Worksheet/queries.py +++ /dev/null @@ -1,30 +0,0 @@ -from ... import gql -from .resolvers import * - - -getWorksheet = gql.QueryField( - name='getWorksheet', - type=gql.Ref('Worksheet'), - resolver=get_worksheet, - args=[gql.Argument(name='worksheetUri', type=gql.NonNullableType(gql.String))], -) - - -listWorksheets = gql.QueryField( - name='listWorksheets', - resolver=list_worksheets, - args=[gql.Argument(name='filter', type=gql.Ref('WorksheetFilter'))], - type=gql.Ref('Worksheets'), -) - - -runAthenaSqlQuery = gql.QueryField( - name='runAthenaSqlQuery', - type=gql.Ref('AthenaQueryResult'), - args=[ - gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='worksheetUri', type=gql.NonNullableType(gql.String)), - gql.Argument(name='sqlQuery', type=gql.NonNullableType(gql.String)), - ], - resolver=run_sql_query, -) diff --git a/backend/dataall/api/Objects/Worksheet/resolvers.py b/backend/dataall/api/Objects/Worksheet/resolvers.py deleted file mode 100644 index d84adda1a..000000000 --- a/backend/dataall/api/Objects/Worksheet/resolvers.py +++ /dev/null @@ -1,169 +0,0 @@ -from sqlalchemy import and_ - -from .... 
import db -from ..AthenaQueryResult import helpers as athena_helpers -from ....api.constants import WorksheetRole -from ....api.context import Context -from ....db import paginate, exceptions, permissions, models -from ....db.api import ResourcePolicy - - -def create_worksheet(context: Context, source, input: dict = None): - with context.engine.scoped_session() as session: - return db.api.Worksheet.create_worksheet( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=input, - check_perm=True, - ) - - -def update_worksheet( - context: Context, source, worksheetUri: str = None, input: dict = None -): - with context.engine.scoped_session() as session: - return db.api.Worksheet.update_worksheet( - session=session, - username=context.username, - groups=context.groups, - uri=worksheetUri, - data=input, - check_perm=True, - ) - - -def get_worksheet(context: Context, source, worksheetUri: str = None): - with context.engine.scoped_session() as session: - return db.api.Worksheet.get_worksheet( - session=session, - username=context.username, - groups=context.groups, - uri=worksheetUri, - data=None, - check_perm=True, - ) - - -def resolve_user_role(context: Context, source: models.Worksheet): - if context.username and source.owner == context.username: - return WorksheetRole.Creator.value - elif context.groups and source.SamlAdminGroupName in context.groups: - return WorksheetRole.Admin.value - return WorksheetRole.NoPermission.value - - -def list_worksheets(context, source, filter: dict = None): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - return db.api.Worksheet.paginated_user_worksheets( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=filter, - check_perm=True, - ) - - -def share_worksheet( - context: Context, source, worksheetUri: str = None, input: dict = None -): - with context.engine.scoped_session() as session: - return db.api.Worksheet.share_worksheet( - session=session, - username=context.username, - groups=context.groups, - uri=worksheetUri, - data=input, - check_perm=True, - ) - - -def update_worksheet_share( - context, source, worksheetShareUri: str = None, canEdit: bool = None -): - with context.engine.scoped_session() as session: - share: models.WorksheetShare = session.query(models.WorksheetShare).get( - worksheetShareUri - ) - if not share: - raise exceptions.ObjectNotFound('WorksheetShare', worksheetShareUri) - - return db.api.Worksheet.update_share_worksheet( - session=session, - username=context.username, - groups=context.groups, - uri=share.worksheetUri, - data={'canEdit': canEdit, 'share': share}, - check_perm=True, - ) - - return share - - -def remove_worksheet_share(context, source, worksheetShareUri): - with context.engine.scoped_session() as session: - share: models.WorksheetShare = session.query(models.WorksheetShare).get( - worksheetShareUri - ) - if not share: - raise exceptions.ObjectNotFound('WorksheetShare', worksheetShareUri) - - return db.api.Worksheet.delete_share_worksheet( - session=session, - username=context.username, - groups=context.groups, - uri=share.worksheetUri, - data={'share': share}, - check_perm=True, - ) - - -def resolve_shares(context: Context, source: models.Worksheet, filter: dict = None): - if not filter: - filter = {} - with context.engine.scoped_session() as session: - q = session.query(models.WorksheetShare).filter( - models.WorksheetShare.worksheetUri == source.worksheetUri - ) - return paginate( - q, 
page_size=filter.get('pageSize', 15), page=filter.get('page', 1) - ).to_dict() - - -def run_sql_query( - context: Context, source, environmentUri: str = None, worksheetUri: str = None, sqlQuery: str = None -): - with context.engine.scoped_session() as session: - ResourcePolicy.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=environmentUri, - permission_name=permissions.RUN_ATHENA_QUERY, - ) - environment = db.api.Environment.get_environment_by_uri(session, environmentUri) - worksheet = db.api.Worksheet.get_worksheet_by_uri(session, worksheetUri) - - env_group = db.api.Environment.get_environment_group( - session, worksheet.SamlAdminGroupName, environment.environmentUri - ) - - return athena_helpers.run_query_with_role( - environment=environment, environment_group=env_group, sql=sqlQuery - ) - - -def delete_worksheet(context, source, worksheetUri: str = None): - with context.engine.scoped_session() as session: - return db.api.Worksheet.delete_worksheet( - session=session, - username=context.username, - groups=context.groups, - uri=worksheetUri, - data=None, - check_perm=True, - ) diff --git a/backend/dataall/api/Objects/Worksheet/schema.py b/backend/dataall/api/Objects/Worksheet/schema.py deleted file mode 100644 index af51aeae9..000000000 --- a/backend/dataall/api/Objects/Worksheet/schema.py +++ /dev/null @@ -1,116 +0,0 @@ -from ... import gql -from ..Worksheet.resolvers import * - - -Worksheet = gql.ObjectType( - name='Worksheet', - fields=[ - gql.Field(name='worksheetUri', type=gql.ID), - gql.Field(name='label', type=gql.String), - gql.Field(name='name', type=gql.String), - gql.Field(name='tags', type=gql.ArrayType(gql.String)), - gql.Field(name='description', type=gql.String), - gql.Field(name='sqlBody', type=gql.String), - gql.Field(name='chartConfig', type=gql.Ref('WorksheetChartConfig')), - gql.Field(name='created', type=gql.NonNullableType(gql.String)), - gql.Field(name='updated', type=gql.String), - gql.Field(name='owner', type=gql.NonNullableType(gql.String)), - gql.Field(name='SamlAdminGroupName', type=gql.String), - gql.Field( - name='lastSavedQueryResult', - type=gql.Ref('AthenaQueryResult'), - ), - gql.Field( - args=[gql.Argument(name='filter', type=gql.Ref('WorksheetFilter'))], - name='shares', - resolver=resolve_shares, - type=gql.Ref('WorksheetShares'), - ), - gql.Field( - name='userRoleForWorksheet', - type=gql.Ref('WorksheetRole'), - resolver=resolve_user_role, - ), - ], -) - - -Worksheets = gql.ObjectType( - name='Worksheets', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('Worksheet'))), - ], -) - - -WorksheetShare = gql.ObjectType( - name='WorksheetShare', - fields=[ - gql.Field(name='worksheetShareUri', type=gql.ID), - gql.Field(name='principalId', type=gql.NonNullableType(gql.String)), - gql.Field(name='principalType', type=gql.NonNullableType(gql.String)), - gql.Field(name='canEdit', type=gql.Boolean), - ], -) - - -WorksheetShares = gql.ObjectType( - name='WorksheetShares', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Boolean), - gql.Field(name='hasPrevious', type=gql.Boolean), - gql.Field(name='nodes', 
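
The `athena_helpers.run_query_with_role` call in `run_sql_query` above, which presumably executes the query under the environment group's role, is implemented elsewhere. As a rough, generic sketch of what executing such a query with boto3 involves — the parameter names and polling loop are illustrative, not the project's helper:

```python
import time

import boto3


def run_athena_query(database: str, workgroup: str, output_location: str, sql: str) -> list:
    """Illustrative sketch only: run an Athena query and return its rows once it succeeds."""
    athena = boto3.client('athena')
    execution = athena.start_query_execution(
        QueryString=sql,
        QueryExecutionContext={'Database': database},
        WorkGroup=workgroup,
        ResultConfiguration={'OutputLocation': output_location},
    )
    query_id = execution['QueryExecutionId']
    while True:
        state = athena.get_query_execution(QueryExecutionId=query_id)['QueryExecution']['Status']['State']
        if state in ('SUCCEEDED', 'FAILED', 'CANCELLED'):
            break
        time.sleep(1)
    if state != 'SUCCEEDED':
        raise RuntimeError(f'Athena query {query_id} ended in state {state}')
    return athena.get_query_results(QueryExecutionId=query_id)['ResultSet']['Rows']
```
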
type=gql.ArrayType(gql.Ref('WorksheetShare'))), - ], -) - - -WorksheetQueryResult = gql.ObjectType( - name='WorksheetQueryResult', - fields=[ - gql.Field(name='worksheetQueryResultUri', type=gql.ID), - gql.Field(name='queryType', type=gql.NonNullableType(gql.String)), - gql.Field(name='sqlBody', type=gql.NonNullableType(gql.String)), - gql.Field(name='AthenaQueryId', type=gql.NonNullableType(gql.String)), - gql.Field(name='region', type=gql.NonNullableType(gql.String)), - gql.Field(name='AwsAccountId', type=gql.NonNullableType(gql.String)), - gql.Field(name='AthenaOutputBucketName', type=gql.NonNullableType(gql.String)), - gql.Field(name='AthenaOutputKey', type=gql.NonNullableType(gql.String)), - gql.Field(name='timeElapsedInSecond', type=gql.NonNullableType(gql.Integer)), - gql.Field(name='created', type=gql.NonNullableType(gql.String)), - ], -) - - -WorksheetChartDimension = gql.ObjectType( - name='WorksheetChartDimension', - fields=[gql.Field(name='columnName', type=gql.NonNullableType(gql.String))], -) - -WorksheetChartMeasure = gql.ObjectType( - name='WorksheetChartMeasure', - fields=[ - gql.Field(name='columnName', type=gql.NonNullableType(gql.String)), - gql.Field(name='aggregationName', type=gql.String), - ], -) - -WorksheetChartConfig = gql.ObjectType( - name='WorksheetChartConfig', - fields=[ - gql.Field(name='AthenaQueryId', type=gql.String), - gql.Field( - name='dimensions', type=gql.ArrayType(gql.Ref('WorksheetChartDimension')) - ), - gql.Field( - name='measures', type=gql.ArrayType(gql.Ref('WorksheetChartMeasure')) - ), - ], -) diff --git a/backend/dataall/api/Objects/__init__.py b/backend/dataall/api/Objects/__init__.py deleted file mode 100644 index 1239c4273..000000000 --- a/backend/dataall/api/Objects/__init__.py +++ /dev/null @@ -1,142 +0,0 @@ -from argparse import Namespace - -from ariadne import ( - EnumType, - MutationType, - ObjectType, - UnionType, - QueryType, - gql as GQL, - make_executable_schema, -) - -from .. import gql -from ...api.constants import GraphQLEnumMapper -from . 
import ( - Permission, - DataPipeline, - Environment, - Activity, - DatasetTable, - DatasetTableColumn, - Dataset, - Group, - Principal, - Dashboard, - ShareObject, - Organization, - DatasetStorageLocation, - Stack, - Test, - SagemakerStudio, - RedshiftCluster, - DatasetProfiling, - Glossary, - AthenaQueryResult, - Worksheet, - Feed, - Notification, - Vpc, - Tenant, - SagemakerNotebook, - KeyValueTag, - Vote, -) - - -def bootstrap(): - classes = { - gql.ObjectType: [], - gql.QueryField: [], - gql.MutationField: [], - gql.Enum: [], - gql.Union: [], - gql.InputType: [], - } - - Query = gql.ObjectType(name='Query', fields=classes[gql.QueryField]) - - Mutation = gql.ObjectType(name='Mutation', fields=classes[gql.MutationField]) - - for enumclass in GraphQLEnumMapper.__subclasses__(): - enumclass.toGraphQLEnum() - - for cls in classes.keys(): - for name in cls.class_instances['default'].keys(): - if cls.get_instance(name): - classes[cls].append(cls.get_instance(name)) - else: - raise Exception(f'Unknown Graphql Type :`{name}`') - - schema = gql.Schema( - types=classes[gql.ObjectType], - inputs=classes[gql.InputType], - enums=classes[gql.Enum], - unions=classes[gql.Union], - ) - return schema - - -def save(): - schema = bootstrap() - with open('schema.graphql', 'w') as f: - f.write(schema.gql()) - - -def resolver_adapter(resolver): - def adapted(obj, info, **kwargs): - response = resolver( - context=Namespace( - engine=info.context['engine'], - es=info.context['es'], - username=info.context['username'], - groups=info.context['groups'], - schema=info.context['schema'], - cdkproxyurl=info.context['cdkproxyurl'], - ), - source=obj or None, - **kwargs, - ) - return response - - return adapted - - -def get_executable_schema(): - schema = bootstrap() - _types = [] - for _type in schema.types: - if _type.name == 'Query': - query = QueryType() - _types.append(query) - for field in _type.fields: - if field.resolver: - query.field(field.name)(resolver_adapter(field.resolver)) - elif _type.name == 'Mutation': - mutation = MutationType() - _types.append(mutation) - for field in _type.fields: - if field.resolver: - mutation.field(field.name)(resolver_adapter(field.resolver)) - else: - object_type = ObjectType(name=_type.name) - - for field in _type.fields: - if field.resolver: - object_type.field(field.name)(resolver_adapter(field.resolver)) - _types.append(object_type) - - _enums = [] - for enum in schema.enums: - d = {} - for k in enum.values: - d[k.name] = k.value - _enums.append(EnumType(enum.name, d)) - - _unions = [] - for union in schema.unions: - _unions.append(UnionType(union.name, union.resolver)) - - type_defs = GQL(schema.gql(with_directives=False)) - executable_schema = make_executable_schema(type_defs, *(_types + _enums + _unions)) - return executable_schema diff --git a/backend/dataall/api/__init__.py b/backend/dataall/api/__init__.py deleted file mode 100644 index b15269b41..000000000 --- a/backend/dataall/api/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .Objects import bootstrap, get_executable_schema, resolver_adapter -from . 
import constants - -__all__ = ['constants', 'bootstrap', 'get_executable_schema', 'resolver_adapter'] diff --git a/backend/dataall/api/constants.py b/backend/dataall/api/constants.py deleted file mode 100644 index 746b9e0dd..000000000 --- a/backend/dataall/api/constants.py +++ /dev/null @@ -1,242 +0,0 @@ -""" - 1) i created it DatasetCreator - 2) i belong to the Dataset Admin group DatasetAdmin - 3) i'm the busoness owner DatasetBusinessOwner - 4) i'm a steward DatasetSteward - 5) it's shared with one of My Env Shared - 6) no permission at all NoPermission -""" - - -from enum import Enum -from . import gql - - -class GraphQLEnumMapper(Enum): - @classmethod - def toGraphQLEnum(cls): - return gql.Enum(name=cls.__name__, values=cls) - - @classmethod - def to_value(cls, label): - for c in cls: - if c.name == label: - return c.value - return None - - @classmethod - def to_label(cls, value): - for c in cls: - if getattr(cls, c.name).value == value: - return c.name - return None - - -class OrganisationUserRole(GraphQLEnumMapper): - Owner = '999' - Admin = '900' - Member = '100' - NotMember = '000' - Invited = '800' - - -class GroupMemberRole(GraphQLEnumMapper): - Owner = 'Owner' - Admin = 'Admin' - Member = 'Member' - NotMember = 'NotMember' - - -class EnvironmentPermission(GraphQLEnumMapper): - Owner = '999' - Admin = '900' - DatasetCreator = '800' - Invited = '200' - ProjectAccess = '050' - NotInvited = '000' - - -class EnvironmentType(GraphQLEnumMapper): - Data = 'Data' - Compute = 'Compute' - - -class ProjectMemberRole(GraphQLEnumMapper): - ProjectCreator = '999' - Admin = '900' - NotContributor = '000' - - -class DashboardRole(GraphQLEnumMapper): - Creator = '999' - Admin = '900' - Shared = '800' - NoPermission = '000' - - -class DataPipelineRole(GraphQLEnumMapper): - Creator = '999' - Admin = '900' - NoPermission = '000' - - -class DatasetRole(GraphQLEnumMapper): - # Permissions on a dataset - BusinessOwner = '999' - DataSteward = '998' - Creator = '950' - Admin = '900' - Shared = '300' - NoPermission = '000' - - -class GlossaryRole(GraphQLEnumMapper): - # Permissions on a glossary - Admin = '900' - NoPermission = '000' - - -class RedshiftClusterRole(GraphQLEnumMapper): - Creator = '950' - Admin = '900' - Shared = '300' - NoPermission = '000' - - -class ScheduledQueryRole(GraphQLEnumMapper): - Creator = '950' - Admin = '900' - Shared = '300' - NoPermission = '000' - - -class SagemakerNotebookRole(GraphQLEnumMapper): - Creator = '950' - Admin = '900' - Shared = '300' - NoPermission = '000' - - -class SagemakerStudioRole(GraphQLEnumMapper): - Creator = '950' - Admin = '900' - Shared = '300' - NoPermission = '000' - - -class AirflowClusterRole(GraphQLEnumMapper): - Creator = '950' - Admin = '900' - Shared = '300' - NoPermission = '000' - - -class SortDirection(GraphQLEnumMapper): - asc = 'asc' - desc = 'desc' - - -class ShareableType(GraphQLEnumMapper): - Table = 'DatasetTable' - StorageLocation = 'DatasetStorageLocation' - View = 'View' - - -class PrincipalType(GraphQLEnumMapper): - Any = 'Any' - Organization = 'Organization' - Environment = 'Environment' - User = 'User' - Project = 'Project' - Public = 'Public' - Group = 'Group' - ConsumptionRole = 'ConsumptionRole' - - -class ShareObjectPermission(GraphQLEnumMapper): - Approvers = '999' - Requesters = '800' - DatasetAdmins = '700' - NoPermission = '000' - - -class ShareObjectStatus(GraphQLEnumMapper): - Deleted = 'Deleted' - Approved = 'Approved' - Rejected = 'Rejected' - Revoked = 'Revoked' - Draft = 'Draft' - Submitted = 'Submitted' - 
Revoke_In_Progress = 'Revoke_In_Progress' - Share_In_Progress = 'Share_In_Progress' - Processed = 'Processed' - - -class ShareItemStatus(GraphQLEnumMapper): - Deleted = 'Deleted' - PendingApproval = 'PendingApproval' - Share_Approved = 'Share_Approved' - Share_Rejected = 'Share_Rejected' - Share_In_Progress = 'Share_In_Progress' - Share_Succeeded = 'Share_Succeeded' - Share_Failed = 'Share_Failed' - Revoke_Approved = 'Revoke_Approved' - Revoke_In_Progress = 'Revoke_In_Progress' - Revoke_Failed = 'Revoke_Failed' - Revoke_Succeeded = 'Revoke_Succeeded' - - -class ShareObjectActions(GraphQLEnumMapper): - Submit = 'Submit' - Approve = 'Approve' - Reject = 'Reject' - RevokeItems = 'RevokeItems' - Start = 'Start' - Finish = 'Finish' - FinishPending = 'FinishPending' - Delete = 'Delete' - - -class ShareItemActions(GraphQLEnumMapper): - AddItem = 'AddItem' - RemoveItem = 'RemoveItem' - Failure = 'Failure' - Success = 'Success' - - -class ConfidentialityClassification(GraphQLEnumMapper): - Unclassified = 'Unclassified' - Official = 'Official' - Secret = 'Secret' - - -class Language(GraphQLEnumMapper): - English = 'English' - French = 'French' - German = 'German' - - -class Topic(GraphQLEnumMapper): - Finances = 'Finances' - HumanResources = 'HumanResources' - Products = 'Products' - Services = 'Services' - Operations = 'Operations' - Research = 'Research' - Sales = 'Sales' - Orders = 'Orders' - Sites = 'Sites' - Energy = 'Energy' - Customers = 'Customers' - Misc = 'Misc' - - -class WorksheetRole(GraphQLEnumMapper): - Creator = '950' - Admin = '900' - SharedWithWritePermission = '500' - SharedWithReadPermission = '400' - NoPermission = '000' - - -GLUEBUSINESSPROPERTIES = ['EXAMPLE_GLUE_PROPERTY_TO_BE_ADDED_ON_ES'] diff --git a/backend/dataall/api/context.py b/backend/dataall/api/context.py deleted file mode 100644 index d2b0a88f0..000000000 --- a/backend/dataall/api/context.py +++ /dev/null @@ -1,14 +0,0 @@ -class Context: - def __init__( - self, - engine=None, - es=None, - username=None, - groups=None, - cdkproxyurl=None, - ): - self.engine = engine - self.es = es - self.username = username - self.groups = groups - self.cdkproxyurl = cdkproxyurl diff --git a/backend/dataall/api/gql/README.md b/backend/dataall/api/gql/README.md deleted file mode 100644 index a8977a6e3..000000000 --- a/backend/dataall/api/gql/README.md +++ /dev/null @@ -1,174 +0,0 @@ -> `gql ` is a tiny package for code-first development of GraphQL APIs. -It allows developers to define their schema using a pythonic interface, and -brings a simple visitor mechanism for schema extension. - -# Simple example - -`gql` maps GraphQL constructs to Python classes, that can be defined, manipulated and extended. - -```python -import dataall.api.gql as gql - -Post = gql.ObjectType( - name="Post", - fields=[ - gql.Field(name="id", type=gql.String), - gql.Field(name="name", type=gql.NonNullableType(gql.String)), - gql.Field(name="comments", type=gql.ArrayType(gql.Thunk(lambda: PostComment))) - ] -) - -PostComment = gql.ObjectType( - name="PostComment", - fields=[ - gql.Field(name="post", type=Post), - gql.Field(name="id", type=gql.String), - gql.Feld(name="comment", type=gql.String) - ] -) - -Query = gql.ObjectType( - name="Query", - fields=[ - gql.Field( - name="getPostById", - args=[gql.Argument(name="postId", type=gql.String)], - type=Post - ) - ] -) - -schema = gql.Schema(types=[Post, PostComment, Query]) -print(schema.gql()) -``` - - -This will output a valid GraphQL schema -```graphql - -type Post { - id : String -name : String! 
-comments : [PostComment] - } - - - -type PostComment { - post : Post -id : String -comment : String - } - - - - -``` - - # Api - ## gql.Scalar - -Scalar GraphQL types are defined with the following Scalar instances: -``` -import dataall.gql as gql -gql.ID -gql.String -gql.Boolean -gql.Integer -gql.Number -gql.Date -gql.AWSDateTime -``` - - - ## Type Modifiers - -Types can be modified using gql Type modifiers. -Type modifiers can be applied for any valid GraphQL type, including scalar and ObjecType . - -#### `gql.ArrayType(type)` -Defines an array from the provided type - -```python -import dataall.api.gql as gql - -gql.ArrayType(gql.String) # will output [String] - -Foo = gql.ObjectType(name="Foo", fields=[gql.Field(name="id", type=gql.String)]) -gql.ArrayType(Foo) # will output [Foo] - -``` - - - -#### `gql.NonNullableType(type)` -Defines a required type from the provided type - -```python -import dataall.api.gql as gql - -gql.NonNullableType(gql.String) # will output String! - -``` - - -## gql.Field - -`gql.Field` defines a GraphQL Field - -### Methods - -#### **constructor** `gql.Field(name, type, args, directives)` -- `name (String)` : name of the field -- `type(gql.Scalar, gql.TypeModifier,gql.ObjectType,gql.Thunk)`: the type of the field -- `args(list(gql.Argument))` **optional**: A list of gql.Argument, defining GraphQL arguments -- `directives(list(gql.DirectiveArgs))` : A list of field Directive arguments - -```python -import dataall.api.gql as gql - -Child = gql.ObjectType(name="Child", fields=[gql.Field(name="id", type=gql.String)]) -# A simple field -id = gql.Field(name="id", type=gql.NonNullableType(gql.String)) -print(id.gql()) # id : String! - -# A field with arguments -listChildren = gql.Field( - name="listChildren", - type=gql.ArrayType(Child), - args=[gql.Argument(name="childName", type=gql.String)] -) # listChildren(childName:String) : [Child] - -# A field with directives - -directiveField = gql.Field( - name="directiveField", - type=gql.String, - directives=[gql.DirectiveArgs(name="required")] -) # directiveField : String @required - -``` - -#### `gql.Field.directive(name)` -Returns the `gql.DirectiveArgs` instance with the provided name, or `None` if the field does not have a directive with the provided name - - -#### `gql.Field.has_directive(name)` -Returns `True` if the field has a directive named `name`, or False if the field has no directive named `name`. - -### Properties -- `type` : the Field type -- `name` : the Field name -- `args` : the Field argument list, defaults to [] -- `directives` : the Field directive list, defaults to [] - -The -#### `gql.Field.gql(with_directive=True)` -Returns a gql representation of the field. - - ## gql.ObjectType - - ## gql.Thunk - - - ## gql.Thunk diff --git a/backend/dataall/api/gql/graphql_union_type.py b/backend/dataall/api/gql/graphql_union_type.py deleted file mode 100644 index a8fea7e2f..000000000 --- a/backend/dataall/api/gql/graphql_union_type.py +++ /dev/null @@ -1,27 +0,0 @@ -from ._cache import cache_instances -from .utils import get_named_type - - -@cache_instances -class Union: - _register = {} - - def __init__(self, name, types=[], resolver=lambda *_, **__: None): - self.name = name - self.types = types - self.resolver = resolver - Union._register[name] = self - - def gql(self, *args, **kwargs): - return f"union {self.name} = {'|'.join([get_named_type(t).name for t in self.types])}" - - -if __name__ == '__main__': - from .. 
import gql - - User = gql.ObjectType(name='User', fields=[]) - - Group = gql.ObjectType(name='Group', fields=[]) - userorgroup = Union(name='userorgroup', types=[gql.Thunk(lambda: User), Group]) - - print(userorgroup.gql()) diff --git a/backend/dataall/aws/handlers/cloudformation.py b/backend/dataall/aws/handlers/cloudformation.py deleted file mode 100644 index 460e525ae..000000000 --- a/backend/dataall/aws/handlers/cloudformation.py +++ /dev/null @@ -1,214 +0,0 @@ -import logging -import uuid - -from botocore.exceptions import ClientError - -from .service_handlers import Worker -from .sts import SessionHelper -from ...db import models, Engine -from ...utils import json_utils - -log = logging.getLogger(__name__) - - -class CloudFormation: - def __init__(self): - pass - - @staticmethod - def client(AwsAccountId, region, role=None): - session = SessionHelper.remote_session(accountid=AwsAccountId, role=role) - return session.client('cloudformation', region_name=region) - - @staticmethod - def check_existing_cdk_toolkit_stack(AwsAccountId, region): - role = SessionHelper.get_cdk_look_up_role_arn(accountid=AwsAccountId, region=region) - try: - cfn = CloudFormation.client(AwsAccountId=AwsAccountId, region=region, role=role) - response = cfn.describe_stacks(StackName='CDKToolkit') - except ClientError as e: - log.exception(f'CDKToolkitNotFound: {e}') - raise Exception('CDKToolkitNotFound') - - try: - response = cfn.describe_stack_resource( - StackName='CDKToolkit', LogicalResourceId='CloudFormationExecutionRole' - ) - cdk_role_name = response['StackResourceDetail']['PhysicalResourceId'] - return cdk_role_name - except ClientError as e: - log.exception(f'CDKToolkitDeploymentActionRoleNotFound: {e}') - raise Exception(f'CDKToolkitDeploymentActionRoleNotFound: {e}') - - @staticmethod - @Worker.handler(path='cloudformation.stack.delete') - def delete_stack(engine, task: models.Task): - try: - data = { - 'accountid': task.payload['accountid'], - 'region': task.payload['region'], - 'stack_name': task.payload['stack_name'], - } - CloudFormation.delete_cloudformation_stack(**data) - except ClientError as e: - log.error(f'Failed to delete CFN stack{task.targetUri}: {e}') - raise e - return {'status': 200, 'stackDeleted': True} - - @staticmethod - def delete_cloudformation_stack(**data): - accountid = data['accountid'] - region = data['region'] - stack_name = data['stack_name'] - try: - aws_session = SessionHelper.remote_session(accountid=accountid) - cfnclient = aws_session.client('cloudformation', region_name=region) - response = cfnclient.delete_stack( - StackName=stack_name, - ClientRequestToken=str(uuid.uuid4()), - ) - log.info(f'Stack {stack_name} deleted: {response}') - except ClientError as e: - log.error(f'Failed to delete stack {stack_name}') - raise e - - @staticmethod - @Worker.handler(path='cloudformation.stack.status') - def get_stack_status(engine, task: models.Task): - try: - data = { - 'accountid': task.payload['accountid'], - 'region': task.payload['region'], - 'stack_name': task.payload['stack_name'], - } - return CloudFormation._get_stack(**data)['StackStatus'] - except ClientError as e: - log.error(f'Failed to Get CFN stack status{task.targetUri}: {e}') - raise e - - @staticmethod - def _get_stack(**data) -> dict: - try: - accountid = data['accountid'] - region = data['region'] - stack_name = data['stack_name'] - aws_session = SessionHelper.remote_session(accountid=accountid) - cfnclient = aws_session.client('cloudformation', region_name=region) - response = 
cfnclient.describe_stacks(StackName=stack_name) - return response['Stacks'][0] - except ClientError as e: - raise e - - @staticmethod - @Worker.handler(path='cloudformation.stack.describe_resources') - def describe_stack_resources(engine, task: models.Task): - try: - filtered_resources = [] - filtered_events = [] - filtered_outputs = {} - data = { - 'accountid': task.payload['accountid'], - 'region': task.payload['region'], - 'stack_name': task.payload['stack_name'], - } - - cfn_stack = CloudFormation._get_stack(**data) - stack_arn = cfn_stack['StackId'] - status = cfn_stack['StackStatus'] - stack_outputs = cfn_stack.get('Outputs', []) - if stack_outputs: - for output in stack_outputs: - print(output) - filtered_outputs[output['OutputKey']] = output['OutputValue'] - resources = CloudFormation._describe_stack_resources(**data)[ - 'StackResources' - ] - events = CloudFormation._describe_stack_events(**data)['StackEvents'] - with engine.scoped_session() as session: - stack: models.Stack = session.query(models.Stack).get( - task.payload['stackUri'] - ) - stack.status = status - stack.stackid = stack_arn - stack.outputs = filtered_outputs - for resource in resources: - filtered_resources.append( - { - 'ResourceStatus': resource.get('ResourceStatus'), - 'LogicalResourceId': resource.get('LogicalResourceId'), - 'PhysicalResourceId': resource.get('PhysicalResourceId'), - 'ResourceType': resource.get('ResourceType'), - 'StackName': resource.get('StackName'), - 'StackId': resource.get('StackId'), - } - ) - stack.resources = {'resources': filtered_resources} - for event in events: - filtered_events.append( - { - 'ResourceStatus': event.get('ResourceStatus'), - 'LogicalResourceId': event.get('LogicalResourceId'), - 'PhysicalResourceId': event.get('PhysicalResourceId'), - 'ResourceType': event.get('ResourceType'), - 'StackName': event.get('StackName'), - 'StackId': event.get('StackId'), - 'EventId': event.get('EventId'), - 'ResourceStatusReason': event.get('ResourceStatusReason'), - } - ) - stack.events = {'events': filtered_events} - stack.error = None - session.commit() - except ClientError as e: - with engine.scoped_session() as session: - stack: models.Stack = session.query(models.Stack).get( - task.payload['stackUri'] - ) - if not stack.error: - stack.error = { - 'error': json_utils.to_string(e.response['Error']['Message']) - } - session.commit() - - @staticmethod - def _describe_stack_resources(**data): - accountid = data['accountid'] - region = data.get('region', 'eu-west-1') - stack_name = data['stack_name'] - aws_session = SessionHelper.remote_session(accountid=accountid) - client = aws_session.client('cloudformation', region_name=region) - try: - stack_resources = client.describe_stack_resources(StackName=stack_name) - log.info(f'Stack describe resources response : {stack_resources}') - return stack_resources - except ClientError as e: - log.error(e, exc_info=True) - - @staticmethod - def _describe_stack_events(**data): - accountid = data['accountid'] - region = data.get('region', 'eu-west-1') - stack_name = data['stack_name'] - aws_session = SessionHelper.remote_session(accountid=accountid) - client = aws_session.client('cloudformation', region_name=region) - try: - stack_events = client.describe_stack_events(StackName=stack_name) - log.info(f'Stack describe events response : {stack_events}') - return stack_events - except ClientError as e: - log.error(e, exc_info=True) - - -@Worker.handler(path='environment.check.cdk.boostrap') -def check_cdk_boostrap(engine: Engine, task: models.Task): - 
with engine.scoped_session() as session: - account = task.payload.get('account') - region = task.payload.get('region') - aws = SessionHelper.remote_session(accountid=account) - cfn = aws.client('cloudformation', region_name=region) - response = cfn.describe_stacks(StackName='CDKToolkit') - stacks = response['Stacks'] - if len(stacks): - return True - else: - return False diff --git a/backend/dataall/aws/handlers/codecommit.py b/backend/dataall/aws/handlers/codecommit.py deleted file mode 100644 index a906b5b25..000000000 --- a/backend/dataall/aws/handlers/codecommit.py +++ /dev/null @@ -1,100 +0,0 @@ -from .service_handlers import Worker -from .sts import SessionHelper -from ...db import models, Engine - - -class CodeCommit: - def __init__(self): - pass - - @staticmethod - def client(AwsAccountId, region): - session = SessionHelper.remote_session(AwsAccountId) - return session.client('codecommit', region_name=region) - - @staticmethod - def _unpack(session, task): - pipe: models.DataPipeline = session.query(models.DataPipeline).get(task.targetUri) - env: models.Environment = session.query(models.Environment).get(pipe.environmentUri) - client = CodeCommit.client(AwsAccountId=env.AwsAccountId, region=env.region) - return (pipe, env, client) - - @staticmethod - @Worker.handler(path='repo.datapipeline.cat') - def cat(engine: Engine, task: models.Task): - with engine.scoped_session() as session: - (pipe, env, client) = CodeCommit._unpack(session, task) - response = client.get_file( - repositoryName=pipe.repo, - commitSpecifier=task.payload.get('branch', 'master'), - filePath=task.payload.get('absolutePath', 'README.md'), - ) - return response['fileContent'] - - @staticmethod - @Worker.handler(path='repo.datapipeline.ls') - def ls(engine: Engine, task: models.Task): - with engine.scoped_session() as session: - (pipe, env, client) = CodeCommit._unpack(session, task) - response = client.get_folder( - repositoryName=pipe.repo, - commitSpecifier=task.payload.get('branch', 'master'), - folderPath=task.payload.get('folderPath'), - ) - nodes = [] - for sub_folder in response['subFolders']: - get_folder_response = client.get_folder( - repositoryName=pipe.repo, - commitSpecifier=task.payload.get('branch', 'master'), - folderPath=sub_folder['absolutePath'], - ) - get_commit = client.get_commit( - repositoryName=pipe.repo, commitId=get_folder_response['commitId'] - ) - commit = get_commit['commit'] - nodes.append( - { - 'type': 'folder', - 'author': commit['author'], - 'relativePath': sub_folder['relativePath'], - 'absolutePath': sub_folder['absolutePath'], - } - ) - for file in response['files']: - get_file_response = client.get_file( - repositoryName=pipe.repo, - commitSpecifier=task.payload.get('branch', 'master'), - filePath=file['absolutePath'], - ) - get_commit = client.get_commit( - repositoryName=pipe.repo, commitId=get_file_response['commitId'] - ) - commit = get_commit['commit'] - nodes.append( - { - 'type': 'file', - 'author': commit['author'], - 'relativePath': file['relativePath'], - 'absolutePath': file['absolutePath'], - } - ) - return nodes - - @staticmethod - @Worker.handler(path='repo.datapipeline.branches') - def list_branches(engine: Engine, task: models.Task): - with engine.scoped_session() as session: - (pipe, env, client) = CodeCommit._unpack(session, task) - response = client.list_branches(repositoryName=pipe.repo) - return response['branches'] - - @staticmethod - @Worker.handler(path='repo.datapipeline.delete') - def delete_repository(engine: Engine, task: models.Task): - with 
engine.scoped_session() as session: - cc_client = CodeCommit.client( - task.payload.get('accountid', '111111111111'), - task.payload.get('region', 'eu-west-1') - ) - response = cc_client.delete_repository(repositoryName=task.payload.get("repo_name", "dataall-repo")) - return True diff --git a/backend/dataall/aws/handlers/codepipeline.py b/backend/dataall/aws/handlers/codepipeline.py deleted file mode 100644 index 6d22271e4..000000000 --- a/backend/dataall/aws/handlers/codepipeline.py +++ /dev/null @@ -1,44 +0,0 @@ -import logging - -from botocore.exceptions import ClientError -from sqlalchemy import and_ - -from ...db import models, Engine -from .service_handlers import Worker -from .sts import SessionHelper - -log = logging.getLogger('aws:codepipeline') - - -@Worker.handler('datapipeline.pipeline.executions') -def get_pipeline_execution(engine: Engine, task: models.Task): - with engine.scoped_session() as session: - stack = ( - session.query(models.Stack) - .filter( - and_( - models.Stack.targetUri == task.targetUri, - models.Stack.stack == 'PipelineStack', - ) - ) - .first() - ) - Datapipeline: models.DataPipeline = session.query(models.DataPipeline).get( - task.targetUri - ) - outputs = stack.outputs - codepipeline_name = outputs['PipelineNameOutput'] - aws = SessionHelper.remote_session(Datapipeline.AwsAccountId) - codepipeline_client = aws.client('codepipeline', region_name=Datapipeline.region) - executions = [] - try: - response = codepipeline_client.list_pipeline_executions( - pipelineName=codepipeline_name - ) - executions = response['pipelineExecutionSummaries'] - except ClientError as e: - log.warning( - f'Could not retrieve pipeline executions for {codepipeline_name} aws://{Datapipeline.AwsAccountId}:{Datapipeline.region}' - ) - - return executions diff --git a/backend/dataall/aws/handlers/ec2.py b/backend/dataall/aws/handlers/ec2.py deleted file mode 100644 index a0b611fa6..000000000 --- a/backend/dataall/aws/handlers/ec2.py +++ /dev/null @@ -1,26 +0,0 @@ -import logging - -from .sts import SessionHelper - - -log = logging.getLogger(__name__) - - -class EC2: - @staticmethod - def client(account_id: str, region: str, role=None): - session = SessionHelper.remote_session(accountid=account_id, role=role) - return session.client('ec2', region_name=region) - - @staticmethod - def check_default_vpc_exists(AwsAccountId: str, region: str, role=None): - log.info("Check that default VPC exists..") - client = EC2.client(account_id=AwsAccountId, region=region, role=role) - response = client.describe_vpcs( - Filters=[{'Name': 'isDefault', 'Values': ['true']}] - ) - vpcs = response['Vpcs'] - log.info(f"Default VPCs response: {vpcs}") - if vpcs: - return True - return False diff --git a/backend/dataall/aws/handlers/ecs.py b/backend/dataall/aws/handlers/ecs.py deleted file mode 100644 index 3c247b4af..000000000 --- a/backend/dataall/aws/handlers/ecs.py +++ /dev/null @@ -1,194 +0,0 @@ -import logging -import os -import time - -import boto3 -from botocore.exceptions import ClientError - -from .service_handlers import Worker -from ... 
import db -from ...db import models -from ...utils import Parameter -from ...tasks.data_sharing.data_sharing_service import DataSharingService - -log = logging.getLogger('aws:ecs') - - -class Ecs: - def __init__(self): - pass - - @staticmethod - @Worker.handler(path='ecs.share.approve') - def approve_share(engine, task: models.Task): - envname = os.environ.get('envname', 'local') - if envname in ['local', 'dkrcompose']: - return DataSharingService.approve_share(engine, task.targetUri) - else: - return Ecs.run_share_management_ecs_task( - envname=envname, share_uri=task.targetUri, handler='approve_share' - ) - - @staticmethod - @Worker.handler(path='ecs.share.revoke') - def revoke_share(engine, task: models.Task): - envname = os.environ.get('envname', 'local') - if envname in ['local', 'dkrcompose']: - return DataSharingService.revoke_share(engine, task.targetUri) - else: - return Ecs.run_share_management_ecs_task( - envname=envname, share_uri=task.targetUri, handler='revoke_share' - ) - - @staticmethod - def run_share_management_ecs_task(envname, share_uri, handler): - share_task_definition = Parameter().get_parameter( - env=envname, path='ecs/task_def_arn/share_management' - ) - container_name = Parameter().get_parameter( - env=envname, path='ecs/container/share_management' - ) - cluster_name = Parameter().get_parameter(env=envname, path='ecs/cluster/name') - subnets = Parameter().get_parameter(env=envname, path='ecs/private_subnets') - security_groups = Parameter().get_parameter( - env=envname, path='ecs/sharemanager_security_groups' - ) - - try: - Ecs.run_ecs_task( - cluster_name=cluster_name, - task_definition=share_task_definition, - container_name=container_name, - security_groups=security_groups, - subnets=subnets, - environment=[ - {'name': 'shareUri', 'value': share_uri}, - {'name': 'envname', 'value': envname}, - {'name': 'handler', 'value': handler}, - { - 'name': 'AWS_REGION', - 'value': os.getenv('AWS_REGION', 'eu-west-1'), - }, - ], - ) - return True - except ClientError as e: - log.error(e) - raise e - - @staticmethod - @Worker.handler(path='ecs.cdkproxy.deploy') - def deploy_stack(engine, task: models.Task): - with engine.scoped_session() as session: - stack: models.Stack = db.api.Stack.get_stack_by_uri( - session, stack_uri=task.targetUri - ) - envname = os.environ.get('envname', 'local') - cluster_name = Parameter().get_parameter( - env=envname, path='ecs/cluster/name' - ) - - while Ecs.is_task_running(cluster_name=cluster_name, started_by=f'awsworker-{task.targetUri}'): - log.info( - f'ECS task for stack stack-{task.targetUri} is running waiting for 30 seconds before retrying...' 
- ) - time.sleep(30) - - stack.EcsTaskArn = Ecs.run_cdkproxy_task(stack_uri=task.targetUri) - - @staticmethod - def run_cdkproxy_task(stack_uri): - envname = os.environ.get('envname', 'local') - cdkproxy_task_definition = Parameter().get_parameter( - env=envname, path='ecs/task_def_arn/cdkproxy' - ) - container_name = Parameter().get_parameter( - env=envname, path='ecs/container/cdkproxy' - ) - cluster_name = Parameter().get_parameter(env=envname, path='ecs/cluster/name') - subnets = Parameter().get_parameter(env=envname, path='ecs/private_subnets') - security_groups = Parameter().get_parameter( - env=envname, path='ecs/security_groups' - ) - try: - task_arn = Ecs.run_ecs_task( - cluster_name=cluster_name, - task_definition=cdkproxy_task_definition, - container_name=container_name, - security_groups=security_groups, - subnets=subnets, - environment=[ - {'name': 'stackUri', 'value': stack_uri}, - {'name': 'envname', 'value': envname}, - { - 'name': 'AWS_REGION', - 'value': os.getenv('AWS_REGION', 'eu-west-1'), - }, - ], - started_by=f'awsworker-{stack_uri}', - ) - log.info(f'ECS Task {task_arn} running') - return task_arn - except ClientError as e: - log.error(e) - raise e - - @staticmethod - def run_ecs_task( - cluster_name, - task_definition, - container_name, - security_groups, - subnets, - environment, - started_by='awsworker', - ): - response = boto3.client('ecs').run_task( - cluster=cluster_name, - taskDefinition=task_definition, - count=1, - launchType='FARGATE', - networkConfiguration={ - 'awsvpcConfiguration': { - 'subnets': subnets.split(','), - 'securityGroups': security_groups.split(','), - } - }, - overrides={ - 'containerOverrides': [ - { - 'name': container_name, - 'environment': environment, - } - ] - }, - startedBy=started_by, - ) - if response['failures']: - raise Exception( - ', '.join( - [ - 'fail to run task {0} reason: {1}'.format( - failure['arn'], failure['reason'] - ) - for failure in response['failures'] - ] - ) - ) - task_arn = response.get('tasks', [{'taskArn': None}])[0]['taskArn'] - log.info(f'Task started {task_arn}..') - return task_arn - - @staticmethod - def is_task_running(cluster_name, started_by): - try: - client = boto3.client('ecs') - running_tasks = client.list_tasks( - cluster=cluster_name, startedBy=started_by, desiredStatus='RUNNING' - ) - if running_tasks and running_tasks.get('taskArns'): - return True - return False - except ClientError as e: - log.error(e) - raise e diff --git a/backend/dataall/aws/handlers/glue.py b/backend/dataall/aws/handlers/glue.py deleted file mode 100644 index 51929540a..000000000 --- a/backend/dataall/aws/handlers/glue.py +++ /dev/null @@ -1,793 +0,0 @@ -import logging - -from botocore.exceptions import ClientError - -from .service_handlers import Worker -from .sts import SessionHelper -from ... 
import db -from ...db import models - -log = logging.getLogger('aws:glue') - - -class Glue: - def __init__(self): - pass - - @staticmethod - def create_database(accountid, database, region, location): - try: - existing_database = Glue.database_exists( - accountid=accountid, database=database, region=region - ) - if existing_database: - glue_database_created = True - else: - Glue._create_glue_database(accountid, database, region, location) - glue_database_created = True - return glue_database_created - except ClientError as e: - log.error( - f'Failed to create database {database} on account {accountid} due to {e}' - ) - raise e - - @staticmethod - def _create_glue_database(accountid, database, region, location): - try: - aws_session = SessionHelper.remote_session(accountid=accountid) - glue = aws_session.client('glue', region_name=region) - db_input = { - 'Name': database, - 'Description': 'dataall database {} '.format(database), - 'CreateTableDefaultPermissions': [], - } - if location: - db_input['LocationUri'] = location - log.info(f'Creating Glue database with input: {db_input}') - response = glue.create_database(CatalogId=accountid, DatabaseInput=db_input) - log.info(f'response Create Database: {response}') - return response - except ClientError as e: - log.debug(f'Failed to create database {database}', e) - raise e - - @staticmethod - def get_database_arn(**data): - return 'arn:aws:glue:{}:{}:database/{}'.format( - data.get('region', 'eu-west-1'), data.get('accountid'), data.get('database') - ) - - @staticmethod - def database_exists(**data): - accountid = data['accountid'] - database = data.get('database', 'UnknownDatabaseName') - region = data.get('region', 'eu-west-1') - session = SessionHelper.remote_session(accountid) - try: - glue_client = session.client('glue', region_name=region) - glue_client.get_database(CatalogId=data['accountid'], Name=database) - return True - except ClientError: - log.info(f'Database {database} does not exist on account {accountid}...') - return False - - @staticmethod - @Worker.handler(path='glue.dataset.database.tables') - def list_tables(engine, task: models.Task): - with engine.scoped_session() as session: - dataset: models.Dataset = db.api.Dataset.get_dataset_by_uri( - session, task.targetUri - ) - accountid = dataset.AwsAccountId - region = dataset.region - tables = Glue.list_glue_database_tables( - accountid, dataset.GlueDatabaseName, region - ) - db.api.DatasetTable.sync(session, dataset.datasetUri, glue_tables=tables) - return tables - - @staticmethod - def list_glue_database_tables(accountid, database, region): - aws_session = SessionHelper.remote_session(accountid=accountid) - glue = aws_session.client('glue', region_name=region) - found_tables = [] - try: - log.debug(f'Looking for {database} tables') - - if not Glue.database_exists( - accountid=accountid, database=database, region=region - ): - return found_tables - - paginator = glue.get_paginator('get_tables') - - pages = paginator.paginate( - DatabaseName=database, - CatalogId=accountid, - ) - for page in pages: - found_tables.extend(page['TableList']) - - log.debug(f'Retrieved all database {database} tables: {found_tables}') - - except ClientError as e: - log.error( - f'Failed to retrieve tables for database {accountid}|{database}: {e}', - exc_info=True, - ) - return found_tables - - @staticmethod - def table_exists(**data): - accountid = data['accountid'] - region = data.get('region', 'eu-west-1') - database = data.get('database', 'UndefinedDatabaseName') - table_name = 
data.get('tablename', 'UndefinedTableName') - try: - table = ( - SessionHelper.remote_session(accountid) - .client('glue', region_name=region) - .get_table( - CatalogId=data['accountid'], DatabaseName=database, Name=table_name - ) - ) - log.info(f'Glue table found: {data}') - return table - except ClientError: - log.info(f'Glue table not found: {data}') - return None - - @staticmethod - def _create_table(**data): - accountid = data['accountid'] - region = data.get('region', 'eu-west-1') - database = data.get('database', 'UnknownDatabaseName') - - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=region) - log.info( - 'Creating table {} in database {}'.format( - data['tablename'], data['database'] - ) - ) - if not Glue.database_exists( - database=database, region=region, accountid=accountid - ): - Glue.create_database(accountid, database, region, None) - if 'table_input' not in data: - table_input = { - 'Name': data['tablename'], - 'Description': data.get('Description', 'Not available'), - 'Parameters': {'classification': 'csv', 'skip.header.line.count': '1'}, - 'StorageDescriptor': { - 'Columns': [ - {'Name': c['Name'], 'Type': c['Type']} - for c in data.get('columns') - ], - 'Location': data.get('location'), - 'InputFormat': 'org.apache.hadoop.mapred.TextInputFormat', - 'OutputFormat': 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat', - 'SerdeInfo': { - 'SerializationLibrary': 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe', - 'Parameters': { - 'serialization.format': ',', - 'field.delim': ',', - 'escape.delim': '\\', - }, - }, - }, - 'TableType': 'EXTERNAL_TABLE', - 'PartitionKeys': data.get('partition_keys') or [], - } - else: - table_input = data['table_input'] - - found_table = Glue.table_exists(**data) - - if not found_table: - response = glue.create_table( - CatalogId=accountid, - DatabaseName=data.get('database'), - TableInput=table_input, - ) - log.info(f'Successfully Created table {table_input} on account {accountid}') - return response - - else: - - if Glue.is_resource_link(found_table): - - log.info( - f'Table is a Resource Link {found_table} ' - f'on account {accountid} and is managed by source account' - ) - return found_table - - elif Glue.is_resource_link(table_input): - - return Glue.delete_table_and_create_resourcelink( - glue, database, accountid, table_input - ) - - else: - response = glue.update_table( - CatalogId=accountid, - DatabaseName=data.get('database'), - TableInput=table_input, - ) - log.info( - f'Successfully Updated table {found_table} on account {accountid}' - ) - return response - - @staticmethod - def delete_table(accountid, region, database, tablename): - session = SessionHelper.remote_session(accountid=accountid) - client = session.client('glue', region_name=region) - log.info( - 'Deleting table {} in database {}'.format( - tablename, database - ) - ) - response = client.delete_table( - CatalogId=accountid, - DatabaseName=database, - Name=tablename - ) - - return response - - @staticmethod - def create_resource_link(**data): - accountid = data['accountid'] - region = data['region'] - database = data['database'] - resource_link_name = data['resource_link_name'] - resource_link_input = data['resource_link_input'] - log.info( - f'Creating ResourceLink {resource_link_name} in database {accountid}://{database}' - ) - try: - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=region) - resource_link = Glue.table_exists( - 
accountid=accountid, - region=region, - database=database, - tablename=resource_link_name, - ) - if resource_link: - log.info( - f'ResourceLink {resource_link_name} already exists in database {accountid}://{database}' - ) - else: - resource_link = glue.create_table( - CatalogId=accountid, - DatabaseName=database, - TableInput=resource_link_input, - ) - log.info( - f'Successfully created ResourceLink {resource_link_name} in database {accountid}://{database}' - ) - return resource_link - except ClientError as e: - log.error( - f'Could not create ResourceLink {resource_link_name} ' - f'in database {accountid}://{database} ' - f'due to: {e}' - ) - raise e - - @staticmethod - def is_resource_link(table_input: dict): - """ - Verifies if a Glue table or Glue table input contains the block "TargetTable" - if it is the case it means it is a Resource Link - to a shared table by Lake Formation cross account or from the same account - :param table_input: - :return: - """ - if 'TargetTable' in table_input.keys(): - log.info( - f"Table {table_input['Name']} is a resource link " - f"from account {table_input['TargetTable']['CatalogId']} and will not be updated" - ) - return True - return False - - @staticmethod - def delete_table_and_create_resourcelink(glue, database, accountid, table_input): - """ - When table exists before Lake Formation introduction it needs to be deleted - And transformed to a resource link - :param glue: - :param database: - :param accountid: - :param table_input: - :return: - """ - try: - glue.delete_table( - CatalogId=accountid, DatabaseName=database, Name=table_input['Name'] - ) - log.debug( - f'Successfully Deleted table {table_input} on account {accountid}' - ) - response = glue.create_table( - CatalogId=accountid, DatabaseName=database, TableInput=table_input - ) - log.info(f'Successfully Changed table to resource link {response}') - return response - except ClientError as e: - log.warning( - f'Failed to change table to resource link {table_input} due to: {e}' - ) - raise e - - @staticmethod - def delete_database(**data): - accountid = data['accountid'] - region = data['region'] - database = data['database'] - log.info(f'Deleting database {accountid}://{database} ...') - try: - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=region) - if Glue.database_exists( - accountid=accountid, - region=region, - database=database, - ): - glue.delete_database(CatalogId=accountid, Name=database) - return True - except ClientError as e: - log.error( - f'Could not delete database {database} ' - f'in account {accountid} ' - f'due to: {e}' - ) - raise e - - @staticmethod - def batch_delete_tables(**data): - accountid = data['accountid'] - region = data['region'] - database = data['database'] - tables = data['tables'] - - if not tables: - log.info('No tables to delete exiting method...') - return - - log.info(f'Batch deleting tables: {tables}') - try: - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=region) - if Glue.database_exists( - accountid=accountid, - region=region, - database=database, - ): - glue.batch_delete_table( - CatalogId=accountid, DatabaseName=database, TablesToDelete=tables - ) - log.debug( - f'Batch deleted tables {len(tables)} from database {database} successfully' - ) - return True - except ClientError as e: - log.error( - f'Could not batch delete tables {tables} ' - f'in database {accountid}://{database} ' - f'due to: {e}' - ) - raise e - - @staticmethod - 
@Worker.handler(path='glue.dataset.crawler.create') - def create_crawler(engine, task: models.Task): - with engine.scoped_session() as session: - dataset: models.Dataset = db.api.Dataset.get_dataset_by_uri( - session, task.targetUri - ) - location = task.payload.get('location') - Glue.create_glue_crawler( - **{ - 'crawler_name': f'{dataset.GlueDatabaseName}-{location}'[:52], - 'region': dataset.region, - 'accountid': dataset.AwsAccountId, - 'database': dataset.GlueDatabaseName, - 'dataset_role': dataset.IAMDatasetAdminRoleArn, - 'location': location or f's3://{dataset.S3BucketName}', - } - ) - - @staticmethod - def create_glue_crawler(**data): - try: - accountid = data['accountid'] - database = data.get('database') - dataset_role = data['dataset_role'] - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=data.get('region', 'eu-west-1')) - crawler_name = data.get('crawler_name') - targets = {'S3Targets': [{'Path': data.get('location')}]} - crawler = Glue._get_crawler(glue, crawler_name) - if crawler: - Glue._update_existing_crawler( - glue, dataset_role, crawler_name, targets, database - ) - else: - crawler = glue.create_crawler( - Name=crawler_name, - Role=dataset_role, - DatabaseName=database, - Targets=targets, - Tags=data.get('tags', {'Application': 'dataall'}), - ) - - glue.start_crawler(Name=crawler_name) - log.info('Crawler %s started ', crawler_name) - return crawler - except ClientError as e: - log.error('Failed to create Crawler due to %s', e) - - @staticmethod - def get_glue_crawler(data): - try: - accountid = data['accountid'] - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=data.get('region', 'eu-west-1')) - crawler_name = data.get('crawler_name') - crawler = Glue._get_crawler(glue, crawler_name) - return crawler - except ClientError as e: - log.error('Failed to find Crawler due to %s', e) - raise e - - @staticmethod - @Worker.handler(path='glue.crawler.start') - def start_crawler(engine, task: models.Task): - with engine.scoped_session() as session: - dataset: models.Dataset = db.api.Dataset.get_dataset_by_uri( - session, task.targetUri - ) - location = task.payload.get('location') - return Glue.start_glue_crawler( - { - 'crawler_name': dataset.GlueCrawlerName, - 'region': dataset.region, - 'accountid': dataset.AwsAccountId, - 'database': dataset.GlueDatabaseName, - 'dataset_role': dataset.IAMDatasetAdminRoleArn, - 'location': location, - } - ) - - @staticmethod - def start_glue_crawler(data): - try: - accountid = data['accountid'] - crawler_name = data['crawler_name'] - database = data['database'] - dataset_role = data['dataset_role'] - targets = {'S3Targets': [{'Path': data.get('location')}]} - session = SessionHelper.remote_session(accountid=accountid) - glue = session.client('glue', region_name=data.get('region', 'eu-west-1')) - if data.get('location'): - Glue._update_existing_crawler( - glue, dataset_role, crawler_name, targets, database - ) - crawler = Glue._get_crawler(glue, crawler_name) - glue.start_crawler(Name=crawler_name) - log.info('Crawler %s started ', crawler_name) - return crawler - except ClientError as e: - log.error('Failed to start Crawler due to %s', e) - raise e - - @staticmethod - def _get_crawler(glue, crawler_name): - crawler = None - try: - crawler = glue.get_crawler(Name=crawler_name) - except ClientError as e: - if e.response['Error']['Code'] == 'EntityNotFoundException': - log.debug(f'Crawler does not exists {crawler_name} %s', e) - else: 
- raise e - return crawler.get('Crawler') if crawler else None - - @staticmethod - def _update_existing_crawler(glue, dataset_role, crawler_name, targets, database): - try: - glue.stop_crawler(Name=crawler_name) - except ClientError as e: - if ( - e.response['Error']['Code'] == 'CrawlerStoppingException' - or e.response['Error']['Code'] == 'CrawlerNotRunningException' - ): - log.error('Failed to stop crawler %s', e) - try: - glue.update_crawler( - Name=crawler_name, - Role=dataset_role, - DatabaseName=database, - Targets=targets, - ) - log.info('Crawler %s updated ', crawler_name) - except ClientError as e: - log.debug('Failed to stop and update crawler %s', e) - if e.response['Error']['Code'] != 'CrawlerRunningException': - log.error('Failed to update crawler %s', e) - else: - raise e - - @staticmethod - @Worker.handler('glue.table.update_column') - def update_table_columns(engine, task: models.Task): - with engine.scoped_session() as session: - column: models.DatasetTableColumn = session.query( - models.DatasetTableColumn - ).get(task.targetUri) - table: models.DatasetTable = session.query(models.DatasetTable).get( - column.tableUri - ) - try: - aws_session = SessionHelper.remote_session(table.AWSAccountId) - - Glue.grant_pivot_role_all_table_permissions(aws_session, table) - - glue_client = aws_session.client('glue', region_name=table.region) - - original_table = glue_client.get_table( - CatalogId=table.AWSAccountId, - DatabaseName=table.GlueDatabaseName, - Name=table.name, - ) - updated_table = { - k: v - for k, v in original_table['Table'].items() - if k - not in [ - 'CatalogId', - 'VersionId', - 'DatabaseName', - 'CreateTime', - 'UpdateTime', - 'CreatedBy', - 'IsRegisteredWithLakeFormation', - ] - } - all_columns = updated_table.get('StorageDescriptor', {}).get( - 'Columns', [] - ) + updated_table.get('PartitionKeys', []) - for col in all_columns: - if col['Name'] == column.name: - col['Comment'] = column.description - log.info( - f'Found column {column.name} adding description {column.description}' - ) - response = glue_client.update_table( - DatabaseName=table.GlueDatabaseName, - TableInput=updated_table, - ) - log.info( - f'Column {column.name} updated successfully: {response}' - ) - return True - - except ClientError as e: - log.error( - f'Failed to update table column {column.name} description: {e}' - ) - raise e - - @staticmethod - def grant_pivot_role_all_table_permissions(aws_session, table): - """ - Pivot role needs to have all permissions - for tables managed inside dataall - :param aws_session: - :param table: - :return: - """ - try: - lf_client = aws_session.client('lakeformation', region_name=table.region) - grant_dict = dict( - Principal={ - 'DataLakePrincipalIdentifier': SessionHelper.get_delegation_role_arn( - table.AWSAccountId - ) - }, - Resource={ - 'Table': { - 'DatabaseName': table.GlueDatabaseName, - 'Name': table.name, - } - }, - Permissions=['SELECT', 'ALTER', 'DROP', 'INSERT'], - ) - response = lf_client.grant_permissions(**grant_dict) - log.error( - f'Successfully granted pivot role all table ' - f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' - f'access: {response}' - ) - except ClientError as e: - log.error( - f'Failed to grant pivot role all table ' - f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' - f'access: {e}' - ) - raise e - - @staticmethod - @Worker.handler('glue.table.columns') - def get_table_columns(engine, task: models.Task): - with engine.scoped_session() as session: - dataset_table: 
models.DatasetTable = session.query(models.DatasetTable).get( - task.targetUri - ) - aws = SessionHelper.remote_session(dataset_table.AWSAccountId) - glue_client = aws.client('glue', region_name=dataset_table.region) - glue_table = {} - try: - glue_table = glue_client.get_table( - CatalogId=dataset_table.AWSAccountId, - DatabaseName=dataset_table.GlueDatabaseName, - Name=dataset_table.name, - ) - except glue_client.exceptions.ClientError as e: - log.error( - f'Failed to get table aws://{dataset_table.AWSAccountId}' - f'//{dataset_table.GlueDatabaseName}' - f'//{dataset_table.name} due to: ' - f'{e}' - ) - db.api.DatasetTable.sync_table_columns( - session, dataset_table, glue_table['Table'] - ) - return True - - @staticmethod - @Worker.handler(path='glue.job.runs') - def get_job_runs(engine, task: models.Task): - with engine.scoped_session() as session: - Data_pipeline: models.DataPipeline = session.query(models.DataPipeline).get( - task.targetUri - ) - aws = SessionHelper.remote_session(Data_pipeline.AwsAccountId) - glue_client = aws.client('glue', region_name=Data_pipeline.region) - try: - response = glue_client.get_job_runs(JobName=Data_pipeline.name) - except ClientError as e: - log.warning(f'Could not retrieve pipeline runs , {str(e)}') - return [] - return response['JobRuns'] - - @staticmethod - @Worker.handler('glue.job.start_profiling_run') - def start_profiling_run(engine, task: models.Task): - with engine.scoped_session() as session: - profiling: models.DatasetProfilingRun = ( - db.api.DatasetProfilingRun.get_profiling_run( - session, profilingRunUri=task.targetUri - ) - ) - dataset: models.Dataset = session.query(models.Dataset).get( - profiling.datasetUri - ) - run = Glue.run_job( - **{ - 'accountid': dataset.AwsAccountId, - 'name': dataset.GlueProfilingJobName, - 'region': dataset.region, - 'arguments': ( - {'--table': profiling.GlueTableName} - if profiling.GlueTableName - else {} - ), - } - ) - db.api.DatasetProfilingRun.update_run( - session, - profilingRunUri=profiling.profilingRunUri, - GlueJobRunId=run['JobRunId'], - ) - return run - - @staticmethod - def run_job(**data): - accountid = data['accountid'] - name = data['name'] - try: - session = SessionHelper.remote_session(accountid=accountid) - client = session.client('glue', region_name=data.get('region', 'eu-west-1')) - response = client.start_job_run( - JobName=name, Arguments=data.get('arguments', {}) - ) - return response - except ClientError as e: - log.error(f'Failed to start profiling job {name} due to: {e}') - raise e - - @staticmethod - @Worker.handler('glue.job.profiling_run_status') - def get_profiling_run(engine, task: models.Task): - with engine.scoped_session() as session: - profiling: models.DatasetProfilingRun = ( - db.api.DatasetProfilingRun.get_profiling_run( - session, profilingRunUri=task.targetUri - ) - ) - dataset: models.Dataset = session.query(models.Dataset).get( - profiling.datasetUri - ) - glue_run = Glue.get_job_run( - **{ - 'accountid': dataset.AwsAccountId, - 'name': dataset.GlueProfilingJobName, - 'region': dataset.region, - 'run_id': profiling.GlueJobRunId, - } - ) - profiling.status = glue_run['JobRun']['JobRunState'] - session.commit() - return profiling.status - - @staticmethod - def get_job_run(**data): - accountid = data['accountid'] - name = data['name'] - run_id = data['run_id'] - try: - session = SessionHelper.remote_session(accountid=accountid) - client = session.client('glue', region_name=data.get('region', 'eu-west-1')) - response = client.get_job_run(JobName=name, RunId=run_id) 
- return response - except ClientError as e: - log.error(f'Failed to get job run {run_id} due to: {e}') - raise e - - @staticmethod - def grant_principals_all_table_permissions( - table: models.DatasetTable, principals: [str], client=None - ): - """ - Update the table permissions on Lake Formation - for tables managed by data.all - :param principals: - :param table: - :param client: - :return: - """ - if not client: - client = SessionHelper.remote_session(table.AWSAccountId).client( - 'lakeformation', region_name=table.region - ) - for principal in principals: - try: - grant_dict = dict( - Principal={'DataLakePrincipalIdentifier': principal}, - Resource={ - 'Table': { - 'DatabaseName': table.GlueDatabaseName, - 'Name': table.name, - } - }, - Permissions=['ALL'], - ) - response = client.grant_permissions(**grant_dict) - log.error( - f'Successfully granted principals {principals} all permissions on table ' - f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' - f'access: {response}' - ) - except ClientError as e: - log.error( - f'Failed to grant admin roles {principals} all permissions on table ' - f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' - f'access: {e}' - ) diff --git a/backend/dataall/aws/handlers/kms.py b/backend/dataall/aws/handlers/kms.py deleted file mode 100644 index 4614cbbcd..000000000 --- a/backend/dataall/aws/handlers/kms.py +++ /dev/null @@ -1,74 +0,0 @@ -import logging - -from .sts import SessionHelper - -log = logging.getLogger(__name__) - - -class KMS: - - @staticmethod - def client(account_id: str, region: str): - session = SessionHelper.remote_session(accountid=account_id) - return session.client('kms', region_name=region) - - @staticmethod - def put_key_policy( - account_id: str, - region: str, - key_id: str, - policy_name: str, - policy: str, - ): - try: - kms_client = KMS.client(account_id, region) - kms_client.put_key_policy( - KeyId=key_id, - PolicyName=policy_name, - Policy=policy, - ) - except Exception as e: - log.error( - f'Failed to attach policy to KMS key {key_id} on {account_id} : {e} ' - ) - raise e - - @staticmethod - def get_key_policy( - account_id: str, - region: str, - key_id: str, - policy_name: str, - ): - try: - kms_client = KMS.client(account_id, region) - response = kms_client.get_key_policy( - KeyId=key_id, - PolicyName=policy_name, - ) - except Exception as e: - log.error( - f'Failed to get kms key policy of key {key_id} : {e}' - ) - return None - else: - return response['Policy'] - - @staticmethod - def get_key_id( - account_id: str, - region: str, - key_alias: str, - ): - try: - kms_client = KMS.client(account_id, region) - response = kms_client.describe_key( - KeyId=key_alias, - ) - except Exception as e: - log.error( - f'Failed to get kms key id of {key_alias} : {e}' - ) - return None - else: - return response['KeyMetadata']['KeyId'] diff --git a/backend/dataall/aws/handlers/lakeformation.py b/backend/dataall/aws/handlers/lakeformation.py deleted file mode 100644 index d06515f01..000000000 --- a/backend/dataall/aws/handlers/lakeformation.py +++ /dev/null @@ -1,322 +0,0 @@ -import logging -import uuid - -from botocore.exceptions import ClientError - -from .sts import SessionHelper - -log = logging.getLogger('aws:lakeformation') -PIVOT_ROLE_NAME_PREFIX = "dataallPivotRole" - - -class LakeFormation: - def __init__(self): - pass - - @staticmethod - def check_existing_lf_registered_location(resource_arn: str, role_arn: str, accountid: str, region: str): - """ - Checks if there is a non-dataall-created 
registered location for the Dataset - Returns False is already existing location else return the resource info - """ - try: - session = SessionHelper.remote_session(accountid) - lf_client = session.client('lakeformation', region_name=region) - response = lf_client.describe_resource(ResourceArn=resource_arn) - registered_role_name = response['ResourceInfo']['RoleArn'].lstrip(f"arn:aws:iam::{accountid}:role/") - log.info(f'LF data location already registered: {response}, registered with role {registered_role_name}') - if registered_role_name.startswith(PIVOT_ROLE_NAME_PREFIX) or response['ResourceInfo']['RoleArn'] == role_arn: - log.info('The existing data location was created as part of the dataset stack. There was no pre-existing data location.') - return False - return response['ResourceInfo'] - - except ClientError as e: - log.info(f'LF data location for resource {resource_arn} not found due to {e}') - return False - - @staticmethod - def grant_pivot_role_all_database_permissions(accountid, region, database): - LakeFormation.grant_permissions_to_database( - client=SessionHelper.remote_session(accountid=accountid).client( - 'lakeformation', region_name=region - ), - principals=[SessionHelper.get_delegation_role_arn(accountid)], - database_name=database, - permissions=['ALL'], - ) - - @staticmethod - def grant_permissions_to_database( - client, - principals, - database_name, - permissions, - permissions_with_grant_options=None, - ): - for principal in principals: - log.info( - f'Granting database permissions {permissions} to {principal} on database {database_name}' - ) - try: - client.grant_permissions( - Principal={'DataLakePrincipalIdentifier': principal}, - Resource={ - 'Database': {'Name': database_name}, - }, - Permissions=permissions, - ) - log.info( - f'Successfully granted principal {principal} permissions {permissions} ' - f'to {database_name}' - ) - except ClientError as e: - log.error( - f'Could not grant permissions ' - f'principal {principal} ' - f'{permissions} to database {database_name} due to: {e}' - ) - - @staticmethod - def grant_permissions_to_table( - client, - principal, - database_name, - table_name, - permissions, - permissions_with_grant_options=None, - ): - try: - grant_dict = dict( - Principal={'DataLakePrincipalIdentifier': principal}, - Resource={'Table': {'DatabaseName': database_name, 'Name': table_name}}, - Permissions=permissions, - ) - if permissions_with_grant_options: - grant_dict[ - 'PermissionsWithGrantOption' - ] = permissions_with_grant_options - - response = client.grant_permissions(**grant_dict) - - log.info( - f'Successfully granted principal {principal} permissions {permissions} ' - f'to {database_name}.{table_name}: {response}' - ) - except ClientError as e: - log.warning( - f'Could not grant principal {principal} ' - f'permissions {permissions} to table ' - f'{database_name}.{table_name} due to: {e}' - ) - # raise e - - @staticmethod - def revoke_iamallowedgroups_super_permission_from_table( - client, accountid, database, table - ): - """ - When upgrading to LF tables can still have IAMAllowedGroups permissions - Unless this is revoked the table can not be shared using LakeFormation - :param client: - :param accountid: - :param database: - :param table: - :return: - """ - try: - log.info( - f'Revoking IAMAllowedGroups Super ' - f'permission for table {database}|{table}' - ) - LakeFormation.batch_revoke_permissions( - client, - accountid, - entries=[ - { - 'Id': str(uuid.uuid4()), - 'Principal': {'DataLakePrincipalIdentifier': 'EVERYONE'}, - 
'Resource': { - 'Table': { - 'DatabaseName': database, - 'Name': table, - 'CatalogId': accountid, - } - }, - 'Permissions': ['ALL'], - 'PermissionsWithGrantOption': [], - } - ], - ) - except ClientError as e: - log.debug( - f'Could not revoke IAMAllowedGroups Super ' - f'permission on table {database}|{table} due to {e}' - ) - - @staticmethod - def batch_revoke_permissions(client, accountid, entries): - """ - Batch revoke permissions to entries - Retry is set for api throttling - :param client: - :param accountid: - :param entries: - :return: - """ - log.info(f'Batch Revoking {entries}') - entries_chunks: list = [entries[i : i + 20] for i in range(0, len(entries), 20)] - failures = [] - try: - for entries_chunk in entries_chunks: - response = client.batch_revoke_permissions( - CatalogId=accountid, Entries=entries_chunk - ) - log.info(f'Batch Revoke response: {response}') - failures.extend(response.get('Failures')) - - for failure in failures: - if not ( - failure['Error']['ErrorCode'] == 'InvalidInputException' - and ( - 'Grantee has no permissions' in failure['Error']['ErrorMessage'] - or 'No permissions revoked' in failure['Error']['ErrorMessage'] - or 'not found' in failure['Error']['ErrorMessage'] - ) - ): - raise ClientError( - error_response={ - 'Error': { - 'Code': 'LakeFormation.batch_revoke_permissions', - 'Message': f'Operation ended with failures: {failures}', - } - }, - operation_name='LakeFormation.batch_revoke_permissions', - ) - - except ClientError as e: - log.warning(f'Batch Revoke ended with failures: {failures}') - raise e - - @staticmethod - def grant_resource_link_permission_on_target(client, source, target): - for principal in target['principals']: - try: - table_grant = dict( - Principal={'DataLakePrincipalIdentifier': principal}, - Resource={ - 'TableWithColumns': { - 'DatabaseName': source['database'], - 'Name': source['tablename'], - 'ColumnWildcard': {}, - 'CatalogId': source['accountid'], - } - }, - Permissions=['DESCRIBE', 'SELECT'], - PermissionsWithGrantOption=[], - ) - client.grant_permissions(**table_grant) - log.info( - f'Successfully granted permissions DESCRIBE,SELECT to {principal} on target ' - f'{source["accountid"]}://{source["database"]}/{source["tablename"]}' - ) - except ClientError as e: - logging.error( - f'Failed granting principal {principal} ' - 'read access to resource link on target' - f' {source["accountid"]}://{source["database"]}/{source["tablename"]} ' - f'due to: {e}' - ) - raise e - - @staticmethod - def grant_resource_link_permission(client, source, target, target_database): - for principal in target['principals']: - resourcelink_grant = dict( - Principal={'DataLakePrincipalIdentifier': principal}, - Resource={ - 'Table': { - 'DatabaseName': target_database, - 'Name': source['tablename'], - 'CatalogId': target['accountid'], - } - }, - # Resource link only supports DESCRIBE and DROP permissions no SELECT - Permissions=['DESCRIBE'], - ) - try: - client.grant_permissions(**resourcelink_grant) - log.info( - f'Granted resource link DESCRIBE access ' - f'to principal {principal} on {target["accountid"]}://{target_database}/{source["tablename"]}' - ) - except ClientError as e: - logging.error( - f'Failed granting principal {principal} ' - f'read access to resource link on {target["accountid"]}://{target_database}/{source["tablename"]} ' - f'due to: {e}' - ) - raise e - - @staticmethod - def revoke_source_table_access(**data): - """ - Revokes permissions for a principal in a cross account sharing setup - Parameters - ---------- - data : - - 
Returns - ------- - - """ - logging.info(f'Revoking source table access: {data} ...') - target_accountid = data['target_accountid'] - region = data['region'] - target_principals = data['target_principals'] - source_database = data['source_database'] - source_table = data['source_table'] - source_accountid = data['source_accountid'] - for target_principal in target_principals: - try: - - aws_session = SessionHelper.remote_session(target_accountid) - lakeformation = aws_session.client('lakeformation', region_name=region) - - logging.info('Revoking DESCRIBE permission...') - lakeformation.revoke_permissions( - Principal=dict(DataLakePrincipalIdentifier=target_principal), - Resource=dict( - Table=dict( - CatalogId=source_accountid, - DatabaseName=source_database, - Name=source_table, - ) - ), - Permissions=['DESCRIBE'], - PermissionsWithGrantOption=[], - ) - logging.info('Successfully revoked DESCRIBE permissions') - - logging.info('Revoking SELECT permission...') - lakeformation.revoke_permissions( - Principal=dict(DataLakePrincipalIdentifier=target_principal), - Resource=dict( - TableWithColumns=dict( - CatalogId=source_accountid, - DatabaseName=source_database, - Name=source_table, - ColumnWildcard={}, - ) - ), - Permissions=['SELECT'], - PermissionsWithGrantOption=[], - ) - logging.info('Successfully revoked DESCRIBE permissions') - - except ClientError as e: - logging.error( - f'Failed to revoke permissions for {target_principal} ' - f'on source table {source_accountid}/{source_database}/{source_table} ' - f'due to: {e}' - ) - raise e diff --git a/backend/dataall/aws/handlers/quicksight.py b/backend/dataall/aws/handlers/quicksight.py deleted file mode 100644 index 67790486a..000000000 --- a/backend/dataall/aws/handlers/quicksight.py +++ /dev/null @@ -1,547 +0,0 @@ -import logging -import re -import os -import ast - -from botocore.exceptions import ClientError -from .sts import SessionHelper -from .secrets_manager import SecretsManager -from .parameter_store import ParameterStoreManager - -logger = logging.getLogger('QuicksightHandler') -logger.setLevel(logging.DEBUG) - - -class Quicksight: - - _DEFAULT_GROUP_NAME = 'dataall' - - def __init__(self): - pass - - @staticmethod - def get_quicksight_client(AwsAccountId, region='eu-west-1'): - """Returns a boto3 quicksight client in the provided account/region - Args: - AwsAccountId(str) : aws account id - region(str) : aws region - Returns : boto3.client ("quicksight") - """ - session = SessionHelper.remote_session(accountid=AwsAccountId) - return session.client('quicksight', region_name=region) - - @staticmethod - def get_identity_region(AwsAccountId): - """Quicksight manages identities in one region, and there is no API to retrieve it - However, when using Quicksight user/group apis in the wrong region, - the client will throw and exception showing the region Quicksight's using as its - identity region. 
- Args: - AwsAccountId(str) : aws account id - Returns: str - the region quicksight uses as identity region - """ - identity_region_rex = re.compile('Please use the (?P.*) endpoint.') - identity_region = 'us-east-1' - client = Quicksight.get_quicksight_client(AwsAccountId=AwsAccountId, region=identity_region) - try: - response = client.describe_group( - AwsAccountId=AwsAccountId, GroupName=Quicksight._DEFAULT_GROUP_NAME, Namespace='default' - ) - except client.exceptions.AccessDeniedException as e: - match = identity_region_rex.findall(str(e)) - if match: - identity_region = match[0] - else: - raise e - except client.exceptions.ResourceNotFoundException: - pass - return identity_region - - @staticmethod - def get_quicksight_client_in_identity_region(AwsAccountId): - """Returns a boto3 quicksight client in the Quicksight identity region for the provided account - Args: - AwsAccountId(str) : aws account id - Returns : boto3.client ("quicksight") - - """ - identity_region = Quicksight.get_identity_region(AwsAccountId) - session = SessionHelper.remote_session(accountid=AwsAccountId) - return session.client('quicksight', region_name=identity_region) - - @staticmethod - def check_quicksight_enterprise_subscription(AwsAccountId, region=None): - """Use the DescribeAccountSubscription operation to receive a description of a Amazon QuickSight account's subscription. A successful API call returns an AccountInfo object that includes an account's name, subscription status, authentication type, edition, and notification email address. - Args: - AwsAccountId(str) : aws account id - region(str): aws region - Returns: bool - True if Quicksight Enterprise Edition is enabled in the AWS Account - """ - logger.info(f'Checking Quicksight subscription in AWS account = {AwsAccountId}') - client = Quicksight.get_quicksight_client(AwsAccountId=AwsAccountId, region=region) - try: - response = client.describe_account_subscription(AwsAccountId=AwsAccountId) - if not response['AccountInfo']: - raise Exception(f'Quicksight Enterprise Subscription not found in Account: {AwsAccountId}') - else: - if response['AccountInfo']['Edition'] not in ['ENTERPRISE', 'ENTERPRISE_AND_Q']: - raise Exception( - f"Quicksight Subscription found in Account: {AwsAccountId} of incorrect type: {response['AccountInfo']['Edition']}") - else: - if response['AccountInfo']['AccountSubscriptionStatus'] == 'ACCOUNT_CREATED': - return True - else: - raise Exception( - f"Quicksight Subscription found in Account: {AwsAccountId} not active. 
Status = {response['AccountInfo']['AccountSubscriptionStatus']}") - - except client.exceptions.ResourceNotFoundException: - raise Exception('Quicksight Enterprise Subscription not found') - - except client.exceptions.AccessDeniedException: - raise Exception('Access denied to Quicksight for selected role') - return False - - @staticmethod - def create_quicksight_group(AwsAccountId, GroupName=_DEFAULT_GROUP_NAME): - """Creates a Quicksight group called GroupName - Args: - AwsAccountId(str): aws account - GroupName(str): name of the QS group - - Returns:dict - quicksight.describe_group response - """ - client = Quicksight.get_quicksight_client_in_identity_region(AwsAccountId) - group = Quicksight.describe_group(client, AwsAccountId, GroupName) - if not group: - if GroupName == Quicksight._DEFAULT_GROUP_NAME: - logger.info(f'Initializing data.all default group = {GroupName}') - Quicksight.check_quicksight_enterprise_subscription(AwsAccountId) - - logger.info(f'Attempting to create Quicksight group `{GroupName}...') - response = client.create_group( - GroupName=GroupName, - Description='data.all group', - AwsAccountId=AwsAccountId, - Namespace='default', - ) - logger.info(f'Quicksight group {GroupName} created {response}') - response = client.describe_group( - AwsAccountId=AwsAccountId, GroupName=GroupName, Namespace='default' - ) - return response - return group - - @staticmethod - def describe_group(client, AwsAccountId, GroupName=_DEFAULT_GROUP_NAME): - try: - response = client.describe_group( - AwsAccountId=AwsAccountId, GroupName=GroupName, Namespace='default' - ) - logger.info( - f'Quicksight {GroupName} group already exists in {AwsAccountId} ' - f'(using identity region {Quicksight.get_identity_region(AwsAccountId)}): ' - f'{response}' - ) - return response - except client.exceptions.ResourceNotFoundException: - logger.info( - f'Creating Quicksight group in {AwsAccountId} (using identity region {Quicksight.get_identity_region(AwsAccountId)})' - ) - - @staticmethod - def describe_user(AwsAccountId, UserName): - """Describes a QS user, returns None if not found - Args: - AwsAccountId(str) : aws account - UserName(str) : name of the QS user - """ - client = Quicksight.get_quicksight_client_in_identity_region(AwsAccountId) - try: - response = client.describe_user( - UserName=UserName, AwsAccountId=AwsAccountId, Namespace='default' - ) - exists = True - except ClientError: - return None - return response.get('User') - - @staticmethod - def get_quicksight_group_arn(AwsAccountId): - default_group_arn = None - group = Quicksight.describe_group( - client=Quicksight.get_quicksight_client_in_identity_region( - AwsAccountId=AwsAccountId - ), - AwsAccountId=AwsAccountId, - ) - if group and group.get('Group', {}).get('Arn'): - default_group_arn = group.get('Group', {}).get('Arn') - - return default_group_arn - - @staticmethod - def list_user_groups(AwsAccountId, UserName): - client = Quicksight.get_quicksight_client_in_identity_region(AwsAccountId) - user = Quicksight.describe_user(AwsAccountId, UserName) - if not user: - return [] - response = client.list_user_groups( - UserName=UserName, AwsAccountId=AwsAccountId, Namespace='default' - ) - return response['GroupList'] - - @staticmethod - def register_user_in_group(AwsAccountId, UserName, GroupName, UserRole='READER'): - client = Quicksight.get_quicksight_client_in_identity_region( - AwsAccountId=AwsAccountId - ) - - Quicksight.create_quicksight_group(AwsAccountId, GroupName) - - exists = False - user = Quicksight.describe_user(AwsAccountId, 
UserName=UserName) - - if user is not None: - exists = True - - if exists: - response = client.update_user( - UserName=UserName, - AwsAccountId=AwsAccountId, - Namespace='default', - Email=UserName, - Role=UserRole, - ) - else: - response = client.register_user( - UserName=UserName, - Email=UserName, - AwsAccountId=AwsAccountId, - Namespace='default', - IdentityType='QUICKSIGHT', - UserRole=UserRole, - ) - member = False - - response = client.list_user_groups( - UserName=UserName, AwsAccountId=AwsAccountId, Namespace='default' - ) - logger.info( - f'list_user_groups for {UserName}: {response})' - ) - if GroupName not in [g['GroupName'] for g in response['GroupList']]: - logger.warning(f'Adding {UserName} to Quicksight group {GroupName} on {AwsAccountId}') - response = client.create_group_membership( - MemberName=UserName, - GroupName=GroupName, - AwsAccountId=AwsAccountId, - Namespace='default', - ) - return Quicksight.describe_user(AwsAccountId, UserName) - - @staticmethod - def get_reader_session( - AwsAccountId, region, UserName, UserRole='READER', DashboardId=None - ): - - client = Quicksight.get_quicksight_client(AwsAccountId, region) - user = Quicksight.describe_user(AwsAccountId, UserName) - if user is None: - user = Quicksight.register_user_in_group( - AwsAccountId=AwsAccountId, UserName=UserName, GroupName=Quicksight._DEFAULT_GROUP_NAME, UserRole=UserRole - ) - - response = client.get_dashboard_embed_url( - AwsAccountId=AwsAccountId, - DashboardId=DashboardId, - IdentityType='QUICKSIGHT', - SessionLifetimeInMinutes=120, - UserArn=user.get('Arn'), - ) - return response.get('EmbedUrl') - - @staticmethod - def check_dashboard_permissions(AwsAccountId, region, DashboardId): - client = Quicksight.get_quicksight_client(AwsAccountId, region) - response = client.describe_dashboard_permissions( - AwsAccountId=AwsAccountId, - DashboardId=DashboardId - )['Permissions'] - logger.info(f"Dashboard initial permissions: {response}") - read_principals = [] - write_principals = [] - - for a, p in zip([p["Actions"] for p in response], [p["Principal"] for p in response]): - write_principals.append(p) if "Update" in str(a) else read_principals.append(p) - - logger.info(f"Dashboard updated permissions, Read principals: {read_principals}") - logger.info(f"Dashboard updated permissions, Write principals: {write_principals}") - - return read_principals, write_principals - - @staticmethod - def get_shared_reader_session( - AwsAccountId, region, UserName, GroupName, UserRole='READER', DashboardId=None - ): - - client = Quicksight.get_quicksight_client(AwsAccountId, region) - identity_region = Quicksight.get_identity_region(AwsAccountId) - groupPrincipal = f"arn:aws:quicksight:{identity_region}:{AwsAccountId}:group/default/{GroupName}" - - user = Quicksight.register_user_in_group( - AwsAccountId=AwsAccountId, UserName=UserName, GroupName=GroupName, UserRole=UserRole - ) - - read_principals, write_principals = Quicksight.check_dashboard_permissions( - AwsAccountId=AwsAccountId, - region=region, - DashboardId=DashboardId - ) - - if groupPrincipal not in read_principals: - permissions = client.update_dashboard_permissions( - AwsAccountId=AwsAccountId, - DashboardId=DashboardId, - GrantPermissions=[ - { - 'Principal': groupPrincipal, - 'Actions': [ - "quicksight:DescribeDashboard", - "quicksight:ListDashboardVersions", - "quicksight:QueryDashboard", - ] - }, - ] - ) - logger.info(f"Permissions granted: {permissions}") - - response = client.get_dashboard_embed_url( - AwsAccountId=AwsAccountId, - 
DashboardId=DashboardId, - IdentityType='QUICKSIGHT', - SessionLifetimeInMinutes=120, - UserArn=user.get('Arn'), - ) - return response.get('EmbedUrl') - - @staticmethod - def get_anonymous_session(AwsAccountId, region, UserName, DashboardId=None): - client = Quicksight.get_quicksight_client(AwsAccountId, region) - response = client.generate_embed_url_for_anonymous_user( - AwsAccountId=AwsAccountId, - SessionLifetimeInMinutes=120, - Namespace='default', - SessionTags=[ - {'Key': Quicksight._DEFAULT_GROUP_NAME, 'Value': UserName}, - ], - AuthorizedResourceArns=[ - f'arn:aws:quicksight:{region}:{AwsAccountId}:dashboard/{DashboardId}', - ], - ExperienceConfiguration={'Dashboard': {'InitialDashboardId': DashboardId}}, - ) - return response.get('EmbedUrl') - - @staticmethod - def get_author_session(AwsAccountId, region, UserName, UserRole='AUTHOR'): - client = Quicksight.get_quicksight_client(AwsAccountId, region) - user = Quicksight.describe_user(AwsAccountId, UserName=UserName) - if user is None: - user = Quicksight.register_user_in_group( - AwsAccountId=AwsAccountId, UserName=UserName, GroupName=Quicksight._DEFAULT_GROUP_NAME, UserRole=UserRole - ) - elif user.get("Role", None) not in ["AUTHOR", "ADMIN"]: - user = Quicksight.register_user_in_group( - AwsAccountId=AwsAccountId, UserName=UserName, GroupName=Quicksight._DEFAULT_GROUP_NAME, UserRole=UserRole - ) - else: - pass - response = client.get_session_embed_url( - AwsAccountId=AwsAccountId, - EntryPoint='/start/dashboards', - SessionLifetimeInMinutes=120, - UserArn=user['Arn'], - ) - return response['EmbedUrl'] - - @staticmethod - def can_import_dashboard(AwsAccountId, region, UserName, DashboardId): - client = Quicksight.get_quicksight_client(AwsAccountId, region) - user = Quicksight.describe_user(AwsAccountId, UserName) - if not user: - return False - - groups = Quicksight.list_user_groups(AwsAccountId, UserName) - grouparns = [g['Arn'] for g in groups] - try: - response = client.describe_dashboard_permissions( - AwsAccountId=AwsAccountId, DashboardId=DashboardId - ) - except ClientError as e: - raise e - - permissions = response.get('Permissions', []) - for p in permissions: - if p['Principal'] == user.get('Arn') or p['Principal'] in grouparns: - for a in p['Actions']: - if a in [ - 'quicksight:UpdateDashboard', - 'UpdateDashboardPermissions', - ]: - return True - - return False - - @staticmethod - def create_data_source_vpc(AwsAccountId, region, UserName, vpcConnectionId): - client = Quicksight.get_quicksight_client(AwsAccountId, region) - identity_region = 'us-east-1' - user = Quicksight.register_user_in_group( - AwsAccountId=AwsAccountId, UserName=UserName, GroupName=Quicksight._DEFAULT_GROUP_NAME, UserRole='AUTHOR' - ) - try: - response = client.describe_data_source( - AwsAccountId=AwsAccountId, DataSourceId="dataall-metadata-db" - ) - - except client.exceptions.ResourceNotFoundException: - aurora_secret_arn = ParameterStoreManager.get_parameter_value(AwsAccountId=AwsAccountId, region=region, parameter_path=f'/dataall/{os.getenv("envname", "local")}/aurora/secret_arn') - aurora_params = SecretsManager.get_secret_value( - AwsAccountId=AwsAccountId, region=region, secretId=aurora_secret_arn - ) - aurora_params_dict = ast.literal_eval(aurora_params) - response = client.create_data_source( - AwsAccountId=AwsAccountId, - DataSourceId="dataall-metadata-db", - Name="dataall-metadata-db", - Type="AURORA_POSTGRESQL", - DataSourceParameters={ - 'AuroraPostgreSqlParameters': { - 'Host': aurora_params_dict["host"], - 'Port': "5432", - 
'Database': aurora_params_dict["dbname"] - } - }, - Credentials={ - "CredentialPair": { - "Username": aurora_params_dict["username"], - "Password": aurora_params_dict["password"], - } - }, - Permissions=[ - { - "Principal": f"arn:aws:quicksight:{region}:{AwsAccountId}:group/default/dataall", - "Actions": [ - "quicksight:UpdateDataSourcePermissions", - "quicksight:DescribeDataSource", - "quicksight:DescribeDataSourcePermissions", - "quicksight:PassDataSource", - "quicksight:UpdateDataSource", - "quicksight:DeleteDataSource" - ] - } - ], - VpcConnectionProperties={ - 'VpcConnectionArn': f"arn:aws:quicksight:{region}:{AwsAccountId}:vpcConnection/{vpcConnectionId}" - } - ) - - return "dataall-metadata-db" - - @staticmethod - def create_data_set_from_source(AwsAccountId, region, UserName, dataSourceId, tablesToImport): - client = Quicksight.get_quicksight_client(AwsAccountId, region) - user = Quicksight.describe_user(AwsAccountId, UserName) - if not user: - return False - - data_source = client.describe_data_source( - AwsAccountId=AwsAccountId, - DataSourceId=dataSourceId - ) - - if not data_source: - return False - - for table in tablesToImport: - - response = client.create_data_set( - AwsAccountId=AwsAccountId, - DataSetId=f"dataall-imported-{table}", - Name=f"dataall-imported-{table}", - PhysicalTableMap={ - 'string': { - 'RelationalTable': { - 'DataSourceArn': data_source.get('DataSource').get('Arn'), - 'Catalog': 'string', - 'Schema': 'dev', - 'Name': table, - 'InputColumns': [ - { - 'Name': 'string', - 'Type': 'STRING' - }, - ] - } - }}, - ImportMode='DIRECT_QUERY', - Permissions=[ - { - 'Principal': user.get('Arn'), - 'Actions': [ - "quicksight:DescribeDataSet", - "quicksight:DescribeDataSetPermissions", - "quicksight:PassDataSet", - "quicksight:DescribeIngestion", - "quicksight:ListIngestions" - ] - }, - ], - ) - - return True - - @staticmethod - def create_analysis(AwsAccountId, region, UserName): - client = Quicksight.get_quicksight_client(AwsAccountId, region) - user = Quicksight.describe_user(AwsAccountId, UserName) - if not user: - return False - - response = client.create_analysis( - AwsAccountId=AwsAccountId, - AnalysisId='dataallMonitoringAnalysis', - Name='dataallMonitoringAnalysis', - Permissions=[ - { - 'Principal': user.get('Arn'), - 'Actions': [ - 'quicksight:DescribeAnalysis', - 'quicksight:DescribeAnalysisPermissions', - 'quicksight:UpdateAnalysisPermissions', - 'quicksight:UpdateAnalysis' - ] - }, - ], - SourceEntity={ - 'SourceTemplate': { - 'DataSetReferences': [ - { - 'DataSetPlaceholder': 'environment', - 'DataSetArn': f"arn:aws:quicksight:{region}:{AwsAccountId}:dataset/" - }, - ], - 'Arn': ' pg_backend_pid();' - ) - Redshift.run_query( - **{ - 'accountid': cluster.AwsAccountId, - 'region': cluster.region, - 'cluster_id': cluster.name, - 'database': cluster.databaseName, - 'dbuser': cluster.masterUsername, - 'sql_query': kill_sessionsquery, - } - ) - Redshift.run_query( - **{ - 'accountid': cluster.AwsAccountId, - 'region': cluster.region, - 'cluster_id': cluster.name, - 'database': cluster.databaseName, - 'dbuser': cluster.masterUsername, - 'sql_query': f'REVOKE ALL ON SCHEMA {database} TO {cluster.databaseUser} ', - } - ) - Redshift.run_query( - **{ - 'accountid': cluster.AwsAccountId, - 'region': cluster.region, - 'cluster_id': cluster.name, - 'database': cluster.databaseName, - 'dbuser': cluster.masterUsername, - 'sql_query': f'drop schema {database}', - } - ) - return True - - @staticmethod - def get_cluster_catalog_databases(session, task): - try: - cluster = 
db.api.RedshiftCluster.get_redshift_cluster_by_uri( - session, task.targetUri - ) - env = db.api.Environment.get_environment_by_uri( - session, cluster.environmentUri - ) - cluster_datasets = db.api.RedshiftCluster.list_all_cluster_datasets( - session, cluster.clusterUri - ) - secretsmanager = SessionHelper.remote_session(cluster.AwsAccountId).client( - 'secretsmanager', region_name=cluster.region - ) - Redshift.set_cluster_secrets(secretsmanager, cluster) - catalog_databases = [] - for d in cluster_datasets: - dataset = db.api.Dataset.get_dataset_by_uri(session, d.datasetUri) - if dataset.environmentUri != cluster.environmentUri: - catalog_databases.append(f'{dataset.GlueDatabaseName}shared') - else: - catalog_databases.append(f'{dataset.GlueDatabaseName}') - - log.info(f'Found Schemas to create with Spectrum {catalog_databases}') - except ClientError as e: - log.error(e, exc_info=True) - raise e - return catalog_databases, cluster, env - - @staticmethod - @Worker.handler(path='redshift.cluster.tag') - def tag_cluster(engine, task): - with engine.scoped_session() as session: - cluster = db.api.RedshiftCluster.get_redshift_cluster_by_uri( - session, task.targetUri - ) - try: - accountid = cluster.AwsAccountId - region = cluster.region - session = SessionHelper.remote_session(accountid) - client_redshift = session.client('redshift', region_name=region) - client_redshift.create_tags( - ResourceName=f'arn:aws:redshift:{region}:{accountid}:cluster:{cluster.name}', - Tags=[{'Key': 'dataall', 'Value': 'true'}], - ) - except ClientError as e: - log.error(e, exc_info=True) - raise e - - @staticmethod - @Worker.handler(path='redshift.iam_roles.update') - def update_cluster_roles(engine, task: models.Task): - with engine.scoped_session() as session: - cluster = db.api.RedshiftCluster.get_redshift_cluster_by_uri( - session, task.targetUri - ) - environment: models.Environment = session.query(models.Environment).get( - cluster.environmentUri - ) - log.info( - f'Updating cluster {cluster.name}|{environment.AwsAccountId} ' - f'with environment role {environment.EnvironmentDefaultIAMRoleArn}' - ) - try: - accountid = cluster.AwsAccountId - region = cluster.region - aws_session = SessionHelper.remote_session(accountid) - client_redshift = aws_session.client('redshift', region_name=region) - client_redshift.modify_cluster_iam_roles( - ClusterIdentifier=cluster.name, - AddIamRoles=[ - environment.EnvironmentDefaultIAMRoleArn, - ], - ) - log.info( - f'Successfully Updated cluster {cluster.name}|{environment.AwsAccountId} ' - f'with environment role {environment.EnvironmentDefaultIAMRoleArn}' - ) - except ClientError as e: - log.error(e, exc_info=True) - raise e - - @staticmethod - @Worker.handler(path='redshift.subscriptions.copy') - def copy_data(engine, task: models.Task): - with engine.scoped_session() as session: - - environment: models.Environment = session.query(models.Environment).get( - task.targetUri - ) - - dataset: models.Dataset = db.api.Dataset.get_dataset_by_uri( - session, task.payload['datasetUri'] - ) - - table: models.DatasetTable = db.api.DatasetTable.get_dataset_table_by_uri( - session, task.payload['tableUri'] - ) - - env_clusters = ( - session.query(models.RedshiftCluster) - .filter( - models.RedshiftCluster.environmentUri == environment.environmentUri, - ) - .all() - ) - - log.info(f"Received Message {task.payload['message']}") - - message = task.payload['message'] - - if not message: - raise Exception('Task message can not be found') - - glue_table = Glue.table_exists( - **{ - 
'accountid': table.AWSAccountId, - 'region': table.region, - 'database': table.GlueDatabaseName, - 'tablename': table.GlueTableName, - } - ) - columns = ( - glue_table.get('Table').get('StorageDescriptor', {}).get('Columns') - ) - log.info(f'Glue table columns: {columns}') - - ddl_columns = ','.join( - [ - f"{col['Name']} {Redshift.convert_to_redshift_types(col['Type'])}" - for col in columns - ] - ) - log.info(f'DDL Columns: {ddl_columns}') - - for cluster in env_clusters: - cluster_dataset_table = ( - db.api.RedshiftCluster.get_cluster_dataset_table( - session, cluster.clusterUri, dataset.datasetUri, table.tableUri - ) - ) - if cluster_dataset_table: - log.info( - f'Cluster {cluster}|{environment.AwsAccountId} ' - f'copy from {dataset.name} for table {table.GlueTableName} is enabled' - ) - queries = list() - queries.append( - f'CREATE SCHEMA IF NOT EXISTS {cluster_dataset_table.schema}' - ) - queries.append( - f'GRANT ALL ON SCHEMA {cluster_dataset_table.schema} TO {cluster.databaseUser}' - ) - queries.append( - f'GRANT ALL ON SCHEMA {cluster_dataset_table.schema} TO GROUP PUBLIC' - ) - queries.append( - Redshift.get_create_table_statement( - cluster_dataset_table.schema, - table.GlueTableName, - ddl_columns, - ) - ) - queries.append( - f'GRANT ALL ON TABLE {cluster_dataset_table.schema}.{table.GlueTableName} TO {cluster.databaseUser}' - ) - queries.append( - f'GRANT ALL ON TABLE {cluster_dataset_table.schema}.{table.GlueTableName} TO GROUP PUBLIC' - ) - data_prefix = Redshift.get_data_prefix(cluster_dataset_table) - queries.extend( - Redshift.get_merge_table_statements( - cluster_dataset_table.schema, - table.GlueTableName, - data_prefix, - environment.EnvironmentDefaultIAMRoleArn, - ddl_columns, - ) - ) - for query in queries: - Redshift.run_query( - **{ - 'accountid': cluster.AwsAccountId, - 'region': cluster.region, - 'cluster_id': cluster.name, - 'database': cluster.databaseName, - 'dbuser': cluster.databaseUser, - 'sql_query': query, - } - ) - return True - - @staticmethod - def get_data_prefix(table: models.RedshiftClusterDatasetTable): - data_prefix = ( - table.dataLocation - if '/packages.delta' not in table.dataLocation - else table.dataLocation.replace('/packages.delta', '') - ) - data_prefix = ( - data_prefix - if '/_symlink_format_manifest' not in data_prefix - else data_prefix.replace('/_symlink_format_manifest', '') - ) - return data_prefix - - @staticmethod - def get_create_table_statement(schema, table_name, columns): - return f'CREATE TABLE IF NOT EXISTS {schema}.{table_name}({columns})' - - @staticmethod - def get_copy_table_statement(schema, table_name, data_prefix, iam_role_arn): - return ( - f'COPY {schema}.{table_name} ' - f"FROM '{data_prefix}' " - f"iam_role '{iam_role_arn}' " - ) - - @staticmethod - def convert_to_redshift_types(dtypes): - redshift_sql_map = { - 'long': 'bigint', - 'double': 'bigint', - 'string': 'varchar(max)', - } - return ( - redshift_sql_map[dtypes.lower()] - if redshift_sql_map.get(dtypes.lower()) - else dtypes - ) - - @staticmethod - def get_merge_table_statements( - schema, table_name, data_prefix, iam_role_arn, columns - ): - statements = list() - statements.append( - f"""CREATE TABLE "{schema}"."{table_name}_stage"({columns});""" - ) - statements.append( - f"""COPY "{schema}"."{table_name}_stage" FROM '{data_prefix}' iam_role '{iam_role_arn}' format as parquet;""" - ) - statements.append( - f"""CREATE TABLE "{schema}"."{table_name}_stage"({columns};""" - ) - statements.append( - f""" - -- Start a new transaction - begin transaction; - - 
drop table if exists "{schema}"."{table_name}"; - - -- Insert all the rows from the staging table into the target table - alter table "{schema}"."{table_name}_stage" rename to "{table_name}"; - - -- End transaction and commit - end transaction; - """ - ) - return statements diff --git a/backend/dataall/aws/handlers/s3.py b/backend/dataall/aws/handlers/s3.py deleted file mode 100755 index bcd0ad440..000000000 --- a/backend/dataall/aws/handlers/s3.py +++ /dev/null @@ -1,198 +0,0 @@ -import logging - -from ... import db -from ...db import models -from .service_handlers import Worker -from .sts import SessionHelper - -log = logging.getLogger(__name__) - - -class S3: - @staticmethod - @Worker.handler(path='s3.prefix.create') - def create_dataset_location(engine, task: models.Task): - with engine.scoped_session() as session: - location = db.api.DatasetStorageLocation.get_location_by_uri( - session, task.targetUri - ) - S3.create_bucket_prefix(location) - return location - - @staticmethod - def client(account_id: str, region: str, client_type: str): - session = SessionHelper.remote_session(accountid=account_id) - return session.client(client_type, region_name=region) - - @staticmethod - def create_bucket_prefix(location): - try: - accountid = location.AWSAccountId - region = location.region - s3cli = S3.client(account_id=accountid, region=region, client_type='s3') - response = s3cli.put_object( - Bucket=location.S3BucketName, Body='', Key=location.S3Prefix + '/' - ) - log.info( - 'Creating S3 Prefix `{}`({}) on AWS #{}'.format( - location.S3BucketName, accountid, response - ) - ) - location.locationCreated = True - except Exception as e: - log.error( - f'Dataset storage location creation failed on S3 for dataset location {location.locationUri} : {e}' - ) - raise e - - @staticmethod - def create_bucket_policy(account_id: str, region: str, bucket_name: str, policy: str): - try: - s3cli = S3.client(account_id=account_id, region=region, client_type='s3') - s3cli.put_bucket_policy( - Bucket=bucket_name, - Policy=policy, - ConfirmRemoveSelfBucketAccess=False, - ExpectedBucketOwner=account_id, - ) - log.info( - f'Created bucket policy of {bucket_name} on {account_id} successfully' - ) - except Exception as e: - log.error( - f'Bucket policy created failed on bucket {bucket_name} of {account_id} : {e}' - ) - raise e - - @staticmethod - def get_bucket_policy(account_id: str, region: str, bucket_name: str): - try: - s3cli = S3.client(account_id=account_id, region=region, client_type='s3') - response = s3cli.get_bucket_policy(Bucket=bucket_name, ExpectedBucketOwner=account_id) - except Exception as e: - log.warning( - f'Failed to get bucket policy of {bucket_name} : {e}' - ) - return None - else: - return response['Policy'] - - @staticmethod - def get_bucket_access_point_arn(account_id: str, region: str, access_point_name: str): - try: - s3control = S3.client(account_id, region, 's3control') - access_point = s3control.get_access_point( - AccountId=account_id, - Name=access_point_name, - ) - except Exception as e: - log.info( - f'Failed to get S3 bucket access point {access_point_name} on {account_id} : {e}' - ) - return None - else: - return access_point["AccessPointArn"] - - @staticmethod - def create_bucket_access_point(account_id: str, region: str, bucket_name: str, access_point_name: str): - try: - s3control = S3.client(account_id, region, 's3control') - access_point = s3control.create_access_point( - AccountId=account_id, - Name=access_point_name, - Bucket=bucket_name, - ) - except Exception as e: - 
log.error( - f'S3 bucket access point creation failed for location {bucket_name} : {e}' - ) - raise e - else: - return access_point["AccessPointArn"] - - @staticmethod - def delete_bucket_access_point(account_id: str, region: str, access_point_name: str): - try: - s3control = S3.client(account_id, region, 's3control') - s3control.delete_access_point( - AccountId=account_id, - Name=access_point_name, - ) - except Exception as e: - log.error( - f'Failed to delete S3 bucket access point {access_point_name}/{account_id} : {e}' - ) - raise e - - @staticmethod - def get_access_point_policy(account_id: str, region: str, access_point_name: str): - try: - s3control = S3.client(account_id, region, 's3control') - response = s3control.get_access_point_policy( - AccountId=account_id, - Name=access_point_name, - ) - except Exception as e: - log.info( - f'Failed to get policy of access point {access_point_name} on {account_id} : {e}' - ) - return None - else: - return response['Policy'] - - @staticmethod - def attach_access_point_policy(account_id: str, region: str, access_point_name: str, policy: str): - try: - s3control = S3.client(account_id, region, 's3control') - s3control.put_access_point_policy( - AccountId=account_id, - Name=access_point_name, - Policy=policy - ) - except Exception as e: - log.error( - f'S3 bucket access point policy creation failed : {e}' - ) - raise e - - @staticmethod - def generate_access_point_policy_template( - principal_id: str, - access_point_arn: str, - s3_prefix: str, - ): - policy = { - 'Version': '2012-10-17', - "Statement": [ - { - "Sid": f"{principal_id}0", - "Effect": "Allow", - "Principal": { - "AWS": "*" - }, - "Action": "s3:ListBucket", - "Resource": f"{access_point_arn}", - "Condition": { - "StringLike": { - "s3:prefix": [f"{s3_prefix}/*"], - "aws:userId": [f"{principal_id}:*"] - } - } - }, - { - "Sid": f"{principal_id}1", - "Effect": "Allow", - "Principal": { - "AWS": "*" - }, - "Action": "s3:GetObject", - "Resource": [f"{access_point_arn}/object/{s3_prefix}/*"], - "Condition": { - "StringLike": { - "aws:userId": [f"{principal_id}:*"] - } - } - } - ] - } - return policy diff --git a/backend/dataall/aws/handlers/sagemaker.py b/backend/dataall/aws/handlers/sagemaker.py deleted file mode 100644 index 653740cf1..000000000 --- a/backend/dataall/aws/handlers/sagemaker.py +++ /dev/null @@ -1,86 +0,0 @@ -import logging - -from .sts import SessionHelper -from botocore.exceptions import ClientError - -logger = logging.getLogger(__name__) - - -class Sagemaker: - @staticmethod - def client(AwsAccountId, region): - session = SessionHelper.remote_session(accountid=AwsAccountId) - return session.client('sagemaker', region_name=region) - - @staticmethod - def get_notebook_instance_status(AwsAccountId, region, NotebookInstanceName): - try: - client = Sagemaker.client(AwsAccountId, region) - response = client.describe_notebook_instance( - NotebookInstanceName=NotebookInstanceName - ) - return response.get('NotebookInstanceStatus', 'NOT FOUND') - except ClientError as e: - logger.error( - f'Could not retrieve instance {NotebookInstanceName} status due to: {e} ' - ) - return 'NOT FOUND' - - @staticmethod - def presigned_url(AwsAccountId, region, NotebookInstanceName): - try: - client = Sagemaker.client(AwsAccountId, region) - response = client.create_presigned_notebook_instance_url( - NotebookInstanceName=NotebookInstanceName - ) - return response['AuthorizedUrl'] - except ClientError as e: - raise e - - @staticmethod - def presigned_url_jupyterlab(AwsAccountId, region, 
NotebookInstanceName): - try: - client = Sagemaker.client(AwsAccountId, region) - response = client.create_presigned_notebook_instance_url( - NotebookInstanceName=NotebookInstanceName - ) - url_parts = response['AuthorizedUrl'].split('?authToken') - url = url_parts[0] + '/lab' + '?authToken' + url_parts[1] - return url - except ClientError as e: - raise e - - @staticmethod - def start_instance(AwsAccountId, region, NotebookInstanceName): - try: - client = Sagemaker.client(AwsAccountId, region) - status = Sagemaker.get_notebook_instance_status( - AwsAccountId, region, NotebookInstanceName - ) - client.start_notebook_instance(NotebookInstanceName=NotebookInstanceName) - return status - except ClientError as e: - return e - - @staticmethod - def stop_instance(AwsAccountId, region, NotebookInstanceName): - try: - client = Sagemaker.client(AwsAccountId, region) - client.stop_notebook_instance(NotebookInstanceName=NotebookInstanceName) - except ClientError as e: - raise e - - @staticmethod - def get_security_groups(AwsAccountId, region): - try: - session = SessionHelper.remote_session(accountid=AwsAccountId) - client = session.client('ec2', region_name=region) - response = client.describe_security_groups() - sgnames = [SG['GroupName'] for SG in response['SecurityGroups']] - sgindex = [ - i for i, s in enumerate(sgnames) if 'DefaultLinuxSecurityGroup' in s - ] - SecurityGroupIds = [response['SecurityGroups'][sgindex[0]]['GroupId']] - return SecurityGroupIds - except ClientError as e: - raise e diff --git a/backend/dataall/aws/handlers/sagemaker_studio.py b/backend/dataall/aws/handlers/sagemaker_studio.py deleted file mode 100644 index a9150d2c0..000000000 --- a/backend/dataall/aws/handlers/sagemaker_studio.py +++ /dev/null @@ -1,103 +0,0 @@ -from botocore.exceptions import ClientError - -from .parameter_store import ParameterStoreManager -from .sts import SessionHelper -from ...db.models import Environment - - -class SagemakerStudio: - @staticmethod - def client(AwsAccountId, region, role=None): - session = SessionHelper.remote_session(accountid=AwsAccountId, role=role) - return session.client('sagemaker', region_name=region) - - @staticmethod - def get_sagemaker_studio_domain(AwsAccountId, region, role=None): - """ - Sagemaker studio domain is limited to one per account, - RETURN: an existing domain or None if no domain is in the AWS account - """ - - client = SagemakerStudio.client(AwsAccountId=AwsAccountId, region=region, role=role) - existing_domain = dict() - try: - domain_id_paginator = client.get_paginator('list_domains') - domains = domain_id_paginator.paginate() - for _domain in domains: - print(_domain) - for _domain in _domain.get('Domains'): - # Get the domain name created by dataall - if 'dataall' in _domain: - return _domain - else: - existing_domain = _domain - return existing_domain - except ClientError as e: - print(e) - return 'NotFound' - - @staticmethod - def presigned_url( - AwsAccountId, - region, - sagemakerStudioDomainID, - sagemakerStudioUserProfileNameSlugify, - ): - client = SagemakerStudio.client(AwsAccountId, region) - try: - response_signed_url = client.create_presigned_domain_url( - DomainId=sagemakerStudioDomainID, - UserProfileName=sagemakerStudioUserProfileNameSlugify, - ) - return response_signed_url['AuthorizedUrl'] - except ClientError: - return '' - - @staticmethod - def get_user_profile_status( - AwsAccountId, - region, - sagemakerStudioDomainID, - sagemakerStudioUserProfileNameSlugify, - ): - client = SagemakerStudio.client(AwsAccountId, region) - try: - 
response = client.describe_user_profile( - DomainId=sagemakerStudioDomainID, - UserProfileName=sagemakerStudioUserProfileNameSlugify, - ) - return response['Status'] - except ClientError as e: - print(e) - return 'NotFound' - - @staticmethod - def get_user_profile_applications( - AwsAccountId, - region, - sagemakerStudioDomainID, - sagemakerStudioUserProfileNameSlugify, - ): - client = SagemakerStudio.client(AwsAccountId, region) - _running_apps = [] - try: - paginator_app = client.get_paginator('list_apps') - response_paginator = paginator_app.paginate( - DomainIdEquals=sagemakerStudioDomainID, - UserProfileNameEquals=sagemakerStudioUserProfileNameSlugify, - ) - for _response_app in response_paginator: - for _app in _response_app['Apps']: - if _app.get('Status') not in ['Deleted']: - _running_apps.append( - dict( - DomainId=_app.get('DomainId'), - UserProfileName=_app.get('UserProfileName'), - AppType=_app.get('AppType'), - AppName=_app.get('AppName'), - Status=_app.get('Status'), - ) - ) - return _running_apps - except ClientError as e: - raise e diff --git a/backend/dataall/aws/handlers/secrets_manager.py b/backend/dataall/aws/handlers/secrets_manager.py deleted file mode 100644 index 5c278be77..000000000 --- a/backend/dataall/aws/handlers/secrets_manager.py +++ /dev/null @@ -1,33 +0,0 @@ -import logging - -from botocore.exceptions import ClientError - -from .sts import SessionHelper - -log = logging.getLogger(__name__) - - -def ns2d(**kwargs): - return kwargs - - -class SecretsManager: - def __init__(self): - pass - - @staticmethod - def client(AwsAccountId, region): - session = SessionHelper.remote_session(AwsAccountId) - return session.client('secretsmanager', region_name=region) - - @staticmethod - def get_secret_value(AwsAccountId, region, secretId): - if not secretId: - raise Exception('Secret name is None') - try: - secret_value = SecretsManager.client( - AwsAccountId, region - ).get_secret_value(SecretId=secretId)['SecretString'] - except ClientError as e: - raise Exception(e) - return secret_value diff --git a/backend/dataall/aws/handlers/sns.py b/backend/dataall/aws/handlers/sns.py deleted file mode 100644 index 3f7c87ba9..000000000 --- a/backend/dataall/aws/handlers/sns.py +++ /dev/null @@ -1,52 +0,0 @@ -import json -import logging - -from botocore.exceptions import ClientError - -from .service_handlers import Worker -from .sts import SessionHelper -from ... 
import db -from ...db import models - -logger = logging.getLogger(__name__) - - -class Sns: - def __init__(self): - pass - - @staticmethod - @Worker.handler(path='sns.dataset.publish_update') - def publish_update(engine, task: models.Task): - with engine.scoped_session() as session: - dataset = db.api.Dataset.get_dataset_by_uri(session, task.targetUri) - environment = db.api.Environment.get_environment_by_uri( - session, dataset.environmentUri - ) - aws_session = SessionHelper.remote_session( - accountid=environment.AwsAccountId - ) - sns = aws_session.client('sns', region_name=environment.region) - message = { - 'prefix': task.payload['s3Prefix'], - 'accountid': environment.AwsAccountId, - 'region': environment.region, - 'bucket_name': dataset.S3BucketName, - } - try: - logger.info( - f'Sending dataset {dataset.datasetUri}|{message} update message for consumers' - ) - response = sns.publish( - TopicArn=f'arn:aws:sns:{environment.region}:{environment.AwsAccountId}:{environment.subscriptionsProducersTopicName}', - Message=json.dumps(message), - ) - return response - except ClientError as e: - logger.error( - f'Failed to deliver dataset ' - f'{dataset.datasetUri}|{message} ' - f'update message for consumers ' - f'due to: {e} ' - ) - raise e diff --git a/backend/dataall/aws/handlers/sqs.py b/backend/dataall/aws/handlers/sqs.py deleted file mode 100644 index 2540a07b5..000000000 --- a/backend/dataall/aws/handlers/sqs.py +++ /dev/null @@ -1,66 +0,0 @@ -import json -import logging -import os -import uuid - -import boto3 -from botocore.exceptions import ClientError - -from ...utils import Parameter - -logger = logging.getLogger(__name__) - - -class SqsQueue: - disabled = True - queue_url = None - - @classmethod - def configure_(cls, queue_url): - if queue_url: - cls.enable() - cls.queue_url = queue_url - else: - cls.disable() - - @classmethod - def disable(cls): - cls.disabled = True - - @classmethod - def enable(cls): - cls.disabled = False - - @classmethod - def get_envname(cls): - return os.environ.get('envname', 'local') - - @classmethod - def get_sqs_client(cls): - if not cls.disabled: - client = boto3.client( - 'sqs', region_name=os.getenv('AWS_REGION', 'eu-west-1') - ) - return client - - @classmethod - def send(cls, engine, task_ids: [str]): - cls.configure_( - Parameter().get_parameter(env=cls.get_envname(), path='sqs/queue_url') - ) - client = cls.get_sqs_client() - logger.debug(f'Sending task {task_ids} through SQS {cls.queue_url}') - try: - return client.send_message( - QueueUrl=cls.queue_url, - MessageBody=json.dumps(task_ids), - MessageGroupId=cls._get_random_message_id(), - MessageDeduplicationId=cls._get_random_message_id(), - ) - except ClientError as e: - logger.error(e) - raise e - - @classmethod - def _get_random_message_id(cls): - return str(uuid.uuid4()) diff --git a/backend/dataall/aws/handlers/stepfunction.py b/backend/dataall/aws/handlers/stepfunction.py deleted file mode 100644 index 9d497d63d..000000000 --- a/backend/dataall/aws/handlers/stepfunction.py +++ /dev/null @@ -1,42 +0,0 @@ -from .sts import SessionHelper -from ...db import models - - -def run_pipeline(state_machine_name, env: models.Environment, stage='Test'): - if not state_machine_name: - raise Exception( - 'An error occurred (StackNotFound) when calling the RUN PIPELINE operation' - ) - aws = SessionHelper.remote_session(env.AwsAccountId) - client = aws.client('stepfunctions', region_name=env.region) - arn = f'arn:aws:states:{env.region}:{env.AwsAccountId}:stateMachine:{state_machine_name}' - try: - 
client.describe_state_machine(stateMachineArn=arn) - except client.exceptions.StateMachineDoesNotExist: - raise Exception( - f'An error occurred (StateMachineNotFound) {arn} when calling the RUN PIPELINE operation' - ) - - response = client.start_execution(stateMachineArn=arn) - - return response['executionArn'] - - -def list_executions(state_machine_name, env: models.Environment, stage='Test'): - if not state_machine_name: - raise Exception( - 'An error occurred (StackNotFound) when calling the RUN PIPELINE operation' - ) - aws = SessionHelper.remote_session(env.AwsAccountId) - client = aws.client('stepfunctions', region_name=env.region) - arn = f'arn:aws:states:{env.region}:{env.AwsAccountId}:stateMachine:{state_machine_name}' - try: - client.describe_state_machine(stateMachineArn=arn) - except client.exceptions.StateMachineDoesNotExist: - print( - f'An error occurred (StateMachineNotFound) {arn} when calling the RUN PIPELINE operation' - ) - return [] - response = client.list_executions(stateMachineArn=arn, maxResults=100) - executions = response.get('executions', []) - return executions diff --git a/backend/dataall/aws/handlers/sts.py b/backend/dataall/aws/handlers/sts.py deleted file mode 100644 index dbfd414d0..000000000 --- a/backend/dataall/aws/handlers/sts.py +++ /dev/null @@ -1,362 +0,0 @@ -import json -import logging -import os -import urllib - -import boto3 -from botocore.client import Config -from botocore.exceptions import ClientError - -from dataall.version import __version__, __pkg_name__ - -try: - from urllib import quote_plus - from urllib2 import urlopen -except ImportError: - from urllib.parse import quote_plus - from urllib.request import urlopen - - -log = logging.getLogger(__name__) - - -class SessionHelper: - """SessionHelpers is a class simplifying common aws boto3 session tasks and helpers""" - - @classmethod - def get_session(cls, base_session=None, role_arn=None): - """Returns a boto3 session fo the given role - Args: - base_session(object,optional) : a boto3 session - role_arn(string, optional) : a role arn - Returns: - boto3.session.Session : a boto3 session - If neither base_session and role_arn is provided, returns a default boto3 session - If role_arn is provided, base_session should be a boto3 session on the aws accountid is defined - """ - if role_arn: - external_id_secret = cls.get_external_id_secret() - if external_id_secret: - assume_role_dict = dict( - RoleArn=role_arn, - RoleSessionName=role_arn.split('/')[1], - ExternalId=external_id_secret, - ) - else: - assume_role_dict = dict( - RoleArn=role_arn, - RoleSessionName=role_arn.split('/')[1], - ) - try: - region = os.getenv('AWS_REGION', 'eu-west-1') - sts = base_session.client( - 'sts', - config=Config(user_agent_extra=f'{__pkg_name__}/{__version__}'), - region_name=region, - endpoint_url=f"https://sts.{region}.amazonaws.com" - ) - response = sts.assume_role(**assume_role_dict) - return boto3.Session( - aws_access_key_id=response['Credentials']['AccessKeyId'], - aws_secret_access_key=response['Credentials']['SecretAccessKey'], - aws_session_token=response['Credentials']['SessionToken'], - ) - except ClientError as e: - log.error(f'Failed to assume role {role_arn} due to: {e} ') - raise e - - else: - return boto3.Session() - - @classmethod - def _get_parameter_value(cls, parameter_path=None): - """ - Method to get parameter from System Manager Parameter Store - :return: - :rtype: - """ - parameter_value = None - region = os.getenv('AWS_REGION', 'eu-west-1') - if not parameter_path: - raise 
Exception('Parameter name is None') - try: - session = SessionHelper.get_session() - client = session.client('ssm', region_name=region) - parameter_value = client.get_parameter(Name=parameter_path)['Parameter']['Value'] - log.debug(f'Found Parameter {parameter_path}|{parameter_value}') - except ClientError as e: - log.warning(f'Parameter {parameter_path} not found: {e}') - return parameter_value - - @classmethod - def get_external_id_secret(cls): - """ - External Id used to secure dataall pivot role - sts:AssumeRole operation on onboarded environments - :return: - :rtype: - """ - return SessionHelper._get_parameter_value( - parameter_path=f'/dataall/{os.getenv("envname", "local")}/pivotRole/externalId') - - @classmethod - def get_delegation_role_name(cls): - """Returns the role name that this package assumes on remote accounts - Returns: - string: name of the assumed role - """ - return SessionHelper._get_parameter_value( - parameter_path=f'/dataall/{os.getenv("envname", "local")}/pivotRole/pivotRoleName') - - @classmethod - def get_console_access_url(cls, boto3_session, region='eu-west-1', bucket=None, redshiftcluster=None): - """Returns an AWS Console access url for the boto3 session - Args: - boto3_session(object): a boto3 session - Returns: - String: aws federated access console url - """ - c = boto3_session.get_credentials() - json_string_with_temp_credentials = '{' - json_string_with_temp_credentials += '"sessionId":"' + c.access_key + '",' - json_string_with_temp_credentials += '"sessionKey":"' + c.secret_key + '",' - json_string_with_temp_credentials += '"sessionToken":"' + c.token + '"' - json_string_with_temp_credentials += '}' - - request_parameters = '?Action=getSigninToken' - # request_parameters = "&SessionDuration=43200" - request_parameters += '&Session=' + urllib.parse.quote_plus(json_string_with_temp_credentials) - request_url = 'https://signin.aws.amazon.com/federation' + request_parameters - - r = urllib.request.urlopen(request_url).read() - - signin_token = json.loads(r) - request_parameters = '?Action=login' - request_parameters += '&Issuer=Example.org' - if bucket: - request_parameters += '&Destination=' + quote_plus( - 'https://{}.console.aws.amazon.com/s3/buckets/{}/'.format(region, bucket) - ) - - elif redshiftcluster: - request_parameters += '&Destination=' + quote_plus( - f'https://{region}.console.aws.amazon.com/redshiftv2/' f'home?region={region}#query-editor:' - ) - else: - request_parameters += '&Destination=' + urllib.parse.quote_plus(f'https://{region}.console.aws.amazon.com/') - request_parameters += '&SigninToken=' + signin_token['SigninToken'] - request_url = 'https://signin.aws.amazon.com/federation' + request_parameters - - # Send final URL to stdout - return request_url - - @classmethod - def get_delegation_role_arn(cls, accountid): - """Returns the name that will be assumed to perform IAM actions on a given AWS accountid - Args: - accountid(string) : aws account id - Returns: - string : arn of the delegation role on the target aws account id - """ - return 'arn:aws:iam::{}:role/{}'.format(accountid, cls.get_delegation_role_name()) - - @classmethod - def get_cdk_look_up_role_arn(cls, accountid, region): - """Returns the name that will be assumed to perform IAM actions on a given AWS accountid using CDK Toolkit role - Args: - accountid(string) : aws account id - Returns: - string : arn of the CDKToolkit role on the target aws account id - """ - log.info(f"Getting CDK look up role: 
arn:aws:iam::{accountid}:role/cdk-hnb659fds-lookup-role-{accountid}-{region}") - return 'arn:aws:iam::{}:role/cdk-hnb659fds-lookup-role-{}-{}'.format(accountid, accountid, region) - - @classmethod - def get_cdk_exec_role_arn(cls, accountid, region): - """Returns the name that will be assumed to perform IAM actions on a given AWS accountid using CDK Toolkit role - Args: - accountid(string) : aws account id - Returns: - string : arn of the CDKToolkit role on the target aws account id - """ - log.info(f"Getting CDK exec role: arn:aws:iam::{accountid}:role/cdk-hnb659fds-cfn-exec-role-{accountid}-{region}") - return 'arn:aws:iam::{}:role/cdk-hnb659fds-cfn-exec-role-{}-{}'.format(accountid, accountid, region) - - @classmethod - def get_delegation_role_id(cls, accountid): - """Returns the name that will be assumed to perform IAM actions on a given AWS accountid - Args: - accountid(string) : aws account id - Returns : - string : RoleId of the role - """ - session = SessionHelper.remote_session(accountid=accountid) - client = session.client('iam', region_name='eu-west-1') - response = client.get_role(RoleName=cls.get_delegation_role_name()) - return response['Role']['RoleId'] - - @classmethod - def remote_session(cls, accountid, role=None): - """Creates a remote boto3 session on the remote AWS account , assuming the delegation Role - Args: - accountid(string) : aws account id - role(string) : arn of the IAM role to assume in the boto3 session - Returns : - boto3.session.Session: boto3 Session, on the target aws accountid, assuming the delegation role or a provided role - """ - base_session = cls.get_session() - if role: - log.info(f"Remote boto3 session using role={role} for account={accountid}") - role_arn = role - else: - log.info(f"Remote boto3 session using pivot role for account= {accountid}") - role_arn = cls.get_delegation_role_arn(accountid=accountid) - session = SessionHelper.get_session(base_session=base_session, role_arn=role_arn) - return session - - @classmethod - def get_account(cls, session=None): - """Returns the aws account id associated with the default session, or the provided session - Args: - session(object, optional) : boto3 session - Returns : - string: AWS Account id of the provided session, - or the default boto3 session is not session argument was provided - """ - if not session: - session = cls.get_session() - region = os.getenv('AWS_REGION', 'eu-west-1') - client = session.client( - 'sts', - region_name=region, - endpoint_url=f"https://sts.{region}.amazonaws.com" - ) - response = client.get_caller_identity() - return response['Account'] - - @classmethod - def get_organization_id(cls, session=None): - """Returns the organization id for the priovided session - Args: - session(object) : boto3 session - Returns - string : AWS organization id - """ - if not session: - session = cls.get_session() - client = session.client('organizations') - response = client.describe_organization() - return response['Organization']['Id'] - - @staticmethod - def get_role_id(accountid, name): - session = SessionHelper.remote_session(accountid=accountid) - client = session.client('iam') - try: - response = client.get_role(RoleName=name) - return response['Role']['RoleId'] - except ClientError: - return None - - @staticmethod - def extract_account_from_role_arn(arn): - """takes a role arn and returns its account id - Args : - arn(str) : role arn - Return : - str : account id or none if arn could not be parsed - """ - try: - return arn.split(':')[4] - except Exception: - return None - - 
@staticmethod - def extract_name_from_role_arn(arn): - """Extract the role name from a Role arn - Args : - arn(str) : role arn - Return : - str : name of the role, or none if arn could not be parsed - """ - try: - return arn.split('/')[-1] - except Exception: - return None - - @staticmethod - def filter_roles_in_account(accountid, arns): - """ - Filter roles in a given account - Args : - accountid(str) : aws account number - arns(list) : a list of arns - Return : - list : list of all arns within the account - """ - return [arn for arn in arns if SessionHelper.extract_account_from_role_arn(arn) == accountid] - - @staticmethod - def get_role_ids(accountid, arns): - """ - Returns the list of Role ids for the list of arns existing within the provided aws account number - Args : - accountid(str) : aws account number - arns(list) : a list of arns - Return : - list : list of Role ids for role which arn are in the same aws account - """ - arns_in_account = SessionHelper.filter_roles_in_account(accountid, arns) - potentially_none = [ - SessionHelper.get_role_id( - accountid=accountid, - name=SessionHelper.extract_name_from_role_arn(role_arn), - ) - for role_arn in arns_in_account - ] - return [roleid for roleid in potentially_none if roleid] - - @classmethod - def get_session_by_access_key_and_secret_key(cls, access_key_id, secret_key): - """Returns a boto3 session fo the access_key_id and secret_key - Args: - access_key_id(string,required) - secret_key(string, required) - Returns: - boto3.session.Session : a boto3 session - """ - if not access_key_id or not secret_key: - raise ValueError('Passed access_key_id and secret_key are invalid') - - return boto3.Session(aws_access_key_id=access_key_id, aws_secret_access_key=secret_key) - - @staticmethod - def generate_console_url(credentials, session_duration=None, region='eu-west-1', bucket=None): - json_string_with_temp_credentials = '{' - json_string_with_temp_credentials += '"sessionId":"' + credentials['AccessKeyId'] + '",' - json_string_with_temp_credentials += '"sessionKey":"' + credentials['SecretAccessKey'] + '",' - json_string_with_temp_credentials += '"sessionToken":"' + credentials['SessionToken'] + '"' - json_string_with_temp_credentials += '}' - - request_parameters = '?Action=getSigninToken' - if session_duration: - request_parameters += '&SessionDuration={}'.format(session_duration) - request_parameters += '&Session=' + quote_plus(json_string_with_temp_credentials) - request_url = 'https://signin.aws.amazon.com/federation' + request_parameters - - r = urlopen(request_url).read() - - signin_token = json.loads(r) - request_parameters = '?Action=login' - request_parameters += '&Issuer=Example.org' - if bucket: - request_parameters += '&Destination=' + quote_plus( - 'https://{}.console.aws.amazon.com/s3/buckets/{}/'.format(region, bucket) - ) - else: - request_parameters += '&Destination=' + quote_plus('https://{}.console.aws.amazon.com/'.format(region)) - request_parameters += '&SigninToken=' + signin_token['SigninToken'] - request_url = 'https://signin.aws.amazon.com/federation' + request_parameters - - # Send final URL to stdout - return request_url diff --git a/backend/dataall/aws/__init__.py b/backend/dataall/base/__init__.py similarity index 100% rename from backend/dataall/aws/__init__.py rename to backend/dataall/base/__init__.py diff --git a/backend/dataall/base/api/__init__.py b/backend/dataall/base/api/__init__.py new file mode 100644 index 000000000..4aef8e332 --- /dev/null +++ b/backend/dataall/base/api/__init__.py @@ -0,0 +1,107 
@@ +from argparse import Namespace + +from ariadne import ( + EnumType, + MutationType, + ObjectType, + UnionType, + QueryType, + gql as GQL, + make_executable_schema, +) + +from dataall.base.api import gql +from dataall.base.api.constants import GraphQLEnumMapper + + +def bootstrap(): + classes = { + gql.ObjectType: [], + gql.QueryField: [], + gql.MutationField: [], + gql.Enum: [], + gql.Union: [], + gql.InputType: [], + } + + Query = gql.ObjectType(name='Query', fields=classes[gql.QueryField]) + + Mutation = gql.ObjectType(name='Mutation', fields=classes[gql.MutationField]) + + for enumclass in GraphQLEnumMapper.__subclasses__(): + enumclass.toGraphQLEnum() + + for cls in classes.keys(): + if not cls.class_instances: # if there are no instances of cls registered in the app + continue + + for name in cls.class_instances['default'].keys(): + if cls.get_instance(name): + classes[cls].append(cls.get_instance(name)) + else: + raise Exception(f'Unknown Graphql Type :`{name}`') + + schema = gql.Schema( + types=classes[gql.ObjectType], + inputs=classes[gql.InputType], + enums=classes[gql.Enum], + unions=classes[gql.Union], + ) + return schema + + +def resolver_adapter(resolver): + def adapted(obj, info, **kwargs): + response = resolver( + context=Namespace( + engine=info.context['engine'], + username=info.context['username'], + groups=info.context['groups'], + schema=info.context['schema'], + ), + source=obj or None, + **kwargs, + ) + return response + + return adapted + + +def get_executable_schema(): + schema = bootstrap() + _types = [] + for _type in schema.types: + if _type.name == 'Query': + query = QueryType() + _types.append(query) + for field in _type.fields: + if field.resolver: + query.field(field.name)(resolver_adapter(field.resolver)) + elif _type.name == 'Mutation': + mutation = MutationType() + _types.append(mutation) + for field in _type.fields: + if field.resolver: + mutation.field(field.name)(resolver_adapter(field.resolver)) + else: + object_type = ObjectType(name=_type.name) + + for field in _type.fields: + if field.resolver: + object_type.field(field.name)(resolver_adapter(field.resolver)) + _types.append(object_type) + + _enums = [] + for enum in schema.enums: + d = {} + for k in enum.values: + d[k.name] = k.value + _enums.append(EnumType(enum.name, d)) + + _unions = [] + for union in schema.unions: + _unions.append(UnionType(union.name, union.resolver)) + + type_defs = GQL(schema.gql(with_directives=False)) + executable_schema = make_executable_schema(type_defs, *(_types + _enums + _unions)) + return executable_schema diff --git a/backend/dataall/base/api/constants.py b/backend/dataall/base/api/constants.py new file mode 100644 index 000000000..f0edc5e68 --- /dev/null +++ b/backend/dataall/base/api/constants.py @@ -0,0 +1,30 @@ +from enum import Enum +from dataall.base.api import gql + + +class GraphQLEnumMapper(Enum): + @classmethod + def toGraphQLEnum(cls): + return gql.Enum(name=cls.__name__, values=cls) + + @classmethod + def to_value(cls, label): + for c in cls: + if c.name == label: + return c.value + return None + + @classmethod + def to_label(cls, value): + for c in cls: + if getattr(cls, c.name).value == value: + return c.name + return None + + +class SortDirection(GraphQLEnumMapper): + asc = 'asc' + desc = 'desc' + + +GLUEBUSINESSPROPERTIES = ['EXAMPLE_GLUE_PROPERTY_TO_BE_ADDED_ON_ES'] diff --git a/backend/dataall/base/api/context.py b/backend/dataall/base/api/context.py new file mode 100644 index 000000000..238627a81 --- /dev/null +++ 
b/backend/dataall/base/api/context.py @@ -0,0 +1,10 @@ +class Context: + def __init__( + self, + engine=None, + username=None, + groups=None, + ): + self.engine = engine + self.username = username + self.groups = groups diff --git a/backend/dataall/base/api/gql/README.md b/backend/dataall/base/api/gql/README.md new file mode 100644 index 000000000..16ae213d3 --- /dev/null +++ b/backend/dataall/base/api/gql/README.md @@ -0,0 +1,174 @@ +> `gql ` is a tiny package for code-first development of GraphQL APIs. +It allows developers to define their schema using a pythonic interface, and +brings a simple visitor mechanism for schema extension. + +# Simple example + +`gql` maps GraphQL constructs to Python classes that can be defined, manipulated and extended. + +```python +import dataall.base.api.gql as gql + +Post = gql.ObjectType( + name="Post", + fields=[ + gql.Field(name="id", type=gql.String), + gql.Field(name="name", type=gql.NonNullableType(gql.String)), + gql.Field(name="comments", type=gql.ArrayType(gql.Thunk(lambda: PostComment))) + ] +) + +PostComment = gql.ObjectType( + name="PostComment", + fields=[ + gql.Field(name="post", type=Post), + gql.Field(name="id", type=gql.String), + gql.Field(name="comment", type=gql.String) + ] +) + +Query = gql.ObjectType( + name="Query", + fields=[ + gql.Field( + name="getPostById", + args=[gql.Argument(name="postId", type=gql.String)], + type=Post + ) + ] +) + +schema = gql.Schema(types=[Post, PostComment, Query]) +print(schema.gql()) +``` + + +This will output a valid GraphQL schema +```graphql + +type Post { + id : String +name : String! +comments : [PostComment] + } + + + +type PostComment { + post : Post +id : String +comment : String + } + + + + +``` + + # Api + ## gql.Scalar + +Scalar GraphQL types are defined with the following Scalar instances: +``` +import dataall.base.api.gql as gql +gql.ID +gql.String +gql.Boolean +gql.Integer +gql.Number +gql.Date +gql.AWSDateTime +``` + + + ## Type Modifiers + +Types can be modified using gql Type modifiers. +Type modifiers can be applied for any valid GraphQL type, including scalar and ObjectType. + +#### `gql.ArrayType(type)` +Defines an array from the provided type + +```python +import dataall.base.api.gql as gql + +gql.ArrayType(gql.String) # will output [String] + +Foo = gql.ObjectType(name="Foo", fields=[gql.Field(name="id", type=gql.String)]) +gql.ArrayType(Foo) # will output [Foo] + +``` + + + +#### `gql.NonNullableType(type)` +Defines a required type from the provided type + +```python +import dataall.base.api.gql as gql + +gql.NonNullableType(gql.String) # will output String! + +``` + + +## gql.Field + +`gql.Field` defines a GraphQL Field + +### Methods + +#### **constructor** `gql.Field(name, type, args, directives)` +- `name (String)` : name of the field +- `type(gql.Scalar, gql.TypeModifier,gql.ObjectType,gql.Thunk)`: the type of the field +- `args(list(gql.Argument))` **optional**: A list of gql.Argument, defining GraphQL arguments +- `directives(list(gql.DirectiveArgs))` : A list of field Directive arguments + +```python +import dataall.base.api.gql as gql + +Child = gql.ObjectType(name="Child", fields=[gql.Field(name="id", type=gql.String)]) +# A simple field +id = gql.Field(name="id", type=gql.NonNullableType(gql.String)) +print(id.gql()) # id : String!
+ +# A field with arguments +listChildren = gql.Field( + name="listChildren", + type=gql.ArrayType(Child), + args=[gql.Argument(name="childName", type=gql.String)] +) # listChildren(childName:String) : [Child] + +# A field with directives + +directiveField = gql.Field( + name="directiveField", + type=gql.String, + directives=[gql.DirectiveArgs(name="required")] +) # directiveField : String @required + +``` + +#### `gql.Field.directive(name)` +Returns the `gql.DirectiveArgs` instance with the provided name, or `None` if the field does not have a directive with the provided name + + +#### `gql.Field.has_directive(name)` +Returns `True` if the field has a directive named `name`, or False if the field has no directive named `name`. + +### Properties +- `type` : the Field type +- `name` : the Field name +- `args` : the Field argument list, defaults to [] +- `directives` : the Field directive list, defaults to [] + +#### `gql.Field.gql(with_directive=True)` +Returns a gql representation of the field. + + ## gql.ObjectType + + ## gql.Thunk diff --git a/backend/dataall/api/gql/__init__.py b/backend/dataall/base/api/gql/__init__.py similarity index 100% rename from backend/dataall/api/gql/__init__.py rename to backend/dataall/base/api/gql/__init__.py diff --git a/backend/dataall/api/gql/_cache.py b/backend/dataall/base/api/gql/_cache.py similarity index 100% rename from backend/dataall/api/gql/_cache.py rename to backend/dataall/base/api/gql/_cache.py diff --git a/backend/dataall/api/gql/default_resolver.py b/backend/dataall/base/api/gql/default_resolver.py similarity index 100% rename from backend/dataall/api/gql/default_resolver.py rename to backend/dataall/base/api/gql/default_resolver.py diff --git a/backend/dataall/api/gql/graphql_argument.py b/backend/dataall/base/api/gql/graphql_argument.py similarity index 100% rename from backend/dataall/api/gql/graphql_argument.py rename to backend/dataall/base/api/gql/graphql_argument.py diff --git a/backend/dataall/api/gql/graphql_directive.py b/backend/dataall/base/api/gql/graphql_directive.py similarity index 100% rename from backend/dataall/api/gql/graphql_directive.py rename to backend/dataall/base/api/gql/graphql_directive.py diff --git a/backend/dataall/api/gql/graphql_enum.py b/backend/dataall/base/api/gql/graphql_enum.py similarity index 100% rename from backend/dataall/api/gql/graphql_enum.py rename to backend/dataall/base/api/gql/graphql_enum.py diff --git a/backend/dataall/api/gql/graphql_field.py b/backend/dataall/base/api/gql/graphql_field.py similarity index 100% rename from backend/dataall/api/gql/graphql_field.py rename to backend/dataall/base/api/gql/graphql_field.py diff --git a/backend/dataall/api/gql/graphql_input.py b/backend/dataall/base/api/gql/graphql_input.py similarity index 100% rename from backend/dataall/api/gql/graphql_input.py rename to backend/dataall/base/api/gql/graphql_input.py diff --git a/backend/dataall/api/gql/graphql_interface.py b/backend/dataall/base/api/gql/graphql_interface.py similarity index 100% rename from backend/dataall/api/gql/graphql_interface.py rename to backend/dataall/base/api/gql/graphql_interface.py diff --git a/backend/dataall/api/gql/graphql_mutation_field.py b/backend/dataall/base/api/gql/graphql_mutation_field.py similarity index 100% rename from backend/dataall/api/gql/graphql_mutation_field.py rename to backend/dataall/base/api/gql/graphql_mutation_field.py diff --git a/backend/dataall/api/gql/graphql_query_field.py
b/backend/dataall/base/api/gql/graphql_query_field.py similarity index 100% rename from backend/dataall/api/gql/graphql_query_field.py rename to backend/dataall/base/api/gql/graphql_query_field.py diff --git a/backend/dataall/api/gql/graphql_scalar.py b/backend/dataall/base/api/gql/graphql_scalar.py similarity index 100% rename from backend/dataall/api/gql/graphql_scalar.py rename to backend/dataall/base/api/gql/graphql_scalar.py diff --git a/backend/dataall/api/gql/graphql_type.py b/backend/dataall/base/api/gql/graphql_type.py similarity index 100% rename from backend/dataall/api/gql/graphql_type.py rename to backend/dataall/base/api/gql/graphql_type.py diff --git a/backend/dataall/api/gql/graphql_type_modifiers.py b/backend/dataall/base/api/gql/graphql_type_modifiers.py similarity index 100% rename from backend/dataall/api/gql/graphql_type_modifiers.py rename to backend/dataall/base/api/gql/graphql_type_modifiers.py diff --git a/backend/dataall/base/api/gql/graphql_union_type.py b/backend/dataall/base/api/gql/graphql_union_type.py new file mode 100644 index 000000000..aded31c91 --- /dev/null +++ b/backend/dataall/base/api/gql/graphql_union_type.py @@ -0,0 +1,39 @@ +from abc import ABC + +from ._cache import cache_instances +from .utils import get_named_type + + +class UnionTypeRegistry(ABC): + """An abstract class that is used to provide union type in runtime""" + + @classmethod + def types(cls): + raise NotImplementedError("Types method is not implemented") + + +@cache_instances +class Union: + _register = {} + + def __init__(self, name, types=[], type_registry=None, resolver=lambda *_, **__: None): + self.name = name + self.types = types + self.type_registry = type_registry + self.resolver = resolver + Union._register[name] = self + + def gql(self, *args, **kwargs): + types = self.type_registry.types() if self.type_registry else self.types + return f"union {self.name} = {'|'.join([get_named_type(t).name for t in types])}" + + +if __name__ == '__main__': + from dataall.base.api import gql + + User = gql.ObjectType(name='User', fields=[]) + + Group = gql.ObjectType(name='Group', fields=[]) + userorgroup = Union(name='userorgroup', types=[gql.Thunk(lambda: User), Group]) + + print(userorgroup.gql()) diff --git a/backend/dataall/api/gql/ref.py b/backend/dataall/base/api/gql/ref.py similarity index 100% rename from backend/dataall/api/gql/ref.py rename to backend/dataall/base/api/gql/ref.py diff --git a/backend/dataall/api/gql/schema.py b/backend/dataall/base/api/gql/schema.py similarity index 100% rename from backend/dataall/api/gql/schema.py rename to backend/dataall/base/api/gql/schema.py diff --git a/backend/dataall/api/gql/thunk.py b/backend/dataall/base/api/gql/thunk.py similarity index 100% rename from backend/dataall/api/gql/thunk.py rename to backend/dataall/base/api/gql/thunk.py diff --git a/backend/dataall/api/gql/utils.py b/backend/dataall/base/api/gql/utils.py similarity index 100% rename from backend/dataall/api/gql/utils.py rename to backend/dataall/base/api/gql/utils.py diff --git a/backend/dataall/api/gql/visitor.py b/backend/dataall/base/api/gql/visitor.py similarity index 100% rename from backend/dataall/api/gql/visitor.py rename to backend/dataall/base/api/gql/visitor.py diff --git a/backend/dataall/aws/handlers/__init__.py b/backend/dataall/base/aws/__init__.py similarity index 100% rename from backend/dataall/aws/handlers/__init__.py rename to backend/dataall/base/aws/__init__.py diff --git a/backend/dataall/aws/handlers/iam.py b/backend/dataall/base/aws/iam.py 
similarity index 100% rename from backend/dataall/aws/handlers/iam.py rename to backend/dataall/base/aws/iam.py diff --git a/backend/dataall/aws/handlers/parameter_store.py b/backend/dataall/base/aws/parameter_store.py similarity index 100% rename from backend/dataall/aws/handlers/parameter_store.py rename to backend/dataall/base/aws/parameter_store.py diff --git a/backend/dataall/base/aws/quicksight.py b/backend/dataall/base/aws/quicksight.py new file mode 100644 index 000000000..e7c59dfb0 --- /dev/null +++ b/backend/dataall/base/aws/quicksight.py @@ -0,0 +1,147 @@ +import logging +import re + +from .sts import SessionHelper + +logger = logging.getLogger('QuicksightHandler') +logger.setLevel(logging.DEBUG) + + +class QuicksightClient: + + DEFAULT_GROUP_NAME = 'dataall' + + def __init__(self): + pass + + @staticmethod + def get_quicksight_client(AwsAccountId, region='eu-west-1'): + """Returns a boto3 quicksight client in the provided account/region + Args: + AwsAccountId(str) : aws account id + region(str) : aws region + Returns : boto3.client ("quicksight") + """ + session = SessionHelper.remote_session(accountid=AwsAccountId) + return session.client('quicksight', region_name=region) + + @staticmethod + def get_identity_region(AwsAccountId): + """Quicksight manages identities in one region, and there is no API to retrieve it. + However, when using Quicksight user/group apis in the wrong region, + the client will throw an exception showing the region Quicksight is using as its + identity region. + Args: + AwsAccountId(str) : aws account id + Returns: str + the region quicksight uses as identity region + """ + identity_region_rex = re.compile('Please use the (?P<region>.*) endpoint.') + identity_region = 'us-east-1' + client = QuicksightClient.get_quicksight_client(AwsAccountId=AwsAccountId, region=identity_region) + try: + response = client.describe_group( + AwsAccountId=AwsAccountId, GroupName=QuicksightClient.DEFAULT_GROUP_NAME, Namespace='default' + ) + except client.exceptions.AccessDeniedException as e: + match = identity_region_rex.findall(str(e)) + if match: + identity_region = match[0] + else: + raise e + except client.exceptions.ResourceNotFoundException: + pass + return identity_region + + @staticmethod + def get_quicksight_client_in_identity_region(AwsAccountId): + """Returns a boto3 quicksight client in the Quicksight identity region for the provided account + Args: + AwsAccountId(str) : aws account id + Returns : boto3.client ("quicksight") + + """ + identity_region = QuicksightClient.get_identity_region(AwsAccountId) + session = SessionHelper.remote_session(accountid=AwsAccountId) + return session.client('quicksight', region_name=identity_region) + + @staticmethod + def check_quicksight_enterprise_subscription(AwsAccountId, region=None): + """Use the DescribeAccountSubscription operation to receive a description of an Amazon QuickSight account's subscription. A successful API call returns an AccountInfo object that includes an account's name, subscription status, authentication type, edition, and notification email address.
+ Args: + AwsAccountId(str) : aws account id + region(str): aws region + Returns: bool + True if Quicksight Enterprise Edition is enabled in the AWS Account + """ + logger.info(f'Checking Quicksight subscription in AWS account = {AwsAccountId}') + client = QuicksightClient.get_quicksight_client(AwsAccountId=AwsAccountId, region=region) + try: + response = client.describe_account_subscription(AwsAccountId=AwsAccountId) + if not response['AccountInfo']: + raise Exception(f'Quicksight Enterprise Subscription not found in Account: {AwsAccountId}') + else: + if response['AccountInfo']['Edition'] not in ['ENTERPRISE', 'ENTERPRISE_AND_Q']: + raise Exception( + f"Quicksight Subscription found in Account: {AwsAccountId} of incorrect type: {response['AccountInfo']['Edition']}") + else: + if response['AccountInfo']['AccountSubscriptionStatus'] == 'ACCOUNT_CREATED': + return True + else: + raise Exception( + f"Quicksight Subscription found in Account: {AwsAccountId} not active. Status = {response['AccountInfo']['AccountSubscriptionStatus']}") + + except client.exceptions.ResourceNotFoundException: + raise Exception('Quicksight Enterprise Subscription not found') + + except client.exceptions.AccessDeniedException: + raise Exception('Access denied to Quicksight for selected role') + return False + + @staticmethod + def create_quicksight_group(AwsAccountId, GroupName=DEFAULT_GROUP_NAME): + """Creates a Quicksight group called GroupName + Args: + AwsAccountId(str): aws account + GroupName(str): name of the QS group + + Returns:dict + quicksight.describe_group response + """ + client = QuicksightClient.get_quicksight_client_in_identity_region(AwsAccountId) + group = QuicksightClient.describe_group(client, AwsAccountId, GroupName) + if not group: + if GroupName == QuicksightClient.DEFAULT_GROUP_NAME: + logger.info(f'Initializing data.all default group = {GroupName}') + QuicksightClient.check_quicksight_enterprise_subscription(AwsAccountId) + + logger.info(f'Attempting to create Quicksight group `{GroupName}...') + response = client.create_group( + GroupName=GroupName, + Description='data.all group', + AwsAccountId=AwsAccountId, + Namespace='default', + ) + logger.info(f'Quicksight group {GroupName} created {response}') + response = client.describe_group( + AwsAccountId=AwsAccountId, GroupName=GroupName, Namespace='default' + ) + return response + return group + + @staticmethod + def describe_group(client, AwsAccountId, GroupName=DEFAULT_GROUP_NAME): + try: + response = client.describe_group( + AwsAccountId=AwsAccountId, GroupName=GroupName, Namespace='default' + ) + logger.info( + f'Quicksight {GroupName} group already exists in {AwsAccountId} ' + f'(using identity region {QuicksightClient.get_identity_region(AwsAccountId)}): ' + f'{response}' + ) + return response + except client.exceptions.ResourceNotFoundException: + logger.info( + f'Creating Quicksight group in {AwsAccountId} (using identity region {QuicksightClient.get_identity_region(AwsAccountId)})' + ) diff --git a/backend/dataall/base/aws/secrets_manager.py b/backend/dataall/base/aws/secrets_manager.py new file mode 100644 index 000000000..42275ca64 --- /dev/null +++ b/backend/dataall/base/aws/secrets_manager.py @@ -0,0 +1,29 @@ +import logging +import os + +import boto3 +from botocore.exceptions import ClientError + +from .sts import SessionHelper + +log = logging.getLogger(__name__) + +_DEFAULT_REGION = os.environ.get('AWS_REGION', 'eu-west-1') + + +class SecretsManager: + def __init__(self, account_id=None, region=_DEFAULT_REGION): + if 
account_id: + session = SessionHelper.remote_session(account_id) + self._client = session.client('secretsmanager', region_name=region) + else: + self._client = boto3.client('secretsmanager', region_name=region) + + def get_secret_value(self, secret_id): + if not secret_id: + raise Exception('Secret name is None') + try: + secret_value = self._client.get_secret_value(SecretId=secret_id)['SecretString'] + except ClientError as e: + raise Exception(e) + return secret_value diff --git a/backend/dataall/base/aws/sqs.py b/backend/dataall/base/aws/sqs.py new file mode 100644 index 000000000..9877d1d03 --- /dev/null +++ b/backend/dataall/base/aws/sqs.py @@ -0,0 +1,66 @@ +import json +import logging +import os +import uuid + +import boto3 +from botocore.exceptions import ClientError + +from dataall.base.utils import Parameter + +logger = logging.getLogger(__name__) + + +class SqsQueue: + disabled = True + queue_url = None + + @classmethod + def configure_(cls, queue_url): + if queue_url: + cls.enable() + cls.queue_url = queue_url + else: + cls.disable() + + @classmethod + def disable(cls): + cls.disabled = True + + @classmethod + def enable(cls): + cls.disabled = False + + @classmethod + def get_envname(cls): + return os.environ.get('envname', 'local') + + @classmethod + def get_sqs_client(cls): + if not cls.disabled: + client = boto3.client( + 'sqs', region_name=os.getenv('AWS_REGION', 'eu-west-1') + ) + return client + + @classmethod + def send(cls, engine, task_ids: [str]): + cls.configure_( + Parameter().get_parameter(env=cls.get_envname(), path='sqs/queue_url') + ) + client = cls.get_sqs_client() + logger.debug(f'Sending task {task_ids} through SQS {cls.queue_url}') + try: + return client.send_message( + QueueUrl=cls.queue_url, + MessageBody=json.dumps(task_ids), + MessageGroupId=cls._get_random_message_id(), + MessageDeduplicationId=cls._get_random_message_id(), + ) + except ClientError as e: + logger.error(e) + raise e + + @classmethod + def _get_random_message_id(cls): + return str(uuid.uuid4()) diff --git a/backend/dataall/base/aws/sts.py b/backend/dataall/base/aws/sts.py new file mode 100644 index 000000000..fd4641076 --- /dev/null +++ b/backend/dataall/base/aws/sts.py @@ -0,0 +1,357 @@ +import json +import logging +import os +import urllib + +import boto3 +from botocore.client import Config +from botocore.exceptions import ClientError + +from dataall.version import __version__, __pkg_name__ + +try: + from urllib import quote_plus + from urllib2 import urlopen +except ImportError: + from urllib.parse import quote_plus + from urllib.request import urlopen + + +log = logging.getLogger(__name__) + + +class SessionHelper: + """SessionHelpers is a class simplifying common aws boto3 session tasks and helpers""" + + @classmethod + def get_session(cls, base_session=None, role_arn=None): + """Returns a boto3 session fo the given role + Args: + base_session(object,optional) : a boto3 session + role_arn(string, optional) : a role arn + Returns: + boto3.session.Session : a boto3 session + If neither base_session and role_arn is provided, returns a default boto3 session + If role_arn is provided, base_session should be a boto3 session on the aws accountid is defined + """ + if role_arn: + external_id_secret = cls.get_external_id_secret() + if external_id_secret: + assume_role_dict = dict( + RoleArn=role_arn, + RoleSessionName=role_arn.split('/')[1], + ExternalId=external_id_secret, + ) + else: + assume_role_dict = dict( + RoleArn=role_arn, + RoleSessionName=role_arn.split('/')[1], + ) + try: + region 
= os.getenv('AWS_REGION', 'eu-west-1') + sts = base_session.client( + 'sts', + config=Config(user_agent_extra=f'{__pkg_name__}/{__version__}'), + region_name=region, + endpoint_url=f"https://sts.{region}.amazonaws.com" + ) + response = sts.assume_role(**assume_role_dict) + return boto3.Session( + aws_access_key_id=response['Credentials']['AccessKeyId'], + aws_secret_access_key=response['Credentials']['SecretAccessKey'], + aws_session_token=response['Credentials']['SessionToken'], + ) + except ClientError as e: + log.error(f'Failed to assume role {role_arn} due to: {e} ') + raise e + + else: + return boto3.Session() + + @classmethod + def _get_parameter_value(cls, parameter_path=None): + """ + Method to get parameter from System Manager Parameter Store + :return: + :rtype: + """ + parameter_value = None + region = os.getenv('AWS_REGION', 'eu-west-1') + if not parameter_path: + raise Exception('Parameter name is None') + try: + session = SessionHelper.get_session() + client = session.client('ssm', region_name=region) + parameter_value = client.get_parameter(Name=parameter_path)['Parameter']['Value'] + log.debug(f'Found Parameter {parameter_path}|{parameter_value}') + except ClientError as e: + log.warning(f'Parameter {parameter_path} not found: {e}') + return parameter_value + + @classmethod + def get_external_id_secret(cls): + """ + External Id used to secure dataall pivot role + sts:AssumeRole operation on onboarded environments + :return: + :rtype: + """ + return SessionHelper._get_parameter_value( + parameter_path=f'/dataall/{os.getenv("envname", "local")}/pivotRole/externalId') + + @classmethod + def get_delegation_role_name(cls): + """Returns the role name that this package assumes on remote accounts + Returns: + string: name of the assumed role + """ + return SessionHelper._get_parameter_value( + parameter_path=f'/dataall/{os.getenv("envname", "local")}/pivotRole/pivotRoleName') + + @classmethod + def get_console_access_url(cls, boto3_session, region='eu-west-1', bucket=None): + """Returns an AWS Console access url for the boto3 session + Args: + boto3_session(object): a boto3 session + Returns: + String: aws federated access console url + """ + c = boto3_session.get_credentials() + json_string_with_temp_credentials = '{' + json_string_with_temp_credentials += '"sessionId":"' + c.access_key + '",' + json_string_with_temp_credentials += '"sessionKey":"' + c.secret_key + '",' + json_string_with_temp_credentials += '"sessionToken":"' + c.token + '"' + json_string_with_temp_credentials += '}' + + request_parameters = '?Action=getSigninToken' + # request_parameters = "&SessionDuration=43200" + request_parameters += '&Session=' + urllib.parse.quote_plus(json_string_with_temp_credentials) + request_url = 'https://signin.aws.amazon.com/federation' + request_parameters + + r = urllib.request.urlopen(request_url).read() + + signin_token = json.loads(r) + request_parameters = '?Action=login' + request_parameters += '&Issuer=Example.org' + if bucket: + request_parameters += '&Destination=' + quote_plus( + 'https://{}.console.aws.amazon.com/s3/buckets/{}/'.format(region, bucket) + ) + else: + request_parameters += '&Destination=' + urllib.parse.quote_plus(f'https://{region}.console.aws.amazon.com/') + request_parameters += '&SigninToken=' + signin_token['SigninToken'] + request_url = 'https://signin.aws.amazon.com/federation' + request_parameters + + # Send final URL to stdout + return request_url + + @classmethod + def get_delegation_role_arn(cls, accountid): + """Returns the name that will be 
assumed to perform IAM actions on a given AWS accountid + Args: + accountid(string) : aws account id + Returns: + string : arn of the delegation role on the target aws account id + """ + return 'arn:aws:iam::{}:role/{}'.format(accountid, cls.get_delegation_role_name()) + + @classmethod + def get_cdk_look_up_role_arn(cls, accountid, region): + """Returns the name that will be assumed to perform IAM actions on a given AWS accountid using CDK Toolkit role + Args: + accountid(string) : aws account id + Returns: + string : arn of the CDKToolkit role on the target aws account id + """ + log.info(f"Getting CDK look up role: arn:aws:iam::{accountid}:role/cdk-hnb659fds-lookup-role-{accountid}-{region}") + return 'arn:aws:iam::{}:role/cdk-hnb659fds-lookup-role-{}-{}'.format(accountid, accountid, region) + + @classmethod + def get_cdk_exec_role_arn(cls, accountid, region): + """Returns the name that will be assumed to perform IAM actions on a given AWS accountid using CDK Toolkit role + Args: + accountid(string) : aws account id + Returns: + string : arn of the CDKToolkit role on the target aws account id + """ + log.info(f"Getting CDK exec role: arn:aws:iam::{accountid}:role/cdk-hnb659fds-cfn-exec-role-{accountid}-{region}") + return 'arn:aws:iam::{}:role/cdk-hnb659fds-cfn-exec-role-{}-{}'.format(accountid, accountid, region) + + @classmethod + def get_delegation_role_id(cls, accountid): + """Returns the name that will be assumed to perform IAM actions on a given AWS accountid + Args: + accountid(string) : aws account id + Returns : + string : RoleId of the role + """ + session = SessionHelper.remote_session(accountid=accountid) + client = session.client('iam', region_name='eu-west-1') + response = client.get_role(RoleName=cls.get_delegation_role_name()) + return response['Role']['RoleId'] + + @classmethod + def remote_session(cls, accountid, role=None): + """Creates a remote boto3 session on the remote AWS account , assuming the delegation Role + Args: + accountid(string) : aws account id + role(string) : arn of the IAM role to assume in the boto3 session + Returns : + boto3.session.Session: boto3 Session, on the target aws accountid, assuming the delegation role or a provided role + """ + base_session = cls.get_session() + if role: + log.info(f"Remote boto3 session using role={role} for account={accountid}") + role_arn = role + else: + log.info(f"Remote boto3 session using pivot role for account= {accountid}") + role_arn = cls.get_delegation_role_arn(accountid=accountid) + session = SessionHelper.get_session(base_session=base_session, role_arn=role_arn) + return session + + @classmethod + def get_account(cls, session=None): + """Returns the aws account id associated with the default session, or the provided session + Args: + session(object, optional) : boto3 session + Returns : + string: AWS Account id of the provided session, + or the default boto3 session is not session argument was provided + """ + if not session: + session = cls.get_session() + region = os.getenv('AWS_REGION', 'eu-west-1') + client = session.client( + 'sts', + region_name=region, + endpoint_url=f"https://sts.{region}.amazonaws.com" + ) + response = client.get_caller_identity() + return response['Account'] + + @classmethod + def get_organization_id(cls, session=None): + """Returns the organization id for the priovided session + Args: + session(object) : boto3 session + Returns + string : AWS organization id + """ + if not session: + session = cls.get_session() + client = session.client('organizations') + response = 
client.describe_organization() + return response['Organization']['Id'] + + @staticmethod + def get_role_id(accountid, name): + session = SessionHelper.remote_session(accountid=accountid) + client = session.client('iam') + try: + response = client.get_role(RoleName=name) + return response['Role']['RoleId'] + except ClientError: + return None + + @staticmethod + def extract_account_from_role_arn(arn): + """takes a role arn and returns its account id + Args : + arn(str) : role arn + Return : + str : account id or none if arn could not be parsed + """ + try: + return arn.split(':')[4] + except Exception: + return None + + @staticmethod + def extract_name_from_role_arn(arn): + """Extract the role name from a Role arn + Args : + arn(str) : role arn + Return : + str : name of the role, or none if arn could not be parsed + """ + try: + return arn.split('/')[-1] + except Exception: + return None + + @staticmethod + def filter_roles_in_account(accountid, arns): + """ + Filter roles in a given account + Args : + accountid(str) : aws account number + arns(list) : a list of arns + Return : + list : list of all arns within the account + """ + return [arn for arn in arns if SessionHelper.extract_account_from_role_arn(arn) == accountid] + + @staticmethod + def get_role_ids(accountid, arns): + """ + Returns the list of Role ids for the list of arns existing within the provided aws account number + Args : + accountid(str) : aws account number + arns(list) : a list of arns + Return : + list : list of Role ids for role which arn are in the same aws account + """ + arns_in_account = SessionHelper.filter_roles_in_account(accountid, arns) + potentially_none = [ + SessionHelper.get_role_id( + accountid=accountid, + name=SessionHelper.extract_name_from_role_arn(role_arn), + ) + for role_arn in arns_in_account + ] + return [roleid for roleid in potentially_none if roleid] + + @classmethod + def get_session_by_access_key_and_secret_key(cls, access_key_id, secret_key): + """Returns a boto3 session fo the access_key_id and secret_key + Args: + access_key_id(string,required) + secret_key(string, required) + Returns: + boto3.session.Session : a boto3 session + """ + if not access_key_id or not secret_key: + raise ValueError('Passed access_key_id and secret_key are invalid') + + return boto3.Session(aws_access_key_id=access_key_id, aws_secret_access_key=secret_key) + + @staticmethod + def generate_console_url(credentials, session_duration=None, region='eu-west-1', bucket=None): + json_string_with_temp_credentials = '{' + json_string_with_temp_credentials += '"sessionId":"' + credentials['AccessKeyId'] + '",' + json_string_with_temp_credentials += '"sessionKey":"' + credentials['SecretAccessKey'] + '",' + json_string_with_temp_credentials += '"sessionToken":"' + credentials['SessionToken'] + '"' + json_string_with_temp_credentials += '}' + + request_parameters = '?Action=getSigninToken' + if session_duration: + request_parameters += '&SessionDuration={}'.format(session_duration) + request_parameters += '&Session=' + quote_plus(json_string_with_temp_credentials) + request_url = 'https://signin.aws.amazon.com/federation' + request_parameters + + r = urlopen(request_url).read() + + signin_token = json.loads(r) + request_parameters = '?Action=login' + request_parameters += '&Issuer=Example.org' + if bucket: + request_parameters += '&Destination=' + quote_plus( + 'https://{}.console.aws.amazon.com/s3/buckets/{}/'.format(region, bucket) + ) + else: + request_parameters += '&Destination=' + 
quote_plus('https://{}.console.aws.amazon.com/'.format(region)) + request_parameters += '&SigninToken=' + signin_token['SigninToken'] + request_url = 'https://signin.aws.amazon.com/federation' + request_parameters + + # Send final URL to stdout + return request_url diff --git a/backend/dataall/base/cdkproxy/README.md b/backend/dataall/base/cdkproxy/README.md new file mode 100644 index 000000000..4175cf20d --- /dev/null +++ b/backend/dataall/base/cdkproxy/README.md @@ -0,0 +1,50 @@ + + +## About +`cdkproxy` is a package that exposes a REST API to run pre-defined +cloudformation stacks using the aws cdk package. + +It is deployed as a docker container running on AWS ECS. + +## How it works + +cdkproxy exposes a REST API to manage pre-defined stacks. +It reads and updates tasks from the dataall database. +Some APIs are run asynchronously, returning an id for subsequent reads. +Some APIs are run synchronously. + + +Pre-defined cdk stacks are defined in the stack package. +To register a pre-defined stack, use the `@stack` decorator as in the example below: + +```python + +from aws_cdk import ( + aws_s3 as s3, + aws_sqs as sqs, + core +) +from dataall.base.cdkproxy.stacks import stack + +@stack(stack="mypredefinedstack") +class MyPredefinedStack(core.Stack): + def __init__(self, scope, id, **kwargs): + super().__init__(scope, id, **kwargs) + # constructs go here + +``` + + +## Local setup + +### Prerequisites + +1. You must have docker installed +2. You must have ~/.aws folder with your aws credentials + +### build the image +At the root folder: +`docker build --network=host -t cdkproxy:latest . ` + +### Run the image +`docker run --network host -p 8080:8080 -v /home/moshir/.aws:/root/.aws:ro --name cdkproxy cdkproxy:latest ` diff --git a/backend/dataall/cdkproxy/__init__.py b/backend/dataall/base/cdkproxy/__init__.py similarity index 100% rename from backend/dataall/cdkproxy/__init__.py rename to backend/dataall/base/cdkproxy/__init__.py diff --git a/backend/dataall/cdkproxy/app.py b/backend/dataall/base/cdkproxy/app.py similarity index 91% rename from backend/dataall/cdkproxy/app.py rename to backend/dataall/base/cdkproxy/app.py index cf208f7fb..080295841 100644 --- a/backend/dataall/cdkproxy/app.py +++ b/backend/dataall/base/cdkproxy/app.py @@ -6,17 +6,20 @@ from aws_cdk import Environment, App from tabulate import tabulate -from dataall.cdkproxy.stacks import instanciate_stack +from dataall.base.cdkproxy.stacks import instanciate_stack +from dataall.base.loader import load_modules, ImportMode print(sys.version) logger = logging.getLogger('cdkapp process') logger.setLevel('INFO') +load_modules(modes={ImportMode.CDK}) + class CdkRunner: @staticmethod def create(): - logger.info('Creating Stack') + logger.info('Creating Stack') app = App() # 1.
Reading info from context # 1.1 Reading account from context diff --git a/backend/dataall/cdkproxy/cdk.json b/backend/dataall/base/cdkproxy/cdk.json similarity index 97% rename from backend/dataall/cdkproxy/cdk.json rename to backend/dataall/base/cdkproxy/cdk.json index c570bac27..35abd772f 100644 --- a/backend/dataall/cdkproxy/cdk.json +++ b/backend/dataall/base/cdkproxy/cdk.json @@ -2,7 +2,7 @@ "__app": "python app.py", "context": { "@aws-cdk/aws-apigateway:usagePlanKeyOrderInsensitiveId": false, - "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": false, + "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": true, "@aws-cdk/aws-rds:lowercaseDbIdentifier": false, "@aws-cdk/core:stackRelativeExports": false } diff --git a/backend/dataall/cdkproxy/cdk_cli_wrapper.py b/backend/dataall/base/cdkproxy/cdk_cli_wrapper.py similarity index 78% rename from backend/dataall/cdkproxy/cdk_cli_wrapper.py rename to backend/dataall/base/cdkproxy/cdk_cli_wrapper.py index 6c8932e03..f0aec3f66 100644 --- a/backend/dataall/cdkproxy/cdk_cli_wrapper.py +++ b/backend/dataall/base/cdkproxy/cdk_cli_wrapper.py @@ -3,27 +3,43 @@ # Additionally, it uses the cdk plugin cdk-assume-role-credential-plugin to run cdk commands on target accounts # see : https://github.com/aws-samples/cdk-assume-role-credential-plugin +import ast import logging import os import subprocess import sys -import ast +from abc import abstractmethod +from typing import Dict import boto3 from botocore.exceptions import ClientError -from ..aws.handlers.sts import SessionHelper -from ..db import Engine -from ..db import models -from ..db.api import Pipeline, Environment, Stack -from ..utils.alarm_service import AlarmService -from dataall.cdkproxy.cdkpipeline.cdk_pipeline import CDKPipelineStack +from dataall.core.stacks.db.stack_models import Stack +from dataall.base.aws.sts import SessionHelper +from dataall.base.db import Engine +from dataall.base.utils.alarm_service import AlarmService logger = logging.getLogger('cdksass') ENVNAME = os.getenv('envname', 'local') +class CDKCliWrapperExtension: + def __init__(self): + pass + + @abstractmethod + def extend_deployment(self, stack, session, env): + raise NotImplementedError("Method extend_deployment is not implemented") + + @abstractmethod + def cleanup(self): + raise NotImplementedError("Method cleanup is not implemented") + + +_CDK_CLI_WRAPPER_EXTENSIONS: Dict[str, CDKCliWrapperExtension] = {} + + def aws_configure(profile_name='default'): print('..............................................') print(' Running configure ') @@ -69,30 +85,11 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s with engine.scoped_session() as session: try: - stack: models.Stack = session.query(models.Stack).get(stackid) + stack: Stack = session.query(Stack).get(stackid) logger.warning(f'stackuri = {stack.stackUri}, stackId = {stack.stackid}') stack.status = 'PENDING' session.commit() - if stack.stack == 'cdkpipeline': - cdkpipeline = CDKPipelineStack(stack.targetUri) - venv_name = cdkpipeline.venv_name if cdkpipeline.venv_name else None - pipeline = Pipeline.get_pipeline_by_uri(session, stack.targetUri) - path = f'./cdkpipeline/{pipeline.repo}/' - app_path = './app.py' - if not venv_name: - logger.info('Successfully Updated CDK Pipeline') - meta = describe_stack(stack) - stack.stackid = meta['StackId'] - stack.status = meta['StackStatus'] - update_stack_output(session, stack) - return - - cwd = ( - os.path.join(os.path.dirname(os.path.abspath(__file__)), path) - if path 
- else os.path.dirname(os.path.abspath(__file__)) - ) python_path = '/:'.join(sys.path)[1:] + ':/code' logger.info(f'python path = {python_path}') @@ -102,6 +99,7 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s 'PYTHONPATH': python_path, 'CURRENT_AWS_ACCOUNT': this_aws_account, 'envname': os.environ.get('envname', 'local'), + 'config_location': "/config.json" } if creds: env.update( @@ -112,6 +110,25 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s } ) + extension = _CDK_CLI_WRAPPER_EXTENSIONS.get(stack.stack) + if extension: + logger.info(f'Extending CDK deployment process with steps for the following stack: {stack.stack}') + finish_deployment, path = _CDK_CLI_WRAPPER_EXTENSIONS[stack.stack].extend_deployment( + stack=stack, + session=session, + env=env + ) + if finish_deployment: + return + else: + logger.info(f'There is no CDK deployment extension for {stack.stack}. Proceeding further with the deployment') + + cwd = ( + os.path.join(os.path.dirname(os.path.abspath(__file__)), path) + if path + else os.path.dirname(os.path.abspath(__file__)) + ) + app_path = app_path or './app.py' logger.info(f'app_path: {app_path}') @@ -143,21 +160,6 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s '--verbose', ] - if stack.stack == 'cdkpipeline': - aws = SessionHelper.remote_session(stack.accountid) - creds = aws.get_credentials() - env.update( - { - 'CDK_DEFAULT_REGION': stack.region, - 'AWS_REGION': stack.region, - 'AWS_DEFAULT_REGION': stack.region, - 'CDK_DEFAULT_ACCOUNT': stack.accountid, - 'AWS_ACCESS_KEY_ID': creds.access_key, - 'AWS_SECRET_ACCESS_KEY': creds.secret_key, - 'AWS_SESSION_TOKEN': creds.token, - } - ) - logger.info(f"Running command : \n {' '.join(cmd)}") process = subprocess.run( @@ -168,8 +170,12 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s env=env, cwd=cwd, ) + if stack.stack == 'cdkpipeline': - CDKPipelineStack.clean_up_repo(path=f'./{pipeline.repo}') + if stack.stack not in _CDK_CLI_WRAPPER_EXTENSIONS: + logger.error(f'No CDK CLI wrapper extension is registered for {stack.stack} stack type') + + _CDK_CLI_WRAPPER_EXTENSIONS[stack.stack].cleanup() if process.returncode == 0: meta = describe_stack(stack) @@ -190,7 +196,7 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s def describe_stack(stack, engine: Engine = None, stackid: str = None): if not stack: with engine.scoped_session() as session: - stack = session.query(models.Stack).get(stackid) + stack = session.query(Stack).get(stackid) if stack.status == 'DELETE_COMPLETE': return {'StackId': stack.stackid, 'StackStatus': stack.status} session = SessionHelper.remote_session(stack.accountid) diff --git a/backend/dataall/cdkproxy/main.py b/backend/dataall/base/cdkproxy/main.py similarity index 93% rename from backend/dataall/cdkproxy/main.py rename to backend/dataall/base/cdkproxy/main.py index 6bbcbf22b..2232c620d 100644 --- a/backend/dataall/cdkproxy/main.py +++ b/backend/dataall/base/cdkproxy/main.py @@ -5,8 +5,8 @@ # GET /stack/{stackid} : returns metadata for the stack # DELETE /Stack/{stackid} : deletes the stack # To run the server locally, simply run -# uvicorn dataall.cdkproxy.main:app --host 0.0.0.0 --port 8080 -# To run in docker, build the image and run the container as described in dataall/cdkproxy/README.md +# uvicorn dataall.base.cdkproxy.main:app --host 0.0.0.0 --port 8080 +# To run in docker, build the image and run the container as 
described in dataall/base/cdkproxy/README.md import logging import os @@ -19,8 +19,9 @@ import cdk_cli_wrapper as wrapper from stacks import StackManager -from ..db import get_engine -from ..db import models +from dataall.core.organizations.db.organization_models import Organization +from dataall.core.stacks.db.stack_models import Stack +from dataall.base.db import get_engine print('\n'.join(sys.path)) @@ -37,7 +38,7 @@ def connect(): try: engine = get_engine(envname=ENVNAME) with engine.scoped_session() as session: - orgs = session.query(models.Organization).all() + orgs = session.query(Organization).all() return engine except Exception as e: raise Exception('Connection Error') @@ -140,7 +141,7 @@ async def create_stack( } with engine.scoped_session() as session: - stack: models.Stack = session.query(models.Stack).get(stackid) + stack: Stack = session.query(Stack).get(stackid) if not stack: logger.warning(f'Could not find stack with stackUri `{stackid}`') response.status_code = status.HTTP_302_FOUND @@ -177,7 +178,7 @@ async def delete_stack( 'message': f'Failed to connect to database for environment `{ENVNAME}`', } with engine.scoped_session() as session: - stack: models.Stack = session.query(models.Stack).get(stackid) + stack: Stack = session.query(Stack).get(stackid) if not stack: logger.warning(f'Could not find stack with stackUri `{stackid}`') response.status_code = status.HTTP_302_FOUND @@ -212,7 +213,7 @@ def get_stack(stackid: str, response: Response): 'message': f'Failed to connect to database for environment `{ENVNAME}`', } with engine.scoped_session() as session: - stack: models.Stack = session.query(models.Stack).get(stackid) + stack: Stack = session.query(Stack).get(stackid) if not stack: logger.warning(f'Could not find stack with stackUri `{stackid}`') response.status_code = status.HTTP_404_NOT_FOUND diff --git a/backend/dataall/cdkproxy/requirements.txt b/backend/dataall/base/cdkproxy/requirements.txt similarity index 79% rename from backend/dataall/cdkproxy/requirements.txt rename to backend/dataall/base/cdkproxy/requirements.txt index a7b91a402..5552143fb 100644 --- a/backend/dataall/cdkproxy/requirements.txt +++ b/backend/dataall/base/cdkproxy/requirements.txt @@ -1,5 +1,4 @@ -aws-cdk-lib==2.78.0 -aws_cdk.aws_redshift_alpha==2.14.0a0 +aws-cdk-lib==2.83.1 boto3==1.24.85 boto3-stubs==1.24.85 botocore==1.27.85 @@ -17,4 +16,5 @@ werkzeug==2.3.3 constructs>=10.0.0,<11.0.0 git-remote-codecommit==1.16 aws-ddk==0.5.1 -aws-ddk-core==0.5.1 \ No newline at end of file +aws-ddk-core==0.5.1 +deprecated==1.2.13 \ No newline at end of file diff --git a/backend/dataall/base/cdkproxy/stacks/__init__.py b/backend/dataall/base/cdkproxy/stacks/__init__.py new file mode 100644 index 000000000..7f1cc9b38 --- /dev/null +++ b/backend/dataall/base/cdkproxy/stacks/__init__.py @@ -0,0 +1,7 @@ +from .manager import stack, instanciate_stack, StackManager + +__all__ = [ + 'StackManager', + 'stack', + 'instanciate_stack', +] diff --git a/backend/dataall/cdkproxy/stacks/manager.py b/backend/dataall/base/cdkproxy/stacks/manager.py similarity index 100% rename from backend/dataall/cdkproxy/stacks/manager.py rename to backend/dataall/base/cdkproxy/stacks/manager.py diff --git a/backend/dataall/base/config.py b/backend/dataall/base/config.py new file mode 100644 index 000000000..80f13e490 --- /dev/null +++ b/backend/dataall/base/config.py @@ -0,0 +1,70 @@ +"""Reads and encapsulates the configuration provided in config.json""" +import json +import copy +from typing import Any, Dict +import os +from pathlib 
import Path + + +class _Config: + """A container of properties in the configuration file + and any other that can be specified/overwritten later in the application""" + + def __init__(self): + self._config = _Config._read_config_file() + + def get_property(self, key: str, default=None) -> Any: + """ + Retrieves a copy of the property + Config uses dot as a separator to navigate easily to the needed property e.g. + some.needed.parameter is equivalent to config["some"]["needed"]["parameter"] + It enables fast navigation for any nested parameter + """ + res = self._config + + props = key.split(".") + + # going through the hierarchy of json + for prop in props: + if prop not in res: + if default is not None: + return default + + raise KeyError(f"Couldn't find a property {key} in the config") + + res = res[prop] + return copy.deepcopy(res) + + def set_property(self, key: str, value: Any) -> None: + """ + Sets a property into the config + If the property has a dot it will be split into nested levels + """ + conf = self._config + props = key.split(".") + + for i, prop in enumerate(props): + if i == len(props) - 1: + conf[prop] = value + else: + conf[prop] = conf[prop] if prop in conf is not None else {} + conf = conf[prop] + + @staticmethod + def _read_config_file() -> Dict[str, Any]: + with open(_Config._path_to_file()) as config_file: + return json.load(config_file) + + @staticmethod + def _path_to_file() -> str: + """Tries to get the config location from the environment. If not defined, it resolves config.json relative to the repository root""" + path = os.getenv("config_location") + if path: + return path + return os.path.join(Path(__file__).parents[3], "config.json") + + def __repr__(self): + return str(self._config) + + +config = _Config() diff --git a/backend/dataall/base/context.py b/backend/dataall/base/context.py new file mode 100644 index 000000000..50434ed3f --- /dev/null +++ b/backend/dataall/base/context.py @@ -0,0 +1,40 @@ +""" +API for request context. +Request context is a storage associated with the request and should be accessible from any part of the application +that is in the request scope + +The module uses Flask's approach to handling requests: thread-local storage +That approach should work fine for AWS Lambda and for the local server that uses a Flask app +""" + +from dataclasses import dataclass +from typing import List + +from dataall.base.db.connection import Engine +from threading import local + + +_request_storage = local() + + +@dataclass(frozen=True) +class RequestContext: + """Contains the request-scoped data for every GraphQL request""" + db_engine: Engine + username: str + groups: List[str] + + +def get_context() -> RequestContext: + """Retrieves the context associated with a request""" + return _request_storage.context + + +def set_context(context: RequestContext) -> None: + """Sets the context associated with a request""" + _request_storage.context = context + + +def dispose_context() -> None: + """Disposes of the context after the request completes""" + _request_storage.context = None diff --git a/backend/dataall/base/db/__init__.py b/backend/dataall/base/db/__init__.py new file mode 100644 index 000000000..0444c338b --- /dev/null +++ b/backend/dataall/base/db/__init__.py @@ -0,0 +1,13 @@ +from .base import Base, Resource +from .
import exceptions +from .connection import ( + Engine, + get_engine, + create_schema_if_not_exists, + create_schema_and_tables, + has_table, + has_column, + drop_schema_if_exists, +) +from .dbconfig import DbConfig +from .paginator import paginate diff --git a/backend/dataall/db/base.py b/backend/dataall/base/db/base.py similarity index 100% rename from backend/dataall/db/base.py rename to backend/dataall/base/db/base.py diff --git a/backend/dataall/db/connection.py b/backend/dataall/base/db/connection.py similarity index 84% rename from backend/dataall/db/connection.py rename to backend/dataall/base/db/connection.py index 8b5016b50..f7ff1d4c3 100644 --- a/backend/dataall/db/connection.py +++ b/backend/dataall/base/db/connection.py @@ -3,15 +3,15 @@ import os from contextlib import contextmanager -import boto3 import sqlalchemy from sqlalchemy.engine import reflection from sqlalchemy.orm import sessionmaker -from .. import db -from ..db import Base -from ..db.dbconfig import DbConfig -from ..utils import Parameter, Secrets +from dataall.base.aws.secrets_manager import SecretsManager +from dataall.base.db import Base +from dataall.base.db.dbconfig import DbConfig +from dataall.base.utils import Parameter +from dataall.base.aws.sts import SessionHelper try: from urllib import quote_plus, unquote_plus @@ -86,22 +86,15 @@ def create_schema_if_not_exists(engine, envname): def create_schema_and_tables(engine, envname): + drop_schema_if_exists(engine.engine, envname) create_schema_if_not_exists(engine.engine, envname) try: - Base.metadata.drop_all(engine.engine) Base.metadata.create_all(engine.engine) except Exception as e: log.error(f'Failed to create all tables due to: {e}') raise e -def init_permissions(engine, envname=None): - with engine.scoped_session() as session: - log.info('Initiating permissions') - db.api.Tenant.save_tenant(session, name='dataall', description='Tenant dataall') - db.api.Permission.init_permissions(session) - - def drop_schema_if_exists(engine, envname): try: if engine.dialect.has_schema(engine, envname): @@ -115,13 +108,8 @@ def get_engine(envname=ENVNAME): schema = os.getenv('schema_name', envname) if envname not in ['local', 'pytest', 'dkrcompose']: param_store = Parameter() - secret = Secrets() credential_arn = param_store.get_parameter(env=envname, path='aurora/dbcreds') - secretsmanager = boto3.client( - 'secretsmanager', region_name=os.environ.get('AWS_REGION', 'eu-west-1') - ) - db_credentials_string = secretsmanager.get_secret_value(SecretId=credential_arn) - creds = json.loads(db_credentials_string['SecretString']) + creds = json.loads(SecretsManager().get_secret_value(credential_arn)) user = creds['username'] pwd = creds['password'] host = param_store.get_parameter(env=envname, path='aurora/hostname') diff --git a/backend/dataall/db/dbconfig.py b/backend/dataall/base/db/dbconfig.py similarity index 100% rename from backend/dataall/db/dbconfig.py rename to backend/dataall/base/db/dbconfig.py diff --git a/backend/dataall/db/exceptions.py b/backend/dataall/base/db/exceptions.py similarity index 94% rename from backend/dataall/db/exceptions.py rename to backend/dataall/base/db/exceptions.py index 3453327a8..20b8c9973 100644 --- a/backend/dataall/db/exceptions.py +++ b/backend/dataall/base/db/exceptions.py @@ -161,18 +161,6 @@ def __str__(self): return f'{self.message}' -class ShareItemsFound(Exception): - def __init__(self, action, message): - self.action = action - self.message = f""" - An error occurred (ShareItemsFound) when calling {self.action} operation: - 
{message} - """ - - def __str__(self): - return f'{self.message}' - - class OrganizationResourcesFound(Exception): def __init__(self, action, message): self.action = action diff --git a/backend/dataall/db/paginator.py b/backend/dataall/base/db/paginator.py similarity index 100% rename from backend/dataall/db/paginator.py rename to backend/dataall/base/db/paginator.py diff --git a/backend/dataall/db/utils.py b/backend/dataall/base/db/utils.py similarity index 91% rename from backend/dataall/db/utils.py rename to backend/dataall/base/db/utils.py index 06747a68f..cb31c6677 100644 --- a/backend/dataall/db/utils.py +++ b/backend/dataall/base/db/utils.py @@ -2,7 +2,7 @@ import nanoid -from ..utils.slugify import slugify +from dataall.base.utils.slugify import slugify def uuid(resource_type='undefined', parent_field=''): diff --git a/backend/dataall/base/loader.py b/backend/dataall/base/loader.py new file mode 100644 index 000000000..c43f0e7f5 --- /dev/null +++ b/backend/dataall/base/loader.py @@ -0,0 +1,271 @@ +"""Load modules that are specified in the configuration file""" +import importlib +import logging +import sys +from abc import ABC, abstractmethod +from collections import defaultdict, deque +from enum import Enum, auto +from typing import List, Type, Set + +from dataall.base.config import config + +log = logging.getLogger(__name__) + +_MODULE_PREFIX = "dataall.modules" + +# This needed not to load the same module twice. Should happen only in tests +_ACTIVE_MODES = set() +# Contains all loaded moduels +_LOADED_MODULES: Set[str] = set() + + +class ImportMode(Enum): + """Defines importing mode + + Since there are different infrastructure components that requires only part + of functionality to be loaded, there should be different loading modes + """ + + API = auto() + CDK = auto() + HANDLERS = auto() + STACK_UPDATER_TASK = auto() + CATALOG_INDEXER_TASK = auto() + + @staticmethod + def all(): + return {mode for mode in ImportMode} + + +class ModuleInterface(ABC): + """ + An interface of the module. The implementation should be part of __init__.py of the module + Contains an API that will be called from core part + """ + @staticmethod + @abstractmethod + def is_supported(modes: Set[ImportMode]) -> bool: + """ + Return True if the module interface supports any of the ImportMode and should be loaded + """ + raise NotImplementedError("is_supported is not implemented") + + @classmethod + def name(cls) -> str: + """ + Returns name of the module. Should be the same if it's specified in the config file + """ + return _remove_module_prefix(cls.__module__) + + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + """ + It describes on what modules this ModuleInterface depends on. + It will be used to eventually load these module dependencies. Even if a dependency module is not active + in the config file. + + The default value is no dependencies + """ + return [] + + +def load_modules(modes: Set[ImportMode]) -> None: + """ + Loads all modules from the config + Loads only requested functionality (submodules) using the mode parameter + """ + + to_load = _new_modules(modes) + if not to_load: + return + + in_config, inactive = _load_modules() + _check_loading_correct(in_config, to_load) + _initialize_modules(to_load) + _describe_loading(in_config, inactive) + + log.info("All modules have been imported") + + +def list_loaded_modules() -> List[str]: + return list(_LOADED_MODULES) + + +def _new_modules(modes: Set[ImportMode]): + """ + Extracts only new modules to load. 
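To make the loader contract concrete, a sketch of what a hypothetical `dataall/modules/mymodule/__init__.py` could provide under this scheme (the module name and config entry are illustrative, not part of this change):

```python
# Sketch of a hypothetical dataall/modules/mymodule/__init__.py; the module
# name "mymodule" and its config entry are examples, not part of this change.
from typing import List, Set, Type

from dataall.base.loader import ImportMode, ModuleInterface


class MyModuleApiModuleInterface(ModuleInterface):
    """Initialized by load_modules() when ImportMode.API is requested."""

    @staticmethod
    def is_supported(modes: Set[ImportMode]) -> bool:
        return ImportMode.API in modes

    @staticmethod
    def depends_on() -> List[Type[ModuleInterface]]:
        # No dependencies, so the topological sort initializes this module first.
        return []


# The module must also be activated in config.json, for example:
# "modules": { "mymodule": { "active": true } }
```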
It's needed to avoid multiply loading + """ + all_modes = _ACTIVE_MODES + + to_load = modes - all_modes # complement of set + all_modes |= modes + return to_load + + +def _load_modules(): + """ + Loads modules but not initializing them + """ + modules = _get_modules_from_config() + inactive = set() + in_config = set() + for name, props in modules.items(): + + if "active" not in props: + raise ValueError(f"Status is not defined for {name} module") + + active = props["active"] + + if not active: + log.info(f"Module {name} is not active. Skipping...") + inactive.add(name) + continue + + in_config.add(name) + if not _load_module(name): + raise ValueError(f"Couldn't find module {name} under modules directory") + + log.info(f"Module {name} is loaded") + return in_config, inactive + + +def _get_modules_from_config(): + try: + modules = config.get_property("modules") + except KeyError as e: + raise KeyError('"modules" has not been found in the config file. Nothing to load') from e + + log.info("Found %d modules that have been found in the config", len(modules)) + return modules + + +def _load_module(name: str): + """ + Loads a module but not initializing it + """ + try: + importlib.import_module(f"{_MODULE_PREFIX}.{name}") + return True + except ModuleNotFoundError as e: + log.error(f"Couldn't load module due to: {e}") + return False + + +def _initialize_modules(modes: Set[ImportMode]): + """ + Initialize all modules for supported modes. This method is using topological sorting for a graph of module + dependencies. It's needed to load module in a specific order: first modules to load are without dependencies. + It might help to avoid possible issues if there is a load in the module constructor (which can be the case + if a module supports a few importing modes). + """ + modules = _all_modules() + dependencies = defaultdict(list) + degrees = defaultdict(int) + supported = [] + for module in modules: + if module.is_supported(modes): + supported.append(module) + degrees[module] += len(module.depends_on()) + for dependency in module.depends_on(): + dependencies[dependency].append(module) + + queue = deque() + for module in supported: + if degrees[module] == 0: + queue.append(module) + + initialized = 0 + while queue: + to_init = queue.popleft() + _initialize_module(to_init) + initialized += 1 + + for dependant in dependencies[to_init]: + degrees[dependant] -= 1 + if degrees[dependant] == 0: + queue.append(dependant) + + if initialized < len(degrees): + # Happens if the ModuleInterface for dependency doesn't support import mode + # The case when there is circular imports should already be covered by python loader + raise ImportError("Not all modules have been initialized. Check if your import modes are correct") + + +def _get_module_name(module): + return module[len(_MODULE_PREFIX) + 1:].split(".")[0] # gets only top level module name + + +def _initialize_module(module: Type[ModuleInterface]): + module() # call a constructor for initialization + _LOADED_MODULES.add(module.name()) + + +def _check_loading_correct(in_config: Set[str], modes: Set[ImportMode]): + """ + To avoid unintentional loading (without ModuleInterface) we can check all loaded modules. + Unintentional/incorrect loading might happen if module A has a direct reference to module B without declaring it + in ModuleInterface. Doing so, this might lead to a problem when a module interface require to load something during + initialization. 
But since ModuleInterface is not initializing properly (using depends_on) + some functionality may work wrongly. + """ + expected_load = set() + # 1) Adds all modules to load + for module in _all_modules(): + if module.is_supported(modes) and module.name() in in_config: + expected_load.add(module) + + # 2) Add all dependencies + to_add = list(expected_load) + while to_add: + new_to_add = [] + while to_add: + module = to_add.pop() + for dependency in module.depends_on(): + if dependency not in expected_load: + expected_load.add(dependency) + if not dependency.is_supported(modes): + raise ImportError(f"Dependency {dependency.name()} doesn't support {modes}") + + new_to_add.append(dependency) + to_add = new_to_add + + # 3) Checks all found ModuleInterfaces + for module in _all_modules(): + if module.is_supported(modes) and module not in expected_load: + raise ImportError( + f"ModuleInterface has not been initialized for module {module.name()}. " + "Declare the module in depends_on" + ) + + # 4) Checks all references for modules (when ModuleInterfaces don't exist or not supported) + checked_module_names = {module.name() for module in expected_load} + # Modules from the config that doesn't support the current mode weren't added in Step1, adding them here + checked_module_names |= in_config + for module in sys.modules.keys(): + if module.startswith(_MODULE_PREFIX) and module != __name__: # skip loader + name = _get_module_name(module) + if name and name not in checked_module_names: + raise ImportError(f"The package {module} has been imported, but it doesn't contain ModuleInterface") + + +def _describe_loading(in_config: Set[str], inactive: Set[str]): + modules = _all_modules() + for module in modules: + name = module.name() + log.debug(f"The {name} module was loaded") + if name in inactive: + log.info(f"There is a module that depends on {module.name()}. " + "The module has been loaded despite it's inactive.") + elif name not in in_config: + log.info(f"There is a module that depends on {module.name()}. " + "The module has been loaded despite it's not specified in the configuration file.") + + +def _remove_module_prefix(module: str): + if module.startswith(_MODULE_PREFIX): + return module[len(_MODULE_PREFIX) + 1:] + raise ValueError(f"Module {module} should always starts with {_MODULE_PREFIX}") + + +def _all_modules(): + return ModuleInterface.__subclasses__() diff --git a/backend/dataall/base/searchproxy/__init__.py b/backend/dataall/base/searchproxy/__init__.py new file mode 100644 index 000000000..8dab74fea --- /dev/null +++ b/backend/dataall/base/searchproxy/__init__.py @@ -0,0 +1,7 @@ +from .connect import connect +from .search import run_query + +__all__ = [ + 'connect', + 'run_query', +] diff --git a/backend/dataall/searchproxy/connect.py b/backend/dataall/base/searchproxy/connect.py similarity index 99% rename from backend/dataall/searchproxy/connect.py rename to backend/dataall/base/searchproxy/connect.py index 3c952f5c9..24383169e 100644 --- a/backend/dataall/searchproxy/connect.py +++ b/backend/dataall/base/searchproxy/connect.py @@ -5,7 +5,7 @@ import opensearchpy from requests_aws4auth import AWS4Auth -from .. 
import utils +from dataall.base import utils CREATE_INDEX_REQUEST_BODY = { 'mappings': { diff --git a/backend/dataall/searchproxy/search.py b/backend/dataall/base/searchproxy/search.py similarity index 100% rename from backend/dataall/searchproxy/search.py rename to backend/dataall/base/searchproxy/search.py diff --git a/backend/dataall/base/utils/__init__.py b/backend/dataall/base/utils/__init__.py new file mode 100644 index 000000000..c5e3306f6 --- /dev/null +++ b/backend/dataall/base/utils/__init__.py @@ -0,0 +1,2 @@ +from .parameter import Parameter +from .slugify import slugify diff --git a/backend/dataall/base/utils/alarm_service.py b/backend/dataall/base/utils/alarm_service.py new file mode 100644 index 000000000..5927dbefb --- /dev/null +++ b/backend/dataall/base/utils/alarm_service.py @@ -0,0 +1,77 @@ +# This module is a wrapper for the cdk cli +# Python native subprocess package is used to spawn cdk [deploy|destroy] commands with appropriate parameters. +# Additionally, it uses the cdk plugin cdk-assume-role-credential-plugin to run cdk commands on target accounts +# see : https://github.com/aws-samples/cdk-assume-role-credential-plugin + +import logging +import os +from datetime import datetime + +from botocore.exceptions import ClientError + +from dataall.base.aws.sts import SessionHelper +from dataall.core.stacks.db.stack_models import Stack + +logger = logging.getLogger(__name__) + + +class AlarmService: + def __init__(self): + self.envname = os.getenv('envname', 'local') + self.region = os.environ.get('AWS_REGION', 'eu-west-1') + + def trigger_stack_deployment_failure_alarm(self, stack: Stack): + logger.info('Triggering deployment failure alarm...') + subject = f'ALARM: DATAALL Stack {stack.name} Deployment Failure Notification' + message = f""" +You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to deploy one of its resource CloudFormation stacks {stack.name} + +View the ECS task logs in the AWS Management Console: +https://{self.region}.console.aws.amazon.com/cloudwatch/deeplink.js?region=eu-west-1#logsV2:log-groups/log-group/$252Fdataall$252F{self.envname}$252Fecs$252Fcdkproxy/log-events/task$252Fcontainer$252F{stack.EcsTaskArn.split('/')[-1]} + +Alarm Details: +- Stack Name: {stack.name} +- AWS Account: {stack.accountid} +- Region: {stack.region} +- State Change: OK -> ALARM +- Reason for State Change: Stack Deployment Failure +- Timestamp: {datetime.now()} +- CW Log Group: {f"/dataall/{self.envname}/cdkproxy/{stack.EcsTaskArn.split('/')[-1]}"} +""" + return self.publish_message_to_alarms_topic(subject, message) + + def trigger_catalog_indexing_failure_alarm(self, error: str): + logger.info('Triggering catalog indexing failure alarm...') + subject = 'ALARM: DATAALL Catalog Indexing Failure Notification' + message = f""" +You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to index new items into OpenSearch. 
+ +Alarm Details: + - State Change: OK -> ALARM + - Reason for State Change: {error} + - Timestamp: {datetime.now()} +""" + return self.publish_message_to_alarms_topic(subject, message) + + def publish_message_to_alarms_topic(self, subject, message): + if self.envname in ['local', 'pytest', 'dkrcompose']: + logger.debug('Running in local mode...SNS topic not available') + else: + region = os.getenv('AWS_REGION', 'eu-west-1') + session = SessionHelper.get_session() + ssm = session.client('ssm', region_name=region) + sns = session.client('sns', region_name=region) + alarms_topic_arn = ssm.get_parameter( + Name=f'/dataall/{self.envname}/sns/alarmsTopic' + )['Parameter']['Value'] + try: + logger.info('Sending deployment failure notification') + response = sns.publish( + TopicArn=alarms_topic_arn, + Subject=subject, + Message=message, + ) + return response + except ClientError as e: + logger.error(f'Failed to deliver message due to: {e} ') + raise e diff --git a/backend/dataall/utils/cdk_nag_utils.py b/backend/dataall/base/utils/cdk_nag_utils.py similarity index 100% rename from backend/dataall/utils/cdk_nag_utils.py rename to backend/dataall/base/utils/cdk_nag_utils.py diff --git a/backend/dataall/base/utils/decorator_utls.py b/backend/dataall/base/utils/decorator_utls.py new file mode 100644 index 000000000..bccc29e02 --- /dev/null +++ b/backend/dataall/base/utils/decorator_utls.py @@ -0,0 +1,14 @@ +def process_func(func): + """Helper function that helps decorate methods/functions""" + def no_decorated(f): + return f + + static_func = False + try: + fn = func.__func__ + static_func = True + except AttributeError: + fn = func + + # returns a function to call and static decorator if applied + return fn, staticmethod if static_func else no_decorated diff --git a/backend/dataall/utils/json_utils.py b/backend/dataall/base/utils/json_utils.py similarity index 100% rename from backend/dataall/utils/json_utils.py rename to backend/dataall/base/utils/json_utils.py diff --git a/backend/dataall/utils/naming_convention.py b/backend/dataall/base/utils/naming_convention.py similarity index 100% rename from backend/dataall/utils/naming_convention.py rename to backend/dataall/base/utils/naming_convention.py diff --git a/backend/dataall/utils/parameter.py b/backend/dataall/base/utils/parameter.py similarity index 100% rename from backend/dataall/utils/parameter.py rename to backend/dataall/base/utils/parameter.py diff --git a/backend/dataall/utils/slugify.py b/backend/dataall/base/utils/slugify.py similarity index 100% rename from backend/dataall/utils/slugify.py rename to backend/dataall/base/utils/slugify.py diff --git a/backend/dataall/utils/sql_utils.py b/backend/dataall/base/utils/sql_utils.py similarity index 100% rename from backend/dataall/utils/sql_utils.py rename to backend/dataall/base/utils/sql_utils.py diff --git a/backend/dataall/cdkproxy/README.md b/backend/dataall/cdkproxy/README.md deleted file mode 100644 index 497e863bb..000000000 --- a/backend/dataall/cdkproxy/README.md +++ /dev/null @@ -1,50 +0,0 @@ - - -## About -`cdkproxy` is a package that exposes a REST API to run pre-defined -cloudformation stacks using the aws cdk package. - -It is deployed as a docker container running on AWS ECS. - -## How it works - -cdkproxy exposes a REST API to manage pre-defined stacks. -It reads and updates tasks from the dataall database. -Somes APIs are run asynchrnously , returning an id for subsequent reads. -Some APIs are run synchrnously. - - -Pre-defined cdk stacks are defined in the stack package. 
-To register a pre-defined stack, use the `@stack` decorator as in the example below : - -```python - -from aws_cdk import ( - aws_s3 as s3, - aws_sqs as sqs, - core -) -from dataall.cdkproxy.stacks import stack - -@stack(stack="mypredefinedstack") -class MyPredefinedStack(core.Stack): - def __init__(self, scope, id, **kwargs): - super().__init__(scope, id, **kwargs) - #constructs goes here - -``` - - -## Local setup - -### pre requisites - -1. You must have docker installed -2. You must have ~/.aws folder with your aws credentials - -### build the image -At the root folder: -`docker build --network=host -t cdkproxy:latest . ` - -### Run the image -`docker run --network host -p 8080:8080 -v /home/moshir/.aws:/root/.aws:ro --name cdkproxy cdkproxy:latest ` diff --git a/backend/dataall/cdkproxy/cdkpipeline/__init__.py b/backend/dataall/cdkproxy/cdkpipeline/__init__.py deleted file mode 100644 index 2d5006e05..000000000 --- a/backend/dataall/cdkproxy/cdkpipeline/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .cdk_pipeline import CDKPipelineStack - -__all__ = [ - 'CDKPipelineStack' -] diff --git a/backend/dataall/cdkproxy/cdkpipeline/cdk_pipeline.py b/backend/dataall/cdkproxy/cdkpipeline/cdk_pipeline.py deleted file mode 100644 index 66829b6fc..000000000 --- a/backend/dataall/cdkproxy/cdkpipeline/cdk_pipeline.py +++ /dev/null @@ -1,282 +0,0 @@ -import logging -import os -import sys -import subprocess -import boto3 - -from ... import db -from ...db.api import Environment, Pipeline -from ...aws.handlers.sts import SessionHelper -from botocore.exceptions import ClientError - -logger = logging.getLogger(__name__) - - -class CDKPipelineStack: - """ - Create a stack that contains CDK Continuous Integration and Delivery (CI/CD) pipeline. - - The pipeline is based on AWS DDK CICD CodePipeline pipelines - - - Defaults for source/synth - CodeCommit & cdk synth - - blueprint with DDK application code added in the CodeCommit repository - - ability to define development stages: dev, test, prod - - Ability to connect to private artifactory to pull artifacts from at synth - - Security best practices - ensures pipeline buckets block non-SSL, and are KMS-encrypted with rotated keys - - data.all metadata as environment variables accesible at synth - - """ - def get_engine(self): - envname = os.environ.get("envname", "local") - engine = db.get_engine(envname=envname) - return engine - - module_name = __file__ - - def __init__(self, target_uri): - engine = self.get_engine() - with engine.scoped_session() as session: - - self.pipeline = Pipeline.get_pipeline_by_uri(session, target_uri) - self.pipeline_environment = Environment.get_environment_by_uri(session, self.pipeline.environmentUri) - # Development environments - self.development_environments = Pipeline.query_pipeline_environments(session, target_uri) - - self.env, aws = CDKPipelineStack._set_env_vars(self.pipeline_environment) - - self.code_dir_path = os.path.dirname(os.path.abspath(__file__)) - - try: - codecommit_client = aws.client('codecommit', region_name=self.pipeline_environment.region) - repository = CDKPipelineStack._check_repository(codecommit_client, self.pipeline.repo) - if repository: - self.venv_name = None - self.code_dir_path = os.path.realpath( - os.path.abspath( - os.path.join( - __file__, "..", "..", "blueprints", "data_pipeline_blueprint" - ) - ) - ) - CDKPipelineStack.write_ddk_json_multienvironment(path=self.code_dir_path, output_file="ddk.json", pipeline_environment=self.pipeline_environment, 
development_environments=self.development_environments) - CDKPipelineStack.write_ddk_app_multienvironment(path=self.code_dir_path, output_file="app.py", pipeline=self.pipeline, development_environments=self.development_environments) - - logger.info(f"Pipeline Repo {self.pipeline.repo} Exists...Handling Update") - update_cmds = [ - f'REPO_NAME={self.pipeline.repo}', - 'COMMITID=$(aws codecommit get-branch --repository-name ${REPO_NAME} --branch-name main --query branch.commitId --output text)', - 'aws codecommit put-file --repository-name ${REPO_NAME} --branch-name main --file-content file://ddk.json --file-path ddk.json --parent-commit-id ${COMMITID} --cli-binary-format raw-in-base64-out', - 'COMMITID=$(aws codecommit get-branch --repository-name ${REPO_NAME} --branch-name main --query branch.commitId --output text)', - 'aws codecommit put-file --repository-name ${REPO_NAME} --branch-name main --file-content file://app.py --file-path app.py --parent-commit-id ${COMMITID} --cli-binary-format raw-in-base64-out', - ] - - process = subprocess.run( - "; ".join(update_cmds), - text=True, - shell=True, # nosec - encoding='utf-8', - cwd=self.code_dir_path, - env=self.env - ) - else: - raise Exception - except Exception as e: - self.venv_name = self.initialize_repo() - CDKPipelineStack.write_ddk_app_multienvironment(path=os.path.join(self.code_dir_path, self.pipeline.repo), output_file="app.py", pipeline=self.pipeline, development_environments=self.development_environments) - CDKPipelineStack.write_ddk_json_multienvironment(path=os.path.join(self.code_dir_path, self.pipeline.repo), output_file="ddk.json", pipeline_environment=self.pipeline_environment, development_environments=self.development_environments) - self.git_push_repo() - - def initialize_repo(self): - venv_name = ".venv" - cmd_init = [ - f"ddk init {self.pipeline.repo} --generate-only", - f"cd {self.pipeline.repo}", - "git init --initial-branch main", - f"ddk create-repository {self.pipeline.repo} -t application dataall -t team {self.pipeline.SamlGroupName}" - ] - - logger.info(f"Running Commands: {'; '.join(cmd_init)}") - - process = subprocess.run( - '; '.join(cmd_init), - text=True, - shell=True, # nosec - encoding='utf-8', - cwd=self.code_dir_path, - env=self.env - ) - if process.returncode == 0: - logger.info("Successfully Initialized New CDK/DDK App") - - return venv_name - - @staticmethod - def write_ddk_json_multienvironment(path, output_file, pipeline_environment, development_environments): - json_envs = "" - for env in development_environments: - json_env = f""", - "{env.stage}": {{ - "account": "{env.AwsAccountId}", - "region": "{env.region}", - "resources": {{ - "ddk-bucket": {{"versioned": false, "removal_policy": "destroy"}} - }} - }}""" - json_envs = json_envs + json_env - - json = f"""{{ - "environments": {{ - "cicd": {{ - "account": "{pipeline_environment.AwsAccountId}", - "region": "{pipeline_environment.region}" - }}{json_envs} - }} -}}""" - - with open(f'{path}/{output_file}', 'w') as text_file: - print(json, file=text_file) - - @staticmethod - def write_ddk_app_multienvironment(path, output_file, pipeline, development_environments): - header = f""" -# !/usr/bin/env python3 - -import aws_cdk as cdk -from aws_ddk_core.cicd import CICDPipelineStack -from ddk_app.ddk_app_stack import DdkApplicationStack -from aws_ddk_core.config import Config - -app = cdk.App() - -class ApplicationStage(cdk.Stage): - def __init__( - self, - scope, - environment_id: str, - **kwargs, - ) -> None: - super().__init__(scope, 
f"dataall-{{environment_id.title()}}", **kwargs) - DdkApplicationStack(self, "DataPipeline-{pipeline.label}-{pipeline.DataPipelineUri}", environment_id) - -id = f"dataall-cdkpipeline-{pipeline.DataPipelineUri}" -config = Config() -( - CICDPipelineStack( - app, - id=id, - environment_id="cicd", - pipeline_name="{pipeline.label}", - ) - .add_source_action(repository_name="{pipeline.repo}") - .add_synth_action() - .build()""" - - stages = "" - for env in sorted(development_environments, key=lambda env: env.order): - stage = f""".add_stage("{env.stage}", ApplicationStage(app, "{env.stage}", env=config.get_env("{env.stage}")))""" - stages = stages + stage - footer = """ - .synth() -) - -app.synth() -""" - app = header + stages + footer - - with open(f'{path}/{output_file}', 'w') as text_file: - print(app, file=text_file) - - def git_push_repo(self): - git_cmds = [ - 'git config user.email "codebuild@example.com"', - 'git config user.name "CodeBuild"', - 'git config --local credential.helper "!aws codecommit credential-helper $@"', - "git config --local credential.UseHttpPath true", - "git add .", - "git commit -a -m 'Initial Commit' ", - "git push -u origin main" - ] - - logger.info(f"Running Commands: {'; '.join(git_cmds)}") - - process = subprocess.run( - '; '.join(git_cmds), - text=True, - shell=True, # nosec - encoding='utf-8', - cwd=os.path.join(self.code_dir_path, self.pipeline.repo), - env=self.env - ) - if process.returncode == 0: - logger.info("Successfully Pushed DDK App Code") - - @staticmethod - def clean_up_repo(path): - if path: - precmd = [ - 'deactivate;', - 'rm', - '-rf', - f"{path}" - ] - - cwd = os.path.dirname(os.path.abspath(__file__)) - logger.info(f"Running command : \n {' '.join(precmd)}") - - process = subprocess.run( - ' '.join(precmd), - text=True, - shell=True, # nosec - encoding='utf-8', - capture_output=True, - cwd=cwd - ) - - if process.returncode == 0: - print(f"Successfully cleaned cloned repo: {path}. 
{str(process.stdout)}") - else: - logger.error( - f'Failed clean cloned repo: {path} due to {str(process.stderr)}' - ) - else: - logger.info(f"Info:Path {path} not found") - return - - @staticmethod - def _check_repository(codecommit_client, repo_name): - repository = None - logger.info(f"Checking Repository Exists: {repo_name}") - try: - repository = codecommit_client.get_repository(repositoryName=repo_name) - except ClientError as e: - if e.response['Error']['Code'] == 'RepositoryDoesNotExistException': - logger.debug(f'Repository does not exists {repo_name} %s', e) - else: - raise e - return repository if repository else None - - @staticmethod - def _set_env_vars(pipeline_environment): - aws = SessionHelper.remote_session(pipeline_environment.AwsAccountId) - env_creds = aws.get_credentials() - - python_path = '/:'.join(sys.path)[1:] + ':/code' + os.getenv('PATH') - - env = { - 'AWS_REGION': pipeline_environment.region, - 'AWS_DEFAULT_REGION': pipeline_environment.region, - 'CURRENT_AWS_ACCOUNT': pipeline_environment.AwsAccountId, - 'PYTHONPATH': python_path, - 'PATH': python_path, - 'envname': os.environ.get('envname', 'local'), - 'COOKIECUTTER_CONFIG': "/dataall/cdkproxy/blueprints/cookiecutter_config.yaml", - } - if env_creds: - env.update( - { - 'AWS_ACCESS_KEY_ID': env_creds.access_key, - 'AWS_SECRET_ACCESS_KEY': env_creds.secret_key, - 'AWS_SESSION_TOKEN': env_creds.token - } - ) - return env, aws diff --git a/backend/dataall/cdkproxy/cfnstacks/sagemaker-domain-template.yaml b/backend/dataall/cdkproxy/cfnstacks/sagemaker-domain-template.yaml deleted file mode 100644 index c1e075df9..000000000 --- a/backend/dataall/cdkproxy/cfnstacks/sagemaker-domain-template.yaml +++ /dev/null @@ -1,21 +0,0 @@ -Parameters: - auth_mode: - Description: "Auth mode for Sagemaker Domain" - domain_name: - Description: "domain name for Sagemaker Domain" - subnet_ids: - Description: "subnet ids for Sagemaker Domain" - vpc_id: - Description: "vpc id for Sagemaker Domain" - default_execution_role_user: - Description: "default execution role user for Sagemaker Domain" -Resources: - SagemakerDomainCDK: - Type: AWS::SageMaker::Domain - Properties: - AuthMode: !Ref auth_mode - DefaultUserSettings: - ExecutionRole: !Ref default_execution_role_user - DomainName: !Ref domain_name - SubnetIds: !Ref subnet_ids - VpcId: !Ref vpc_id diff --git a/backend/dataall/cdkproxy/stacks/__init__.py b/backend/dataall/cdkproxy/stacks/__init__.py deleted file mode 100644 index 81abe263b..000000000 --- a/backend/dataall/cdkproxy/stacks/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -from .dataset import Dataset -from .environment import EnvironmentSetup -from .pipeline import PipelineStack -from .manager import stack, instanciate_stack, StackManager -from .notebook import SagemakerNotebook -from .redshift_cluster import RedshiftStack -from .sagemakerstudio import SagemakerStudioUserProfile - -__all__ = [ - 'EnvironmentSetup', - 'Dataset', - 'StackManager', - 'stack', - 'StackManager', - 'instanciate_stack', -] diff --git a/backend/dataall/cdkproxy/stacks/dataset.py b/backend/dataall/cdkproxy/stacks/dataset.py deleted file mode 100644 index 3e75633d3..000000000 --- a/backend/dataall/cdkproxy/stacks/dataset.py +++ /dev/null @@ -1,536 +0,0 @@ -import logging -import os - -from aws_cdk import ( - aws_s3 as s3, - aws_kms as kms, - aws_iam as iam, - aws_ssm as ssm, - aws_glue as glue, - Stack, - Duration, - CfnResource, - CustomResource, - Tags, -) -from aws_cdk.aws_glue import CfnCrawler - -from .manager import stack -from ... 
import db -from ...aws.handlers.lakeformation import LakeFormation -from ...aws.handlers.quicksight import Quicksight -from ...aws.handlers.sts import SessionHelper -from ...db import models -from ...db.api import Environment -from ...utils.cdk_nag_utils import CDKNagUtil -from ...utils.runtime_stacks_tagging import TagsUtil - -logger = logging.getLogger(__name__) - - -@stack(stack='dataset') -class Dataset(Stack): - """Deploy common dataset resources: - - dataset S3 Bucket + KMS key (If S3 Bucket not imported) - - dataset IAM role - - custom resource to create glue database and grant permissions - - custom resource to register S3 location in LF - - Glue crawler - - Glue profiling job - """ - module_name = __file__ - - def get_engine(self) -> db.Engine: - envname = os.environ.get('envname', 'local') - engine = db.get_engine(envname=envname) - return engine - - def get_env(self, dataset) -> models.Environment: - engine = self.get_engine() - with engine.scoped_session() as session: - env = session.query(models.Environment).get(dataset.environmentUri) - return env - - def get_env_group(self, dataset) -> models.EnvironmentGroup: - engine = self.get_engine() - with engine.scoped_session() as session: - env = Environment.get_environment_group( - session, dataset.SamlAdminGroupName, dataset.environmentUri - ) - return env - - def get_target_with_uri(self, target_uri) -> models.Dataset: - engine = self.get_engine() - with engine.scoped_session() as session: - dataset = session.query(models.Dataset).get(target_uri) - if not dataset: - raise Exception('ObjectNotFound') - return dataset - - def get_target(self) -> models.Dataset: - engine = self.get_engine() - with engine.scoped_session() as session: - dataset = session.query(models.Dataset).get(self.target_uri) - if not dataset: - raise Exception('ObjectNotFound') - return dataset - - def __init__(self, scope, id, target_uri: str = None, **kwargs): - super().__init__( - scope, - id, - description="Cloud formation stack of DATASET: {}; URI: {}; DESCRIPTION: {}".format( - self.get_target_with_uri(target_uri=target_uri).label, - target_uri, - self.get_target_with_uri(target_uri=target_uri).description, - )[:1024], - **kwargs) - - # Read input - self.target_uri = target_uri - self.pivot_role_name = SessionHelper.get_delegation_role_name() - dataset = self.get_target() - env = self.get_env(dataset) - env_group = self.get_env_group(dataset) - - quicksight_default_group_arn = None - if env.dashboardsEnabled: - quicksight_default_group_arn = f"arn:aws:quicksight:{dataset.region}:{dataset.AwsAccountId}:group/default/{Quicksight._DEFAULT_GROUP_NAME}" - - # Dataset S3 Bucket and KMS key - dataset_key = False - if dataset.imported and dataset.importedS3Bucket: - dataset_bucket = s3.Bucket.from_bucket_name( - self, f'ImportedBucket{dataset.datasetUri}', dataset.S3BucketName - ) - if dataset.importedKmsKey: - dataset_key = kms.Key.from_lookup( - self, f'ImportedKey{dataset.datasetUri}', alias_name=f"alias/{dataset.KmsAlias}" - ) - else: - dataset_key = kms.Key( - self, - 'DatasetKmsKey', - alias=dataset.KmsAlias, - enable_key_rotation=True, - policy=iam.PolicyDocument( - statements=[ - iam.PolicyStatement( - sid="EnableDatasetOwnerKeyUsage", - resources=['*'], - effect=iam.Effect.ALLOW, - principals=[ - iam.ArnPrincipal(env_group.environmentIAMRoleArn), - ], - actions=[ - "kms:Encrypt", - "kms:Decrypt", - "kms:ReEncrypt*", - "kms:GenerateDataKey*", - "kms:DescribeKey", - "kms:List*", - "kms:GetKeyPolicy", - ], - ), - iam.PolicyStatement( - 
sid='KMSPivotRolePermissions', - effect=iam.Effect.ALLOW, - actions=[ - 'kms:Decrypt', - 'kms:Encrypt', - 'kms:GenerateDataKey*', - 'kms:PutKeyPolicy', - "kms:GetKeyPolicy", - 'kms:ReEncrypt*', - 'kms:TagResource', - 'kms:UntagResource', - 'kms:DeleteAlias', - 'kms:DescribeKey', - 'kms:CreateAlias', - 'kms:List*', - ], - resources=['*'], - principals=[ - iam.ArnPrincipal(f'arn:aws:iam::{env.AwsAccountId}:role/{self.pivot_role_name}') - ], - ) - ] - ), - admins=[ - iam.ArnPrincipal(env.CDKRoleArn), - ] - ) - - dataset_bucket = s3.Bucket( - self, - 'DatasetBucket', - bucket_name=dataset.S3BucketName, - encryption=s3.BucketEncryption.KMS, - encryption_key=dataset_key, - cors=[ - s3.CorsRule( - allowed_methods=[ - s3.HttpMethods.HEAD, - s3.HttpMethods.POST, - s3.HttpMethods.PUT, - s3.HttpMethods.DELETE, - s3.HttpMethods.GET, - ], - allowed_origins=['*'], - allowed_headers=['*'], - exposed_headers=[], - ) - ], - block_public_access=s3.BlockPublicAccess.BLOCK_ALL, - server_access_logs_bucket=s3.Bucket.from_bucket_name( - self, - 'EnvAccessLogsBucket', - f'{env.EnvironmentDefaultBucketName}', - ), - server_access_logs_prefix=f'access_logs/{dataset.S3BucketName}/', - enforce_ssl=True, - versioned=True, - bucket_key_enabled=True, - ) - - dataset_bucket.add_lifecycle_rule( - abort_incomplete_multipart_upload_after=Duration.days(7), - noncurrent_version_transitions=[ - s3.NoncurrentVersionTransition( - storage_class=s3.StorageClass.INFREQUENT_ACCESS, - transition_after=Duration.days(30), - ), - s3.NoncurrentVersionTransition( - storage_class=s3.StorageClass.GLACIER, - transition_after=Duration.days(60), - ), - ], - transitions=[ - s3.Transition( - storage_class=s3.StorageClass.INTELLIGENT_TIERING, - transition_after=Duration.days(90), - ), - s3.Transition( - storage_class=s3.StorageClass.GLACIER, - transition_after=Duration.days(360), - ), - ], - enabled=True, - ) - - # Dataset IAM role - ETL policies - dataset_admin_policy = iam.Policy( - self, - 'DatasetAdminPolicy', - policy_name=dataset.S3BucketName, - statements=[ - iam.PolicyStatement( - sid="ListAll", - actions=[ - "s3:ListAllMyBuckets", - "s3:ListAccessPoints", - ], - resources=["*"], - effect=iam.Effect.ALLOW - ), - iam.PolicyStatement( - sid="ListDatasetBucket", - actions=[ - "s3:ListBucket", - "s3:GetBucketLocation" - ], - resources=[dataset_bucket.bucket_arn], - effect=iam.Effect.ALLOW, - ), - iam.PolicyStatement( - sid="ReadWriteDatasetBucket", - actions=[ - "s3:PutObject", - "s3:PutObjectAcl", - "s3:GetObject", - "s3:GetObjectAcl", - "s3:GetObjectVersion", - "s3:DeleteObject" - ], - effect=iam.Effect.ALLOW, - resources=[dataset_bucket.bucket_arn + '/*'], - ), - iam.PolicyStatement( - sid="ReadAccessPointsDatasetBucket", - actions=[ - 's3:GetAccessPoint', - 's3:GetAccessPointPolicy', - 's3:GetAccessPointPolicyStatus', - ], - effect=iam.Effect.ALLOW, - resources=[ - f'arn:aws:s3:{dataset.region}:{dataset.AwsAccountId}:accesspoint/{dataset.datasetUri}*', - ], - ), - iam.PolicyStatement( - sid="GlueAccessCrawler", - actions=[ - "glue:Get*", - "glue:BatchGet*", - "glue:CreateTable", - "glue:UpdateTable", - "glue:DeleteTableVersion", - "glue:DeleteTable", - ], - effect=iam.Effect.ALLOW, - resources=[ - f"arn:aws:glue:*:{dataset.AwsAccountId}:catalog", - f"arn:aws:glue:{dataset.region}:{dataset.AwsAccountId}:database/{dataset.GlueDatabaseName}", - f"arn:aws:glue:{dataset.region}:{dataset.AwsAccountId}:table/{dataset.GlueDatabaseName}/*" - ] - ), - iam.PolicyStatement( - sid="GlueAccessDefault", - actions=[ - "glue:GetDatabase", - ], - 
effect=iam.Effect.ALLOW, - resources=[ - f"arn:aws:glue:{dataset.region}:{dataset.AwsAccountId}:database/default", - ] - ), - iam.PolicyStatement( - sid="CreateLoggingGlue", - actions=[ - 'logs:CreateLogGroup', - 'logs:CreateLogStream', - ], - effect=iam.Effect.ALLOW, - resources=[ - f'arn:aws:logs:{dataset.region}:{dataset.AwsAccountId}:log-group:/aws-glue/crawlers*', - f'arn:aws:logs:{dataset.region}:{dataset.AwsAccountId}:log-group:/aws-glue/jobs/*', - ], - ), - iam.PolicyStatement( - sid="LoggingGlue", - actions=[ - 'logs:PutLogEvents', - ], - effect=iam.Effect.ALLOW, - resources=[ - f'arn:aws:logs:{dataset.region}:{dataset.AwsAccountId}:log-group:/aws-glue/crawlers:log-stream:{dataset.GlueCrawlerName}', - f'arn:aws:logs:{dataset.region}:{dataset.AwsAccountId}:log-group:/aws-glue/jobs/*', - ], - ), - iam.PolicyStatement( - actions=['s3:ListBucket'], - resources=[f'arn:aws:s3:::{env.EnvironmentDefaultBucketName}'], - effect=iam.Effect.ALLOW - ), - iam.PolicyStatement( - sid="ReadEnvironmentBucketProfiling", - actions=[ - "s3:GetObject", - "s3:GetObjectAcl", - "s3:GetObjectVersion" - ], - effect=iam.Effect.ALLOW, - resources=[f'arn:aws:s3:::{env.EnvironmentDefaultBucketName}/profiling/code/*'], - ), - iam.PolicyStatement( - sid="ReadWriteEnvironmentBucketProfiling", - actions=[ - "s3:PutObject", - "s3:PutObjectAcl", - "s3:GetObject", - "s3:GetObjectAcl", - "s3:GetObjectVersion", - "s3:DeleteObject" - ], - resources=[f'arn:aws:s3:::{env.EnvironmentDefaultBucketName}/profiling/results/{dataset.datasetUri}/*'], - effect=iam.Effect.ALLOW, - ), - ], - ) - if dataset_key: - dataset_admin_policy.add_statements( - iam.PolicyStatement( - sid="KMSAccess", - actions=[ - "kms:Decrypt", - "kms:Encrypt", - "kms:GenerateDataKey" - ], - effect=iam.Effect.ALLOW, - resources=[dataset_key.key_arn], - ) - ) - dataset_admin_policy.node.add_dependency(dataset_bucket) - - dataset_admin_role = iam.Role( - self, - 'DatasetAdminRole', - role_name=dataset.IAMDatasetAdminRoleArn.split('/')[-1], - assumed_by=iam.CompositePrincipal( - iam.ArnPrincipal( - f'arn:aws:iam::{dataset.AwsAccountId}:role/{self.pivot_role_name}' - ), - iam.ServicePrincipal('glue.amazonaws.com'), - ), - ) - dataset_admin_policy.attach_to_role(dataset_admin_role) - - # Add Key Policy For Users - if not dataset.imported: - dataset_key.add_to_resource_policy( - iam.PolicyStatement( - sid="EnableDatasetIAMRoleKeyUsage", - resources=['*'], - effect=iam.Effect.ALLOW, - principals=[dataset_admin_role], - actions=[ - "kms:Encrypt", - "kms:Decrypt", - "kms:ReEncrypt*", - "kms:GenerateDataKey*", - "kms:DescribeKey" - ], - ) - ) - - # Datalake location custom resource: registers the S3 location in LakeFormation - registered_location = LakeFormation.check_existing_lf_registered_location( - resource_arn=f'arn:aws:s3:::{dataset.S3BucketName}', - role_arn=dataset.IAMDatasetAdminRoleArn, - accountid=env.AwsAccountId, - region=env.region - ) - - if not registered_location: - storage_location = CfnResource( - self, - 'DatasetStorageLocation', - type='AWS::LakeFormation::Resource', - properties={ - 'ResourceArn': f'arn:aws:s3:::{dataset.S3BucketName}', - 'RoleArn': f'arn:aws:iam::{env.AwsAccountId}:role/{self.pivot_role_name}', - 'UseServiceLinkedRole': False, - }, - ) - - # Define dataset admin groups (those with data access grant) - dataset_admins = [ - dataset_admin_role.role_arn, - f'arn:aws:iam::{env.AwsAccountId}:role/{self.pivot_role_name}', - env_group.environmentIAMRoleArn, - ] - if quicksight_default_group_arn: - 
dataset_admins.append(quicksight_default_group_arn) - - # Get the Provider service token from SSM, the Lambda and Provider are created as part of the environment stack - glue_db_provider_service_token = ssm.StringParameter.from_string_parameter_name( - self, - 'GlueDatabaseProviderServiceToken', - string_parameter_name=f'/dataall/{dataset.environmentUri}/cfn/custom-resources/gluehandler/provider/servicetoken', - ) - - glue_db = CustomResource( - self, - f'{env.resourcePrefix}GlueDatabaseCustomResource', - service_token=glue_db_provider_service_token.string_value, - resource_type='Custom::GlueDatabase', - properties={ - 'CatalogId': dataset.AwsAccountId, - 'DatabaseInput': { - 'Description': 'dataall database {} '.format( - dataset.GlueDatabaseName - ), - 'LocationUri': f's3://{dataset.S3BucketName}/', - 'Name': f'{dataset.GlueDatabaseName}', - 'CreateTableDefaultPermissions': [], - 'Imported': 'IMPORTED-' if dataset.imported else 'CREATED-' - }, - 'DatabaseAdministrators': dataset_admins, - 'TriggerUpdate': True - }, - ) - - # Support resources: GlueCrawler for the dataset, Profiling Job and Trigger - crawler = glue.CfnCrawler( - self, - dataset.GlueCrawlerName, - description=f'datall Glue Crawler for S3 Bucket {dataset.S3BucketName}', - name=dataset.GlueCrawlerName, - database_name=dataset.GlueDatabaseName, - schedule={'scheduleExpression': f'{dataset.GlueCrawlerSchedule}'} - if dataset.GlueCrawlerSchedule - else None, - role=dataset_admin_role.role_arn, - targets=CfnCrawler.TargetsProperty( - s3_targets=[ - CfnCrawler.S3TargetProperty(path=f's3://{dataset.S3BucketName}') - ] - ), - ) - crawler.node.add_dependency(dataset_bucket) - - job_args = { - '--additional-python-modules': 'urllib3<2,pydeequ', - '--datasetUri': dataset.datasetUri, - '--database': dataset.GlueDatabaseName, - '--datasetRegion': dataset.region, - '--dataallRegion': os.getenv('AWS_REGION', 'eu-west-1'), - '--environmentUri': env.environmentUri, - '--environmentBucket': env.EnvironmentDefaultBucketName, - '--datasetBucket': dataset.S3BucketName, - '--apiUrl': 'None', - '--snsTopicArn': 'None', - '--extra-jars': ( - f's3://{env.EnvironmentDefaultBucketName}' - f'/profiling/code/jars/deequ-2.0.0-spark-3.1.jar' - ), - '--enable-metrics': 'true', - '--enable-continuous-cloudwatch-log': 'true', - '--enable-glue-datacatalog': 'true', - '--SPARK_VERSION': '3.1', - } - - job = glue.CfnJob( - self, - 'DatasetGlueProfilingJob', - name=dataset.GlueProfilingJobName, - description=f'datall Glue Profiling job for dataset {dataset.label}', - role=dataset_admin_role.role_arn, - allocated_capacity=10, - execution_property=glue.CfnJob.ExecutionPropertyProperty( - max_concurrent_runs=100 - ), - command=glue.CfnJob.JobCommandProperty( - name='glueetl', - python_version='3', - script_location=( - f's3://{env.EnvironmentDefaultBucketName}' - f'/profiling/code/glue_script.py' - ), - ), - default_arguments=job_args, - glue_version='3.0', - tags={'Application': 'dataall'}, - ) - if dataset.GlueProfilingTriggerSchedule: - trigger = glue.CfnTrigger( - self, - 'DatasetGlueProfilingTrigger', - name=dataset.GlueProfilingTriggerName, - description=f'datall Glue Profiling trigger schedule for dataset {dataset.label}', - type='SCHEDULED', - schedule=dataset.GlueProfilingTriggerSchedule, - start_on_creation=True, - actions=[ - glue.CfnTrigger.ActionProperty( - job_name=dataset.GlueProfilingJobName, arguments=job_args - ) - ], - ) - trigger.node.add_dependency(job) - - Tags.of(self).add('Classification', dataset.confidentiality) - - 
TagsUtil.add_tags(self) - - CDKNagUtil.check_rules(self) diff --git a/backend/dataall/cdkproxy/stacks/environment.py b/backend/dataall/cdkproxy/stacks/environment.py deleted file mode 100644 index 0368d5dcf..000000000 --- a/backend/dataall/cdkproxy/stacks/environment.py +++ /dev/null @@ -1,800 +0,0 @@ -import json -import logging -import os -import pathlib -import shutil - -from aws_cdk import ( - custom_resources as cr, - aws_ec2 as ec2, - aws_s3 as s3, - aws_s3_deployment, - aws_iam as iam, - aws_lambda as _lambda, - aws_lambda_destinations as lambda_destination, - aws_ssm as ssm, - aws_sns as sns, - aws_sqs as sqs, - aws_sns_subscriptions as sns_subs, - aws_kms as kms, - aws_athena, - RemovalPolicy, - CfnOutput, - Stack, - Duration, - CustomResource, - Tags, -) -from constructs import DependencyGroup - -from .manager import stack -from .pivot_role import PivotRole -from .sagemakerstudio import SageMakerDomain -from .policies.data_policy import DataPolicy -from .policies.service_policy import ServicePolicy -from ... import db -from ...aws.handlers.parameter_store import ParameterStoreManager -from ...aws.handlers.sts import SessionHelper -from ...db import models -from ...utils.cdk_nag_utils import CDKNagUtil -from ...utils.runtime_stacks_tagging import TagsUtil - -logger = logging.getLogger(__name__) - - -@stack(stack='environment') -class EnvironmentSetup(Stack): - """Deploy common environment resources: - - default environment S3 Bucket - - Lambda + Provider for dataset Glue Databases custom resource - - Lambda + Provider for dataset Data Lake location custom resource - - SSM parameters for the Lambdas and Providers - - pivotRole (if configured) - - SNS topic (if subscriptions are enabled) - - SM Studio domain (if ML studio is enabled) - - Deploy team specific resources: teams IAM roles, Athena workgroups - - Set PivotRole as Lake formation data lake Admin - lakeformationdefaultsettings custom resource - """ - module_name = __file__ - - @staticmethod - def get_env_name(): - return os.environ.get('envname', 'local') - - def get_engine(self): - engine = db.get_engine(envname=self.get_env_name()) - return engine - - def get_target(self, target_uri) -> models.Environment: - engine = self.get_engine() - with engine.scoped_session() as session: - target = session.query(models.Environment).get(target_uri) - if not target: - raise Exception('ObjectNotFound') - return target - - @staticmethod - def get_environment_group_permissions(engine, environmentUri, group): - with engine.scoped_session() as session: - group_permissions = db.api.Environment.list_group_permissions( - session=session, - username='cdk', - groups=None, - uri=environmentUri, - data={'groupUri': group}, - check_perm=False, - ) - permission_names = [permission.name for permission in group_permissions] - return permission_names - - @staticmethod - def get_environment_groups(engine, environment: models.Environment) -> [models.EnvironmentGroup]: - with engine.scoped_session() as session: - return db.api.Environment.list_environment_invited_groups( - session, - username='cdk', - groups=[], - uri=environment.environmentUri, - data=None, - check_perm=False, - ) - - @staticmethod - def get_environment_admins_group(engine, environment: models.Environment) -> [models.EnvironmentGroup]: - with engine.scoped_session() as session: - return db.api.Environment.get_environment_group( - session, - environment_uri=environment.environmentUri, - group_uri=environment.SamlGroupName, - ) - - @staticmethod - def get_environment_group_datasets(engine, 
environment: models.Environment, group: str) -> [models.Dataset]: - with engine.scoped_session() as session: - return db.api.Environment.list_group_datasets( - session, - username='cdk', - groups=[], - uri=environment.environmentUri, - data={'groupUri': group}, - check_perm=False, - ) - - @staticmethod - def get_all_environment_datasets(engine, environment: models.Environment) -> [models.Dataset]: - with engine.scoped_session() as session: - return ( - session.query(models.Dataset) - .filter( - models.Dataset.environmentUri == environment.environmentUri, - ) - .all() - ) - - def __init__(self, scope, id, target_uri: str = None, **kwargs): - super().__init__( - scope, - id, - description='Cloud formation stack of ENVIRONMENT: {}; URI: {}; DESCRIPTION: {}'.format( - self.get_target(target_uri=target_uri).label, - target_uri, - self.get_target(target_uri=target_uri).description, - )[:1024], - **kwargs, - ) - # Read input - self.target_uri = target_uri - self.pivot_role_name = SessionHelper.get_delegation_role_name() - self.external_id = SessionHelper.get_external_id_secret() - self.dataall_central_account = SessionHelper.get_account() - - pivot_role_as_part_of_environment_stack = ParameterStoreManager.get_parameter_value( - region=os.getenv('AWS_REGION', 'eu-west-1'), - parameter_path=f"/dataall/{os.getenv('envname', 'local')}/pivotRole/enablePivotRoleAutoCreate" - ) - self.create_pivot_role = True if pivot_role_as_part_of_environment_stack == "True" else False - self.engine = self.get_engine() - - self._environment = self.get_target(target_uri=target_uri) - - self.environment_groups: [models.EnvironmentGroup] = self.get_environment_groups( - self.engine, environment=self._environment - ) - - self.environment_admins_group: models.EnvironmentGroup = self.get_environment_admins_group( - self.engine, self._environment - ) - - self.all_environment_datasets = self.get_all_environment_datasets(self.engine, self._environment) - - # Create or import Pivot role - if self.create_pivot_role is True: - config = { - 'roleName': self.pivot_role_name, - 'accountId': self.dataall_central_account, - 'externalId': self.external_id, - 'resourcePrefix': self._environment.resourcePrefix, - } - pivot_role_stack = PivotRole(self, 'PivotRoleStack', config) - self.pivot_role = iam.Role.from_role_arn( - self, - f'PivotRole{self._environment.environmentUri}', - pivot_role_stack.pivot_role.role_arn, - ) - else: - self.pivot_role = iam.Role.from_role_arn( - self, - f'PivotRole{self._environment.environmentUri}', - f'arn:aws:iam::{self._environment.AwsAccountId}:role/{self.pivot_role_name}', - ) - - # Environment S3 Bucket - default_environment_bucket = s3.Bucket( - self, - 'EnvironmentDefaultBucket', - bucket_name=self._environment.EnvironmentDefaultBucketName, - encryption=s3.BucketEncryption.S3_MANAGED, - removal_policy=RemovalPolicy.RETAIN, - block_public_access=s3.BlockPublicAccess.BLOCK_ALL, - versioned=True, - enforce_ssl=True, - ) - - default_environment_bucket.add_to_resource_policy( - iam.PolicyStatement( - sid='AWSLogDeliveryWrite', - effect=iam.Effect.ALLOW, - principals=[iam.ServicePrincipal('logging.s3.amazonaws.com')], - actions=['s3:PutObject', 's3:PutObjectAcl'], - resources=[f'{default_environment_bucket.bucket_arn}/*'], - ) - ) - - default_environment_bucket.add_lifecycle_rule( - abort_incomplete_multipart_upload_after=Duration.days(7), - noncurrent_version_transitions=[ - s3.NoncurrentVersionTransition( - storage_class=s3.StorageClass.INFREQUENT_ACCESS, - transition_after=Duration.days(30), - ), - 
s3.NoncurrentVersionTransition( - storage_class=s3.StorageClass.GLACIER, - transition_after=Duration.days(60), - ), - ], - transitions=[ - s3.Transition( - storage_class=s3.StorageClass.INTELLIGENT_TIERING, - transition_after=Duration.days(90), - ), - s3.Transition( - storage_class=s3.StorageClass.GLACIER, - transition_after=Duration.days(360), - ), - ], - enabled=True, - ) - - profiling_assetspath = self.zip_code( - os.path.realpath(os.path.abspath(os.path.join(__file__, '..', '..', 'assets', 'glueprofilingjob'))) - ) - - aws_s3_deployment.BucketDeployment( - self, - f'{self._environment.resourcePrefix}GlueProflingJobDeployment', - sources=[aws_s3_deployment.Source.asset(profiling_assetspath)], - destination_bucket=default_environment_bucket, - destination_key_prefix='profiling/code', - ) - - # Create or import team IAM roles - default_role = self.create_or_import_environment_admin_group_role() - group_roles = self.create_or_import_environment_groups_roles() - - self.create_default_athena_workgroup( - default_environment_bucket, - self._environment.EnvironmentDefaultAthenaWorkGroup, - ) - self.create_athena_workgroups(self.environment_groups, default_environment_bucket) - - kms_key = self.set_cr_kms_key(group_roles, default_role) - - # Lakeformation default settings custom resource - # Set PivotRole as Lake Formation data lake admin - entry_point = str( - pathlib.PosixPath(os.path.dirname(__file__), '../assets/lakeformationdefaultsettings').resolve() - ) - - lakeformation_cr_dlq = self.set_dlq( - f'{self._environment.resourcePrefix}-lfcr-{self._environment.environmentUri}', - kms_key - ) - lf_default_settings_custom_resource = _lambda.Function( - self, - 'LakeformationDefaultSettingsHandler', - function_name=f'{self._environment.resourcePrefix}-lf-settings-handler-{self._environment.environmentUri}', - role=self.pivot_role, - handler='index.on_event', - code=_lambda.Code.from_asset(entry_point), - memory_size=1664, - description='This Lambda function is a cloudformation custom resource provider for Lakeformation default settings', - timeout=Duration.seconds(5 * 60), - environment={ - 'envname': self._environment.name, - 'LOG_LEVEL': 'DEBUG', - 'AWS_ACCOUNT': self._environment.AwsAccountId, - 'DEFAULT_ENV_ROLE_ARN': self._environment.EnvironmentDefaultIAMRoleArn, - 'DEFAULT_CDK_ROLE_ARN': self._environment.CDKRoleArn, - }, - dead_letter_queue_enabled=True, - dead_letter_queue=lakeformation_cr_dlq, - on_failure=lambda_destination.SqsDestination(lakeformation_cr_dlq), - runtime=_lambda.Runtime.PYTHON_3_9, - ) - LakeformationDefaultSettingsProvider = cr.Provider( - self, - f'{self._environment.resourcePrefix}LakeformationDefaultSettingsProvider', - on_event_handler=lf_default_settings_custom_resource, - ) - - default_lf_settings = CustomResource( - self, - f'{self._environment.resourcePrefix}DefaultLakeFormationSettings', - service_token=LakeformationDefaultSettingsProvider.service_token, - resource_type='Custom::LakeformationDefaultSettings', - properties={ - 'DataLakeAdmins': [ - f'arn:aws:iam::{self._environment.AwsAccountId}:role/{self.pivot_role_name}', - ] - }, - ) - - ssm.StringParameter( - self, - 'LakeformationDefaultSettingsCustomeResourceFunctionArn', - string_value=lf_default_settings_custom_resource.function_arn, - parameter_name=f'/dataall/{self._environment.environmentUri}/cfn/lf/defaultsettings/lambda/arn', - ) - - ssm.StringParameter( - self, - 'LakeformationDefaultSettingsCustomeResourceFunctionName', - string_value=lf_default_settings_custom_resource.function_name, - 
parameter_name=f'/dataall/{self._environment.environmentUri}/cfn/lf/defaultsettings/lambda/name', - ) - - # Glue database custom resource - New - # This Lambda is triggered with the creation of each dataset, it is not executed when the environment is created - entry_point = str( - pathlib.PosixPath(os.path.dirname(__file__), '../assets/gluedatabasecustomresource').resolve() - ) - - gluedb_lf_cr_dlq = self.set_dlq( - f'{self._environment.resourcePrefix}-gluedb-lf-cr-{self._environment.environmentUri}', - kms_key - ) - gluedb_lf_custom_resource = _lambda.Function( - self, - 'GlueDatabaseLFCustomResourceHandler', - function_name=f'{self._environment.resourcePrefix}-gluedb-lf-handler-{self._environment.environmentUri}', - role=self.pivot_role, - handler='index.on_event', - code=_lambda.Code.from_asset(entry_point), - memory_size=1664, - description='This Lambda function is a cloudformation custom resource provider for Glue database ' - 'as Cfn currently does not support the CreateTableDefaultPermissions parameter', - timeout=Duration.seconds(5 * 60), - environment={ - 'envname': self._environment.name, - 'LOG_LEVEL': 'DEBUG', - 'AWS_ACCOUNT': self._environment.AwsAccountId, - 'DEFAULT_ENV_ROLE_ARN': self._environment.EnvironmentDefaultIAMRoleArn, - 'DEFAULT_CDK_ROLE_ARN': self._environment.CDKRoleArn, - }, - dead_letter_queue_enabled=True, - dead_letter_queue=gluedb_lf_cr_dlq, - on_failure=lambda_destination.SqsDestination(gluedb_lf_cr_dlq), - tracing=_lambda.Tracing.ACTIVE, - runtime=_lambda.Runtime.PYTHON_3_9, - ) - - glue_db_provider = cr.Provider( - self, - f'{self._environment.resourcePrefix}GlueDbCustomResourceProvider', - on_event_handler=gluedb_lf_custom_resource - ) - ssm.StringParameter( - self, - 'GlueLFCustomResourceFunctionArn', - string_value=gluedb_lf_custom_resource.function_arn, - parameter_name=f'/dataall/{self._environment.environmentUri}/cfn/custom-resources/gluehandler/lambda/arn', - ) - - ssm.StringParameter( - self, - 'GlueLFCustomResourceFunctionName', - string_value=gluedb_lf_custom_resource.function_name, - parameter_name=f'/dataall/{self._environment.environmentUri}/cfn/custom-resources/gluehandler/lambda/name', - ) - - ssm.StringParameter( - self, - 'GlueLFCustomResourceProviderServiceToken', - string_value=glue_db_provider.service_token, - parameter_name=f'/dataall/{self._environment.environmentUri}/cfn/custom-resources/gluehandler/provider/servicetoken', - ) - - # Create SNS topics for subscriptions - if self._environment.subscriptionsEnabled: - subscription_key_policy = iam.PolicyDocument( - assign_sids=True, - statements=[ - iam.PolicyStatement( - actions=[ - "kms:Encrypt", - "kms:Decrypt", - "kms:ReEncrypt*", - "kms:GenerateDataKey*", - ], - effect=iam.Effect.ALLOW, - principals=[default_role] + group_roles, - resources=["*"], - conditions={ - "StringEquals": { - "kms:ViaService": [ - f"sqs.{self._environment.region}.amazonaws.com", - f"sns.{self._environment.region}.amazonaws.com", - ] - } - } - ), - iam.PolicyStatement( - actions=[ - "kms:DescribeKey", - "kms:List*", - "kms:GetKeyPolicy", - ], - effect=iam.Effect.ALLOW, - principals=[default_role] + group_roles, - resources=["*"], - ) - ] - ) - subscription_key = kms.Key( - self, - f'dataall-env-{self._environment.environmentUri}-subscription-key', - removal_policy=RemovalPolicy.DESTROY, - alias=f'dataall-env-{self._environment.environmentUri}-subscription-key', - enable_key_rotation=True, - admins=[ - iam.ArnPrincipal(self._environment.CDKRoleArn), - ], - policy=subscription_key_policy - ) - - dlq_queue = 
sqs.Queue( - self, - f'ProducersSubscriptionsQueue-{self._environment.environmentUri}-dlq', - queue_name=f'{self._environment.resourcePrefix}-producers-dlq-{self._environment.environmentUri}', - retention_period=Duration.days(14), - encryption=sqs.QueueEncryption.KMS, - encryption_master_key=subscription_key, - ) - dlq_queue.add_to_resource_policy( - iam.PolicyStatement( - sid='Enforce TLS for all principals', - effect=iam.Effect.DENY, - principals=[ - iam.AnyPrincipal(), - ], - actions=[ - 'sqs:*', - ], - resources=[dlq_queue.queue_arn], - conditions={ - 'Bool': {'aws:SecureTransport': 'false'}, - }, - ) - ) - self.dlq = sqs.DeadLetterQueue(max_receive_count=2, queue=dlq_queue) - queue = sqs.Queue( - self, - f'ProducersSubscriptionsQueue-{self._environment.environmentUri}', - queue_name=f'{self._environment.resourcePrefix}-producers-queue-{self._environment.environmentUri}', - dead_letter_queue=self.dlq, - encryption=sqs.QueueEncryption.KMS, - encryption_master_key=subscription_key, - ) - - if self._environment.subscriptionsProducersTopicImported: - topic = sns.Topic.from_topic_arn( - self, - 'ProducersTopicImported', - f'arn:aws:sns:{self._environment.region}:{self._environment.AwsAccountId}:{self._environment.subscriptionsProducersTopicName}', - ) - else: - topic = self.create_topic( - self._environment.subscriptionsProducersTopicName, - self.dataall_central_account, - self._environment, - subscription_key - ) - - topic.add_subscription(sns_subs.SqsSubscription(queue)) - - policy = sqs.QueuePolicy( - self, - f'{self._environment.resourcePrefix}ProducersSubscriptionsQueuePolicy', - queues=[queue], - ) - - policy.document.add_statements( - iam.PolicyStatement( - principals=[iam.AccountPrincipal(self.dataall_central_account)], - effect=iam.Effect.ALLOW, - actions=[ - 'sqs:ReceiveMessage', - 'sqs:DeleteMessage', - 'sqs:ChangeMessageVisibility', - 'sqs:GetQueueUrl', - 'sqs:GetQueueAttributes', - ], - resources=[queue.queue_arn], - ), - iam.PolicyStatement( - principals=[iam.ServicePrincipal('sns.amazonaws.com')], - effect=iam.Effect.ALLOW, - actions=['sqs:SendMessage'], - resources=[queue.queue_arn], - conditions={'ArnEquals': {'aws:SourceArn': topic.topic_arn}}, - ), - iam.PolicyStatement( - sid='Enforce TLS for all principals', - effect=iam.Effect.DENY, - principals=[ - iam.AnyPrincipal(), - ], - actions=[ - 'sqs:*', - ], - resources=[queue.queue_arn], - conditions={ - 'Bool': {'aws:SecureTransport': 'false'}, - }, - ), - ) - policy.node.add_dependency(topic) - - self.create_topic( - self._environment.subscriptionsConsumersTopicName, - self.dataall_central_account, - self._environment, - subscription_key - ) - - # Create or import SageMaker Studio domain if ML Studio enabled - domain = SageMakerDomain( - stack=self, - id='SageMakerDomain', - environment=self._environment - ) - self.existing_sagemaker_domain = domain.check_existing_sagemaker_studio_domain() - if self._environment.mlStudiosEnabled and not self.existing_sagemaker_domain: - # Create dependency group - Sagemaker depends on group IAM roles - sagemaker_dependency_group = DependencyGroup() - sagemaker_dependency_group.add(default_role) - for group_role in group_roles: - sagemaker_dependency_group.add(group_role) - - sagemaker_domain = domain.create_sagemaker_domain_resources(sagemaker_principals=[default_role] + group_roles) - - sagemaker_domain.node.add_dependency(sagemaker_dependency_group) - - # print the IAM role arn for this service account - CfnOutput( - self, - f'pivotRoleName-{self._environment.environmentUri}', - 
export_name=f'pivotRoleName-{self._environment.environmentUri}', - value=self.pivot_role_name, - description='pivotRole name, helps us to distinguish between auto-created pivot roles (dataallPivotRole-cdk) and manually created pivot roles (dataallPivotRole)', - ) - TagsUtil.add_tags(self) - - CDKNagUtil.check_rules(self) - - def create_or_import_environment_admin_group_role(self): - if self._environment.EnvironmentDefaultIAMRoleImported: - default_role = iam.Role.from_role_arn( - self, - f'EnvironmentRole{self._environment.environmentUri}Imported', - self._environment.EnvironmentDefaultIAMRoleArn, - ) - else: - environment_admin_group_role = self.create_group_environment_role(group=self.environment_admins_group, id='DefaultEnvironmentRole') - return environment_admin_group_role - - def create_or_import_environment_groups_roles(self): - group: models.EnvironmentGroup - group_roles = [] - for group in self.environment_groups: - if not group.environmentIAMRoleImported: - group_role = self.create_group_environment_role(group=group, id=f'{group.environmentIAMRoleName}') - group_roles.append(group_role) - else: - iam.Role.from_role_arn( - self, - f'{group.groupUri + group.environmentIAMRoleName}', - role_arn=f'arn:aws:iam::{self._environment.AwsAccountId}:role/{group.environmentIAMRoleName}', - ) - return group_roles - - def create_group_environment_role(self, group: models.EnvironmentGroup, id: str): - - group_permissions = self.get_environment_group_permissions( - self.engine, self._environment.environmentUri, group.groupUri - ) - services_policies = ServicePolicy( - stack=self, - tag_key='Team', - tag_value=group.groupUri, - resource_prefix=self._environment.resourcePrefix, - name=f'{self._environment.resourcePrefix}-{group.groupUri}-{self._environment.environmentUri}-services-policy', - id=f'{self._environment.resourcePrefix}-{group.groupUri}-{self._environment.environmentUri}-services-policy', - role_name=group.environmentIAMRoleName, - account=self._environment.AwsAccountId, - region=self._environment.region, - environment=self._environment, - team=group, - permissions=group_permissions, - ).generate_policies() - - data_policy = DataPolicy( - stack=self, - tag_key='Team', - tag_value=group.groupUri, - resource_prefix=self._environment.resourcePrefix, - name=f'{self._environment.resourcePrefix}-{group.groupUri}-data-policy', - id=f'{self._environment.resourcePrefix}-{group.groupUri}-data-policy', - account=self._environment.AwsAccountId, - region=self._environment.region, - environment=self._environment, - team=group, - datasets=self.get_environment_group_datasets(self.engine, self._environment, group.groupUri), - ).generate_data_access_policy() - - group_role = iam.Role( - self, - id, - role_name=group.environmentIAMRoleName, - inline_policies={ - f'{group.environmentIAMRoleName}DataPolicy': data_policy.document, - }, - managed_policies=services_policies, - assumed_by=iam.CompositePrincipal( - iam.ServicePrincipal('glue.amazonaws.com'), - iam.ServicePrincipal('lambda.amazonaws.com'), - iam.ServicePrincipal('sagemaker.amazonaws.com'), - iam.ServicePrincipal('states.amazonaws.com'), - iam.ServicePrincipal('databrew.amazonaws.com'), - iam.ServicePrincipal('codebuild.amazonaws.com'), - iam.ServicePrincipal('codepipeline.amazonaws.com'), - self.pivot_role, - ), - ) - Tags.of(group_role).add('group', group.groupUri) - return group_role - - def create_default_athena_workgroup(self, output_bucket, workgroup_name): - return self.create_athena_workgroup(output_bucket, workgroup_name) - - def 
create_athena_workgroups(self, environment_groups, default_environment_bucket): - for group in environment_groups: - self.create_athena_workgroup(default_environment_bucket, group.environmentAthenaWorkGroup) - - def create_athena_workgroup(self, output_bucket, workgroup_name): - athena_workgroup_output_location = ''.join( - ['s3://', output_bucket.bucket_name, '/athenaqueries/', workgroup_name, '/'] - ) - athena_workgroup = aws_athena.CfnWorkGroup( - self, - f'AthenaWorkGroup{workgroup_name}', - name=workgroup_name, - state='ENABLED', - recursive_delete_option=True, - work_group_configuration=aws_athena.CfnWorkGroup.WorkGroupConfigurationProperty( - enforce_work_group_configuration=True, - result_configuration=aws_athena.CfnWorkGroup.ResultConfigurationProperty( - encryption_configuration=aws_athena.CfnWorkGroup.EncryptionConfigurationProperty( - encryption_option='SSE_S3', - ), - output_location=athena_workgroup_output_location, - ), - requester_pays_enabled=False, - publish_cloud_watch_metrics_enabled=False, - engine_version=aws_athena.CfnWorkGroup.EngineVersionProperty( - selected_engine_version='Athena engine version 2', - ), - ), - ) - return athena_workgroup - - def create_topic(self, construct_id, central_account, environment, kms_key): - actions = [ - 'SNS:GetTopicAttributes', - 'SNS:SetTopicAttributes', - 'SNS:AddPermission', - 'SNS:RemovePermission', - 'SNS:DeleteTopic', - 'SNS:Subscribe', - 'SNS:ListSubscriptionsByTopic', - 'SNS:Publish', - 'SNS:Receive', - ] - topic = sns.Topic( - self, - f'{construct_id}', - topic_name=f'{construct_id}', - master_key=kms_key - ) - - topic.add_to_resource_policy( - iam.PolicyStatement( - principals=[iam.AccountPrincipal(central_account)], - effect=iam.Effect.ALLOW, - actions=actions, - resources=[topic.topic_arn], - ) - ) - topic.add_to_resource_policy( - iam.PolicyStatement( - principals=[iam.AccountPrincipal(environment.AwsAccountId)], - effect=iam.Effect.ALLOW, - actions=actions, - resources=[topic.topic_arn], - ) - ) - return topic - - @staticmethod - def zip_code(assetspath, s3_key='profiler'): - logger.info('Zipping code') - shutil.make_archive(base_name=f'{assetspath}/{s3_key}', format='zip', root_dir=f'{assetspath}') - return assetspath - - def set_cr_kms_key(self, group_roles, default_role) -> kms.Key: - key_policy = iam.PolicyDocument( - assign_sids=True, - statements=[ - iam.PolicyStatement( - actions=[ - "kms:Encrypt", - "kms:Decrypt", - "kms:ReEncrypt*", - "kms:GenerateDataKey*", - ], - effect=iam.Effect.ALLOW, - principals=[ - default_role, - ] + group_roles, - resources=["*"], - conditions={ - "StringEquals": {"kms:ViaService": f"sqs.{self._environment.region}.amazonaws.com"} - } - ), - iam.PolicyStatement( - actions=[ - "kms:DescribeKey", - "kms:List*", - "kms:GetKeyPolicy", - ], - effect=iam.Effect.ALLOW, - principals=[ - default_role, - ] + group_roles, - resources=["*"], - ) - ] - ) - - kms_key = kms.Key( - self, - f'dataall-environment-{self._environment.environmentUri}-cr-key', - removal_policy=RemovalPolicy.DESTROY, - alias=f'dataall-environment-{self._environment.environmentUri}-cr-key', - enable_key_rotation=True, - admins=[ - iam.ArnPrincipal(self._environment.CDKRoleArn), - ], - policy=key_policy - ) - return kms_key - - def set_dlq(self, queue_name, kms_key) -> sqs.Queue: - dlq = sqs.Queue( - self, - f'{queue_name}-queue', - queue_name=f'{queue_name}', - retention_period=Duration.days(14), - encryption=sqs.QueueEncryption.KMS, - encryption_master_key=kms_key, - data_key_reuse=Duration.days(1), - 
removal_policy=RemovalPolicy.DESTROY, - ) - - enforce_tls_statement = iam.PolicyStatement( - sid='Enforce TLS for all principals', - effect=iam.Effect.DENY, - principals=[ - iam.AnyPrincipal(), - ], - actions=[ - 'sqs:*', - ], - resources=[dlq.queue_arn], - conditions={ - 'Bool': {'aws:SecureTransport': 'false'}, - }, - ) - - dlq.add_to_resource_policy(enforce_tls_statement) - return dlq diff --git a/backend/dataall/cdkproxy/stacks/notebook.py b/backend/dataall/cdkproxy/stacks/notebook.py deleted file mode 100644 index de50484eb..000000000 --- a/backend/dataall/cdkproxy/stacks/notebook.py +++ /dev/null @@ -1,159 +0,0 @@ -import logging -import os - -from aws_cdk import ( - aws_sagemaker as sagemaker, - aws_ec2 as ec2, - aws_kms as kms, - aws_iam as iam, - Stack, - CfnOutput, -) - -from .manager import stack -from ... import db -from ...db import models -from ...db.api import Environment -from ...utils.cdk_nag_utils import CDKNagUtil -from ...aws.handlers.sts import SessionHelper -from ...utils.runtime_stacks_tagging import TagsUtil - -logger = logging.getLogger(__name__) - - -@stack(stack='notebook') -class SagemakerNotebook(Stack): - module_name = __file__ - - def get_engine(self) -> db.Engine: - envname = os.environ.get('envname', 'local') - engine = db.get_engine(envname=envname) - return engine - - def get_target(self, target_uri) -> models.SagemakerNotebook: - engine = self.get_engine() - with engine.scoped_session() as session: - notebook = session.query(models.SagemakerNotebook).get(target_uri) - return notebook - - def get_env_group( - self, notebook: models.SagemakerNotebook - ) -> models.EnvironmentGroup: - engine = self.get_engine() - with engine.scoped_session() as session: - env = Environment.get_environment_group( - session, notebook.SamlAdminGroupName, notebook.environmentUri - ) - return env - - def __init__(self, scope, id: str, target_uri: str = None, **kwargs) -> None: - super().__init__(scope, - id, - description="Cloud formation stack of NOTEBOOK: {}; URI: {}; DESCRIPTION: {}".format( - self.get_target(target_uri=target_uri).label, - target_uri, - self.get_target(target_uri=target_uri).description, - )[:1024], - **kwargs) - - # Required for dynamic stack tagging - self.target_uri = target_uri - - notebook = self.get_target(target_uri=target_uri) - - env_group = self.get_env_group(notebook) - - cdk_exec_role = SessionHelper.get_cdk_exec_role_arn(notebook.AWSAccountId, notebook.region) - - notebook_key = kms.Key( - self, - 'NotebookKmsKey', - alias=notebook.NotebookInstanceName, - enable_key_rotation=True, - admins=[ - iam.ArnPrincipal(cdk_exec_role), - ], - policy=iam.PolicyDocument( - assign_sids=True, - statements=[ - iam.PolicyStatement( - resources=['*'], - effect=iam.Effect.ALLOW, - principals=[ - iam.ArnPrincipal(notebook.RoleArn) - ], - actions=[ - "kms:Encrypt", - "kms:Decrypt", - "kms:ReEncrypt*", - "kms:GenerateDataKey*", - "kms:DescribeKey" - ], - conditions={ - "StringEquals": {"kms:ViaService": f"sagemaker.{notebook.region}.amazonaws.com"} - } - ), - iam.PolicyStatement( - resources=['*'], - effect=iam.Effect.ALLOW, - principals=[ - iam.ArnPrincipal(notebook.RoleArn) - ], - actions=[ - "kms:DescribeKey", - "kms:List*", - "kms:GetKeyPolicy", - ] - ) - ], - ), - ) - - if not (notebook.VpcId and notebook.SubnetId): - sagemaker.CfnNotebookInstance( - self, - f'Notebook{target_uri}', - instance_type=notebook.InstanceType, - role_arn=notebook.RoleArn, - direct_internet_access='Enabled', - notebook_instance_name=notebook.NotebookInstanceName, - 
kms_key_id=notebook_key.key_id, - ) - else: - vpc = ec2.Vpc.from_lookup(self, 'NotebookVPC', vpc_id=notebook.VpcId) - security_group = ec2.SecurityGroup( - self, - f'sgNotebook{target_uri}', - vpc=vpc, - allow_all_outbound=True, - security_group_name=notebook.NotebookInstanceName, - ) - security_group.connections.allow_from( - ec2.Peer.ipv4(vpc.vpc_cidr_block), - ec2.Port.tcp(443), - 'Allow inbound HTTPS', - ) - - sagemaker.CfnNotebookInstance( - self, - f'Notebook{target_uri}', - instance_type=notebook.InstanceType, - role_arn=notebook.RoleArn, - direct_internet_access='Disabled', - subnet_id=notebook.SubnetId, - security_group_ids=[security_group.security_group_id], - notebook_instance_name=notebook.NotebookInstanceName, - kms_key_id=notebook_key.key_id, - volume_size_in_gb=notebook.VolumeSizeInGB, - ) - - CfnOutput( - self, - 'NotebookInstanceName', - export_name=f'{notebook.notebookUri}-NotebookInstanceName', - value=notebook.NotebookInstanceName, - ) - - TagsUtil.add_tags(self) - - CDKNagUtil.check_rules(self) diff --git a/backend/dataall/cdkproxy/stacks/pipeline.py b/backend/dataall/cdkproxy/stacks/pipeline.py deleted file mode 100644 index f132809c2..000000000 --- a/backend/dataall/cdkproxy/stacks/pipeline.py +++ /dev/null @@ -1,582 +0,0 @@ -import logging -import os -import shutil -import subprocess -from typing import List - - -from aws_cdk import aws_codebuild as codebuild, Stack, RemovalPolicy, CfnOutput -from aws_cdk import aws_codecommit as codecommit -from aws_cdk import aws_codepipeline as codepipeline -from aws_cdk import aws_codepipeline_actions as codepipeline_actions - -from aws_cdk import aws_iam as iam -from aws_cdk import aws_kms as kms - -from aws_cdk.aws_s3_assets import Asset -from botocore.exceptions import ClientError - -from .manager import stack -from ...aws.handlers.sts import SessionHelper -from ... import db -from ...db import models -from ...db.api import Environment, Pipeline, Dataset -from ...utils.cdk_nag_utils import CDKNagUtil -from ...utils.runtime_stacks_tagging import TagsUtil - -logger = logging.getLogger(__name__) - - -@stack("pipeline") -class PipelineStack(Stack): - """ - Create a stack that contains CDK Continuous Integration and Delivery (CI/CD) pipeline. 
- - The pipeline is based on CodePipeline pipelines - - - Defaults for source/synth - CodeCommit & cdk synth - - blueprint with DDK application code added in the CodeCommit repository - - ability to define development stages: dev, test, prod - - ability to select gitflow or trunk-based as development strategy - - Ability to connect to private artifactory to pull artifacts from at synth - - Security best practices - ensures pipeline buckets block non-SSL, and are KMS-encrypted with rotated keys - - data.all metadata as environment variables accesible at synth - - """ - - module_name = __file__ - - def get_engine(self): - envname = os.environ.get("envname", "local") - engine = db.get_engine(envname=envname) - return engine - - def get_target(self, target_uri) -> models.DataPipeline: - engine = self.get_engine() - with engine.scoped_session() as session: - return Pipeline.get_pipeline_by_uri(session, target_uri) - - def get_pipeline_environments(self, targer_uri) -> models.DataPipelineEnvironment: - engine = self.get_engine() - with engine.scoped_session() as session: - envs = Pipeline.query_pipeline_environments( - session, targer_uri - ) - return envs - - def get_pipeline_cicd_environment( - self, pipeline: models.DataPipeline - ) -> models.Environment: - envname = os.environ.get("envname", "local") - engine = db.get_engine(envname=envname) - with engine.scoped_session() as session: - return Environment.get_environment_by_uri(session, pipeline.environmentUri) - - def get_env_team(self, pipeline: models.DataPipeline) -> models.EnvironmentGroup: - engine = self.get_engine() - with engine.scoped_session() as session: - env = Environment.get_environment_group( - session, pipeline.SamlGroupName, pipeline.environmentUri - ) - return env - - def get_dataset(self, dataset_uri) -> models.Dataset: - engine = self.get_engine() - with engine.scoped_session() as session: - ds = Dataset.get_dataset_by_uri( - session, dataset_uri - ) - return ds - - def __init__(self, scope, id, target_uri: str = None, **kwargs): - kwargs.setdefault("tags", {}).update({"utility": "dataall-data-pipeline"}) - super().__init__( - scope, - id, - env=kwargs.get("env"), - stack_name=kwargs.get("stack_name"), - tags=kwargs.get("tags"), - description="Cloud formation stack of PIPELINE: {}; URI: {}; DESCRIPTION: {}".format( - self.get_target(target_uri=target_uri).label, - target_uri, - self.get_target(target_uri=target_uri).description, - )[ - :1024 - ], - ) - - # Configuration - self.target_uri = target_uri - - pipeline = self.get_target(target_uri=target_uri) - pipeline_environment = self.get_pipeline_cicd_environment(pipeline=pipeline) - pipeline_env_team = self.get_env_team(pipeline=pipeline) - # Development environments - development_environments = self.get_pipeline_environments(targer_uri=target_uri) - self.devStages = [env.stage for env in development_environments] - - # Support resources - build_role_policy = iam.Policy( - self, - f"{pipeline.name}-policy", - policy_name=f"{pipeline.name}-policy", - statements=self.make_codebuild_policy_statements( - pipeline_environment=pipeline_environment, - pipeline_env_team=pipeline_env_team, - pipeline=pipeline - ), - ) - - build_project_role = iam.Role( - self, - "PipelineRole", - role_name=pipeline.name, - inline_policies={f"Inline{pipeline.name}": build_role_policy.document}, - assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), - ) - - self.codebuild_key = kms.Key( - self, - f"{pipeline.name}-codebuild-key", - removal_policy=RemovalPolicy.DESTROY, - 
alias=f"{pipeline.name}-codebuild-key", - enable_key_rotation=True, - admins=[ - iam.ArnPrincipal(pipeline_environment.CDKRoleArn), - ], - policy=iam.PolicyDocument( - statements=[ - iam.PolicyStatement( - resources=["*"], - effect=iam.Effect.ALLOW, - principals=[ - build_project_role - ], - actions=[ - "kms:Encrypt", - "kms:Decrypt", - "kms:ReEncrypt*", - "kms:GenerateDataKey*", - ], - ), - iam.PolicyStatement( - resources=["*"], - effect=iam.Effect.ALLOW, - principals=[ - iam.ArnPrincipal(pipeline_env_team.environmentIAMRoleArn), - build_project_role - ], - actions=[ - "kms:DescribeKey", - "kms:List*", - "kms:GetKeyPolicy", - ], - ) - ], - ), - ) - - # Create CodeCommit repository and mirror blueprint code - code_dir_path = os.path.realpath( - os.path.abspath( - os.path.join( - __file__, "..", "..", "blueprints", "data_pipeline_blueprint" - ) - ) - ) - logger.info(f"code directory path = {code_dir_path}") - env_vars, aws = PipelineStack._set_env_vars(pipeline_environment) - try: - repository = PipelineStack._check_repository(aws, pipeline_environment.region, pipeline.repo) - if repository: - PipelineStack.write_ddk_json_multienvironment(path=code_dir_path, output_file="ddk.json", pipeline_environment=pipeline_environment, development_environments=development_environments) - - logger.info(f"Pipeline Repo {pipeline.repo} Exists...Handling Update") - update_cmds = [ - f'REPO_NAME={pipeline.repo}', - 'COMMITID=$(aws codecommit get-branch --repository-name ${REPO_NAME} --branch-name main --query branch.commitId --output text)', - 'aws codecommit put-file --repository-name ${REPO_NAME} --branch-name main --file-content file://ddk.json --file-path ddk.json --parent-commit-id ${COMMITID} --cli-binary-format raw-in-base64-out', - ] - - process = subprocess.run( - "; ".join(update_cmds), - text=True, - shell=True, # nosec - encoding='utf-8', - cwd=code_dir_path, - env=env_vars - ) - else: - raise Exception - except Exception as e: - PipelineStack.initialize_repo(pipeline, code_dir_path, env_vars) - - PipelineStack.write_deploy_buildspec(path=code_dir_path, output_file=f"{pipeline.repo}/deploy_buildspec.yaml") - - PipelineStack.write_ddk_json_multienvironment(path=code_dir_path, output_file=f"{pipeline.repo}/ddk.json", pipeline_environment=pipeline_environment, development_environments=development_environments) - - logger.info(f"Pipeline Repo {pipeline.repo} Does Not Exists... 
Creating Repository") - - PipelineStack.cleanup_zip_directory(code_dir_path) - - PipelineStack.zip_directory(os.path.join(code_dir_path, pipeline.repo)) - code_asset = Asset( - scope=self, id=f"{pipeline.name}-asset", path=f"{code_dir_path}/{pipeline.repo}/code.zip" - ) - - code = codecommit.CfnRepository.CodeProperty( - s3=codecommit.CfnRepository.S3Property( - bucket=code_asset.s3_bucket_name, - key=code_asset.s3_object_key, - ) - ) - - repository = codecommit.CfnRepository( - scope=self, - code=code, - id="CodecommitRepository", - repository_name=pipeline.repo, - ) - repository.apply_removal_policy(RemovalPolicy.RETAIN) - - if pipeline.devStrategy == "trunk": - codepipeline_pipeline = codepipeline.Pipeline( - scope=self, - id=pipeline.name, - pipeline_name=pipeline.name, - restart_execution_on_update=True, - ) - self.codepipeline_pipeline = codepipeline_pipeline - self.source_artifact = codepipeline.Artifact() - - codepipeline_pipeline.add_stage( - stage_name='Source', - actions=[ - codepipeline_actions.CodeCommitSourceAction( - action_name='CodeCommit', - branch='main', - output=self.source_artifact, - trigger=codepipeline_actions.CodeCommitTrigger.POLL, - repository=codecommit.Repository.from_repository_name( - self, 'source_blueprint_repo', repository_name=pipeline.repo - ), - ) - ], - ) - - for env in sorted(development_environments, key=lambda env: env.order): - buildspec = "deploy_buildspec.yaml" - build_project = codebuild.PipelineProject( - scope=self, - id=f'{pipeline.name}-build-{env.stage}', - environment=codebuild.BuildEnvironment( - privileged=True, - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, - environment_variables=PipelineStack.make_environment_variables( - pipeline=pipeline, - pipeline_environment=env, - pipeline_env_team=env.samlGroupName, - stage=env.stage, - stages=self.devStages - ), - ), - role=build_project_role, - build_spec=codebuild.BuildSpec.from_source_filename(buildspec), - encryption_key=self.codebuild_key, - ) - - self.codepipeline_pipeline.add_stage( - stage_name=f'Deploy-Stage-{env.stage}', - actions=[ - codepipeline_actions.CodeBuildAction( - action_name=f'deploy-{env.stage}', - input=self.source_artifact, - project=build_project, - outputs=[codepipeline.Artifact()], - ) - ], - ) - - # Skip manual approval for one stage pipelines and for last stage - if env.order < development_environments.count(): - self.codepipeline_pipeline.add_stage( - stage_name=f'ManualApproval-{env.stage}', - actions=[ - codepipeline_actions.ManualApprovalAction( - action_name=f'ManualApproval-{env.stage}' - ) - ], - ) - - else: - for env in development_environments: - branch_name = 'main' if (env.stage == 'prod') else env.stage - buildspec = "deploy_buildspec.yaml" - - codepipeline_pipeline = codepipeline.Pipeline( - scope=self, - id=f"{pipeline.name}-{env.stage}", - pipeline_name=f"{pipeline.name}-{env.stage}", - restart_execution_on_update=True, - ) - self.codepipeline_pipeline = codepipeline_pipeline - self.source_artifact = codepipeline.Artifact() - - codepipeline_pipeline.add_stage( - stage_name=f'Source-{env.stage}', - actions=[ - codepipeline_actions.CodeCommitSourceAction( - action_name='CodeCommit', - branch=branch_name, - output=self.source_artifact, - trigger=codepipeline_actions.CodeCommitTrigger.POLL, - repository=codecommit.Repository.from_repository_name( - self, f'source_blueprint_repo_{env.stage}', repository_name=pipeline.repo - ), - ) - ], - ) - - build_project = codebuild.PipelineProject( - scope=self, - id=f'{pipeline.name}-build-{env.stage}', - 
environment=codebuild.BuildEnvironment( - privileged=True, - build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, - environment_variables=PipelineStack.make_environment_variables( - pipeline=pipeline, - pipeline_environment=env, - pipeline_env_team=env.samlGroupName, - stage=env.stage, - stages=self.devStages - ), - ), - role=build_project_role, - build_spec=codebuild.BuildSpec.from_source_filename(buildspec), - encryption_key=self.codebuild_key, - ) - - self.codepipeline_pipeline.add_stage( - stage_name=f'Deploy-Stage-{env.stage}', - actions=[ - codepipeline_actions.CodeBuildAction( - action_name=f'deploy-{env.stage}', - input=self.source_artifact, - project=build_project, - outputs=[codepipeline.Artifact()], - ) - ], - ) - - # CloudFormation output - CfnOutput( - self, - "RepoNameOutput", - export_name=f"{pipeline.DataPipelineUri}-RepositoryName", - value=pipeline.repo, - ) - CfnOutput( - self, - "PipelineNameOutput", - export_name=f"{pipeline.DataPipelineUri}-PipelineName", - value=codepipeline_pipeline.pipeline_name, - ) - - TagsUtil.add_tags(self) - - CDKNagUtil.check_rules(self) - - PipelineStack.cleanup_zip_directory(code_dir_path) - PipelineStack.cleanup_pipeline_directory(os.path.join(code_dir_path, pipeline.repo)) - - @staticmethod - def zip_directory(path): - try: - shutil.make_archive("code", "zip", path) - shutil.move("code.zip", f"{path}/code.zip") - except Exception as e: - logger.error(f"Failed to zip repository due to: {e}") - - @staticmethod - def cleanup_zip_directory(path): - if os.path.isfile(f"{path}/code.zip"): - os.remove(f"{path}/code.zip") - else: - logger.info("Info: %s Zip not found" % f"{path}/code.zip") - - @staticmethod - def cleanup_pipeline_directory(path): - if os.path.isdir(path): - shutil.rmtree(path) - else: - logger.info("Info: %s Directory not found" % f"{path}") - - @staticmethod - def make_environment_variables( - pipeline, - pipeline_environment, - pipeline_env_team, - stage, - stages - ): - - env_vars_1 = { - "PIPELINE_URI": codebuild.BuildEnvironmentVariable(value=pipeline.DataPipelineUri), - "PIPELINE_NAME": codebuild.BuildEnvironmentVariable(value=pipeline.name), - "STAGE": codebuild.BuildEnvironmentVariable(value=stage), - "DEV_STAGES": codebuild.BuildEnvironmentVariable(value=stages), - "DEV_STRATEGY": codebuild.BuildEnvironmentVariable(value=pipeline.devStrategy), - "TEMPLATE": codebuild.BuildEnvironmentVariable(value=pipeline.template), - "ENVIRONMENT_URI": codebuild.BuildEnvironmentVariable(value=pipeline_environment.environmentUri), - "AWSACCOUNTID": codebuild.BuildEnvironmentVariable(value=pipeline_environment.AwsAccountId), - "AWSREGION": codebuild.BuildEnvironmentVariable(value=pipeline_environment.region), - "ENVTEAM_ROLENAME": codebuild.BuildEnvironmentVariable(value=pipeline_env_team), - } - env_vars = dict(env_vars_1) - return env_vars - - @staticmethod - def write_deploy_buildspec(path, output_file): - yaml = """ - version: '0.2' - env: - git-credential-helper: yes - phases: - pre_build: - commands: - - n 16.15.1 - - npm install -g aws-cdk - - pip install aws-ddk - - pip install -r requirements.txt - build: - commands: - - aws sts get-caller-identity - - ddk deploy - """ - with open(f'{path}/{output_file}', 'x') as text_file: - print(yaml, file=text_file) - - @staticmethod - def make_codebuild_policy_statements( - pipeline_environment, - pipeline_env_team, - pipeline - ) -> List[iam.PolicyStatement]: - return [ - iam.PolicyStatement( - actions=[ - "ec2:DescribeAvailabilityZones", - "secretsmanager:GetSecretValue", - 
"secretsmanager:DescribeSecret", - "ssm:GetParametersByPath", - "ssm:GetParameters", - "ssm:GetParameter", - "codebuild:CreateReportGroup", - "codebuild:CreateReport", - "codebuild:UpdateReport", - "codebuild:BatchPutTestCases", - "codebuild:BatchPutCodeCoverages", - "codecommit:ListRepositories", - "sts:AssumeRole", - "cloudformation:DescribeStacks" - ], - resources=["*"], - ), - iam.PolicyStatement( - actions=[ - "codecommit:*" - ], - resources=[f"arn:aws:codecommit:{pipeline_environment.region}:{pipeline_environment.AwsAccountId}:{pipeline.repo}"], - ) - ] - - @staticmethod - def write_ddk_json_multienvironment(path, output_file, pipeline_environment, development_environments): - json_envs = "" - for env in development_environments: - json_env = f""", - "{env.stage}": {{ - "account": "{env.AwsAccountId}", - "region": "{env.region}", - "stage": "{env.stage}", - "env_vars": {{ - "database": "example_database", - "Team": "{env.samlGroupName}" - }} - }}""" - json_envs = json_envs + json_env - - json = f"""{{ - "environments": {{ - "cicd": {{ - "account": "{pipeline_environment.AwsAccountId}", - "region": "{pipeline_environment.region}", - "stage": "cicd" - }}{json_envs} - }} -}}""" - - with open(f'{path}/{output_file}', 'w') as text_file: - print(json, file=text_file) - - def initialize_repo(pipeline, code_dir_path, env_vars): - - venv_name = ".venv" - - cmd_init = [ - f"ddk init {pipeline.repo} --generate-only", - f"cp app_multiaccount.py ./{pipeline.repo}/app.py", - f"cp ddk_app/ddk_app_stack_multiaccount.py ./{pipeline.repo}/ddk_app/ddk_app_stack.py", - f"mkdir ./{pipeline.repo}/utils", - f"cp -R utils/* ./{pipeline.repo}/utils/" - ] - - logger.info(f"Running Commands: {'; '.join(cmd_init)}") - - process = subprocess.run( - '; '.join(cmd_init), - text=True, - shell=True, # nosec - encoding='utf-8', - cwd=code_dir_path, - env=env_vars - ) - if process.returncode == 0: - logger.info("Successfully Initialized New CDK/DDK App") - return - - @staticmethod - def _set_env_vars(pipeline_environment): - aws = SessionHelper.remote_session(pipeline_environment.AwsAccountId) - env_creds = aws.get_credentials() - - env = { - 'AWS_REGION': pipeline_environment.region, - 'AWS_DEFAULT_REGION': pipeline_environment.region, - 'CURRENT_AWS_ACCOUNT': pipeline_environment.AwsAccountId, - 'envname': os.environ.get('envname', 'local'), - 'COOKIECUTTER_CONFIG': "/dataall/cdkproxy/blueprints/cookiecutter_config.yaml", - } - if env_creds: - env.update( - { - 'AWS_ACCESS_KEY_ID': env_creds.access_key, - 'AWS_SECRET_ACCESS_KEY': env_creds.secret_key, - 'AWS_SESSION_TOKEN': env_creds.token - } - ) - return env, aws - - @staticmethod - def _check_repository(aws, region, repo_name): - codecommit_client = aws.client('codecommit', region_name=region) - repository = None - logger.info(f"Checking Repository Exists: {repo_name}") - try: - repository = codecommit_client.get_repository(repositoryName=repo_name) - except ClientError as e: - if e.response['Error']['Code'] == 'RepositoryDoesNotExistException': - logger.debug(f'Repository does not exists {repo_name} %s', e) - else: - raise e - return repository if repository else None diff --git a/backend/dataall/cdkproxy/stacks/pivot_role.py b/backend/dataall/cdkproxy/stacks/pivot_role.py deleted file mode 100644 index c0b157f4a..000000000 --- a/backend/dataall/cdkproxy/stacks/pivot_role.py +++ /dev/null @@ -1,639 +0,0 @@ -from constructs import Construct -from aws_cdk import Duration, aws_iam as iam, NestedStack - - -class PivotRole(NestedStack): - def __init__(self, scope: 
Construct, construct_id: str, config, **kwargs) -> None: - super().__init__(scope, construct_id, **kwargs) - # Create Pivot IAM Role - self.pivot_role = self.create_pivot_role( - name=config['roleName'], - principal_id=config['accountId'], - external_id=config['externalId'], - env_resource_prefix=config['resourcePrefix'], - ) - # Data.All IAM Lake Formation service role creation - self.lf_service_role = iam.CfnServiceLinkedRole( - self, 'LakeFormationSLR', aws_service_name='lakeformation.amazonaws.com' - ) - - def create_pivot_role(self, name: str, principal_id: str, external_id: str, env_resource_prefix: str) -> iam.Role: - """ - Creates an IAM Role that will enable data.all to interact with this Data Account - - :param str name: Role name - :param str principal_id: AWS Account ID of central data.all - :param str external_id: External ID provided by data.all - :param str env_resource_prefix: Environment Resource Prefix provided by data.all - :returns: Created IAM Role - :rtype: iam.Role - """ - - role = iam.Role( - self, - 'DataAllPivotRole-cdk', - role_name=name, - assumed_by=iam.CompositePrincipal( - iam.ServicePrincipal('lakeformation.amazonaws.com'), - iam.ServicePrincipal('glue.amazonaws.com'), - iam.ServicePrincipal('lambda.amazonaws.com'), - ), - path='/', - max_session_duration=Duration.hours(12), - managed_policies=[ - self._create_dataall_policy0(env_resource_prefix), - self._create_dataall_policy1(env_resource_prefix), - self._create_dataall_policy2(env_resource_prefix), - self._create_dataall_policy3(env_resource_prefix, name), - ], - ) - - role.assume_role_policy.add_statements( - iam.PolicyStatement( - effect=iam.Effect.ALLOW, - principals=[iam.AccountPrincipal(account_id=principal_id)], - actions=['sts:AssumeRole'], - conditions={ - 'StringEquals': {'sts:ExternalId': external_id}, - 'StringLike': {"aws:PrincipalArn": [ - f"arn:aws:iam::{principal_id}:role/*graphql-role", - f"arn:aws:iam::{principal_id}:role/*awsworker-role", - f"arn:aws:iam::{principal_id}:role/*ecs-tasks-role" - ]} - }, - ) - ) - - return role - - def _create_dataall_policy0(self, env_resource_prefix: str) -> iam.ManagedPolicy: - """ - Creates the first managed IAM Policy required for the Pivot Role used by data.all - - :param str env_resource_prefix: Environment Resource Prefix provided by data.all - :returns: Created IAM Policy - :rtype: iam.ManagedPolicy - """ - return iam.ManagedPolicy( - self, - 'PivotRolePolicy0', - managed_policy_name=f'{env_resource_prefix}-pivotrole-cdk-policy-0', - statements=[ - # Read Buckets - iam.PolicyStatement( - sid='ReadBuckets', - effect=iam.Effect.ALLOW, - actions=[ - 's3:ListAllMyBuckets', - 's3:GetBucketLocation', - 's3:PutBucketTagging' - ], - resources=['*'], - ), - # S3 Managed Buckets - iam.PolicyStatement( - sid='ManagedBuckets', - effect=iam.Effect.ALLOW, - actions=[ - 's3:List*', - 's3:Delete*', - 's3:Get*', - 's3:Put*' - ], - resources=[f'arn:aws:s3:::{env_resource_prefix}*'], - ), - # S3 Imported Buckets - restrict resources via bucket policies - iam.PolicyStatement( - sid='ImportedBuckets', - effect=iam.Effect.ALLOW, - actions=[ - 's3:List*', - 's3:GetBucket*', - 's3:GetLifecycleConfiguration', - 's3:GetObject', - 's3:PutBucketPolicy', - 's3:PutBucketTagging', - 's3:PutObject', - 's3:PutObjectAcl', - 's3:PutBucketOwnershipControls', - ], - resources=['arn:aws:s3:::*'], - ), - # KMS - needed for imported buckets - iam.PolicyStatement( - sid='KMS', - effect=iam.Effect.ALLOW, - actions=[ - 'kms:Decrypt', - 'kms:Encrypt', - 'kms:GenerateDataKey*', - 
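# Minimal sketch, not part of the original file: the STS call the central data.all account
# is expected to make against the pivot role created above. The trust policy built in
# create_pivot_role only allows it with the configured external id and a caller role whose
# name ends in graphql-role, awsworker-role or ecs-tasks-role; the role ARN and external
# id below are placeholders.
import boto3

sts = boto3.client('sts')
credentials = sts.assume_role(
    RoleArn='arn:aws:iam::222222222222:role/dataallPivotRole-cdk',
    RoleSessionName='dataall-pivot-session',
    ExternalId='example-external-id',
)['Credentials']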
'kms:PutKeyPolicy', - 'kms:ReEncrypt*', - 'kms:TagResource', - 'kms:UntagResource', - ], - resources=['*'], - ), - iam.PolicyStatement( - sid='KMSList', - effect=iam.Effect.ALLOW, - actions=[ - 'kms:List*', - 'kms:DescribeKey', - ], - resources=['*'], - ), - # Athena - needed for Worksheets feature - iam.PolicyStatement( - sid='AthenaWorkgroups', - effect=iam.Effect.ALLOW, - actions=[ - "athena:GetQueryExecution", - "athena:GetQueryResults", - "athena:GetWorkGroup", - "athena:StartQueryExecution" - ], - resources=[f'arn:aws:athena:*:{self.account}:workgroup/{env_resource_prefix}*'], - ), - # S3 Access points - needed for access points sharing - iam.PolicyStatement( - sid='ManagedAccessPoints', - effect=iam.Effect.ALLOW, - actions=[ - 's3:GetAccessPoint', - 's3:GetAccessPointPolicy', - 's3:ListAccessPoints', - 's3:CreateAccessPoint', - 's3:DeleteAccessPoint', - 's3:GetAccessPointPolicyStatus', - 's3:DeleteAccessPointPolicy', - 's3:PutAccessPointPolicy', - ], - resources=[f'arn:aws:s3:*:{self.account}:accesspoint/*'], - ), - # Glue - needed to handle databases and tables and cross-account shares - iam.PolicyStatement( - sid='GlueCatalog', - effect=iam.Effect.ALLOW, - actions=[ - 'glue:BatchCreatePartition', - 'glue:BatchDeletePartition', - 'glue:BatchDeleteTable', - 'glue:CreateDatabase', - 'glue:CreatePartition', - 'glue:CreateTable', - 'glue:DeleteDatabase', - 'glue:DeletePartition', - 'glue:DeleteTable', - 'glue:BatchGet*', - 'glue:Get*', - 'glue:List*', - 'glue:SearchTables', - 'glue:UpdateDatabase', - 'glue:UpdatePartition', - 'glue:UpdateTable', - 'glue:TagResource', - 'glue:DeleteResourcePolicy', - 'glue:PutResourcePolicy', - ], - resources=['*'], - ), - # Glue ETL - needed to start crawler and profiling jobs - iam.PolicyStatement( - sid='GlueETL', - effect=iam.Effect.ALLOW, - actions=[ - 'glue:StartCrawler', - 'glue:StartJobRun', - 'glue:StartTrigger', - 'glue:UpdateTrigger', - 'glue:UpdateJob', - 'glue:UpdateCrawler', - ], - resources=[ - f'arn:aws:glue:*:{self.account}:crawler/{env_resource_prefix}*', - f'arn:aws:glue:*:{self.account}:job/{env_resource_prefix}*', - f'arn:aws:glue:*:{self.account}:trigger/{env_resource_prefix}*', - ], - ), - # SNS - For subscriptions - iam.PolicyStatement( - sid='SNSPublish', - effect=iam.Effect.ALLOW, - actions=[ - 'sns:Publish', - 'sns:SetTopicAttributes', - 'sns:GetTopicAttributes', - 'sns:DeleteTopic', - 'sns:Subscribe', - 'sns:TagResource', - 'sns:UntagResource', - 'sns:CreateTopic', - ], - resources=[f'arn:aws:sns:*:{self.account}:{env_resource_prefix}*'], - ), - iam.PolicyStatement( - sid='SNSList', effect=iam.Effect.ALLOW, actions=['sns:ListTopics'], resources=['*'] - ), - # SQS - support SQS queues - iam.PolicyStatement( - sid='SQSList', effect=iam.Effect.ALLOW, actions=['sqs:ListQueues'], resources=['*'] - ), - iam.PolicyStatement( - sid='SQS', - effect=iam.Effect.ALLOW, - actions=[ - 'sqs:ReceiveMessage', - 'sqs:SendMessage' - ], - resources=[f'arn:aws:sqs:*:{self.account}:{env_resource_prefix}*'], - ), - # AWS Logging Buckets - iam.PolicyStatement( - sid='AWSLoggingBuckets', - effect=iam.Effect.ALLOW, - actions=[ - 's3:PutBucketAcl', - 's3:PutBucketNotification' - ], - resources=[f'arn:aws:s3:::{env_resource_prefix}-logging-*'], - ), - # CloudWatch Metrics - iam.PolicyStatement( - sid='CWMetrics', - effect=iam.Effect.ALLOW, - actions=[ - 'cloudwatch:PutMetricData', - 'cloudwatch:GetMetricData', - 'cloudwatch:GetMetricStatistics' - ], - resources=['*'], - ), - # Logs - iam.PolicyStatement( - sid='Logs', - effect=iam.Effect.ALLOW, - 
actions=[ - 'logs:CreateLogGroup', - 'logs:CreateLogStream', - ], - resources=[ - f'arn:aws:logs:*:{self.account}:log-group:/aws/lambda/*', - f'arn:aws:logs:*:{self.account}:log-group:/{env_resource_prefix}*', - ], - ), - # Logging - iam.PolicyStatement( - sid='Logging', effect=iam.Effect.ALLOW, actions=['logs:PutLogEvents'], resources=['*'] - ), - ], - ) - - def _create_dataall_policy1(self, env_resource_prefix: str) -> iam.ManagedPolicy: - """ - Creates the second managed IAM Policy required for the Pivot Role used by data.all - - :param str env_resource_prefix: Environment Resource Prefix provided by data.all - :returns: Created IAM Policy - :rtype: iam.ManagedPolicy - """ - return iam.ManagedPolicy( - self, - 'PivotRolePolicy1', - managed_policy_name=f'{env_resource_prefix}-pivotrole-cdk-policy-1', - statements=[ - # EC2 describe needed for SageMaker - iam.PolicyStatement( - sid='EC2SG', - effect=iam.Effect.ALLOW, - actions=[ - 'ec2:DescribeSubnets', - 'ec2:DescribeSecurityGroups', - 'ec2:DescribeVpcs', - 'ec2:DescribeInstances', - 'ec2:DescribeNetworkInterfaces', - ], - resources=['*'], - ), - # SageMaker - iam.PolicyStatement( - sid='SageMakerNotebookActions', - effect=iam.Effect.ALLOW, - actions=[ - 'sagemaker:ListTags', - 'sagemaker:DescribeUserProfile', - 'sagemaker:StopNotebookInstance', - 'sagemaker:CreatePresignedNotebookInstanceUrl', - 'sagemaker:DescribeNotebookInstance', - 'sagemaker:StartNotebookInstance', - 'sagemaker:AddTags', - 'sagemaker:DescribeDomain', - 'sagemaker:CreatePresignedDomainUrl', - ], - resources=[ - f'arn:aws:sagemaker:*:{self.account}:notebook-instance/{env_resource_prefix}*', - f'arn:aws:sagemaker:*:{self.account}:domain/*', - f'arn:aws:sagemaker:*:{self.account}:user-profile/*/*', - ], - ), - iam.PolicyStatement( - sid='SageMakerNotebookInstances', - effect=iam.Effect.ALLOW, - actions=[ - 'sagemaker:ListNotebookInstances', - 'sagemaker:ListDomains', - 'sagemaker:ListApps', - 'sagemaker:DeleteApp', - ], - resources=['*'], - ), - # RAM - iam.PolicyStatement( - sid='RamTag', - effect=iam.Effect.ALLOW, - actions=['ram:TagResource'], - resources=['*'], - conditions={'ForAllValues:StringLike': {'ram:ResourceShareName': ['LakeFormation*']}}, - ), - iam.PolicyStatement( - sid='RamCreateResource', - effect=iam.Effect.ALLOW, - actions=['ram:CreateResourceShare'], - resources=['*'], - conditions={ - 'ForAllValues:StringEquals': { - 'ram:RequestedResourceType': ['glue:Table', 'glue:Database', 'glue:Catalog'] - } - }, - ), - iam.PolicyStatement( - sid='RamUpdateResource', - effect=iam.Effect.ALLOW, - actions=['ram:UpdateResourceShare'], - resources=[f'arn:aws:ram:*:{self.account}:resource-share/*'], - conditions={ - 'ForAllValues:StringLike': {'ram:ResourceShareName': ['LakeFormation*']}, - }, - ), - iam.PolicyStatement( - sid='RamAssociateResource', - effect=iam.Effect.ALLOW, - actions=[ - 'ram:AssociateResourceShare', - 'ram:DisassociateResourceShare' - ], - resources=[f'arn:aws:ram:*:{self.account}:resource-share/*'], - conditions={'ForAllValues:StringLike': {'ram:ResourceShareName': ['LakeFormation*']}}, - ), - iam.PolicyStatement( - sid='RamDeleteResource', - effect=iam.Effect.ALLOW, - actions=['ram:DeleteResourceShare'], - resources=[f'arn:aws:ram:*:{self.account}:resource-share/*'] - ), - iam.PolicyStatement( - sid='RamInvitations', - effect=iam.Effect.ALLOW, - actions=[ - 'ram:AcceptResourceShareInvitation', - 'ram:RejectResourceShareInvitation', - 'ram:EnableSharingWithAwsOrganization', - ], - resources=['*'], - ), - iam.PolicyStatement( - sid='RamRead', - 
effect=iam.Effect.ALLOW, - actions=[ - 'ram:Get*', - 'ram:List*' - ], - resources=['*'], - ), - # CloudFormation - iam.PolicyStatement( - sid='CloudFormation', - effect=iam.Effect.ALLOW, - actions=[ - "cloudformation:DeleteStack", - "cloudformation:DescribeStacks", - "cloudformation:DescribeStackEvents", - "cloudformation:DescribeStackResources" - ], - resources=[ - f'arn:aws:cloudformation:*:{self.account}:stack/{env_resource_prefix}*/*', - f'arn:aws:cloudformation:*:{self.account}:stack/CDKToolkit/*', - ], - ), - iam.PolicyStatement( - sid='CloudFormationDataPipeliens', - effect=iam.Effect.ALLOW, - actions=[ - "cloudformation:DeleteStack", - "cloudformation:DescribeStacks", - "cloudformation:DescribeStackEvents", - "cloudformation:DescribeStackResources" - ], - resources=[ - f'arn:aws:cloudformation:*:{self.account}:stack/*/*', - ], - ), - ], - ) - - def _create_dataall_policy2(self, env_resource_prefix: str) -> iam.ManagedPolicy: - """ - Creates the third managed IAM Policy required for the Pivot Role used by data.all - - :param str env_resource_prefix: Environment Resource Prefix provided by data.all - :returns: Created IAM Policy - :rtype: iam.ManagedPolicy - """ - return iam.ManagedPolicy( - self, - 'PivotRolePolicy2', - managed_policy_name=f'{env_resource_prefix}-pivotrole-cdk-policy-2', - statements=[ - # LakeFormation - iam.PolicyStatement( - sid='LakeFormation', - effect=iam.Effect.ALLOW, - actions=[ - 'lakeformation:UpdateResource', - 'lakeformation:DescribeResource', - 'lakeformation:AddLFTagsToResource', - 'lakeformation:RemoveLFTagsFromResource', - 'lakeformation:GetResourceLFTags', - 'lakeformation:ListLFTags', - 'lakeformation:CreateLFTag', - 'lakeformation:GetLFTag', - 'lakeformation:UpdateLFTag', - 'lakeformation:DeleteLFTag', - 'lakeformation:SearchTablesByLFTags', - 'lakeformation:SearchDatabasesByLFTags', - 'lakeformation:ListResources', - 'lakeformation:ListPermissions', - 'lakeformation:GrantPermissions', - 'lakeformation:BatchGrantPermissions', - 'lakeformation:RevokePermissions', - 'lakeformation:BatchRevokePermissions', - 'lakeformation:PutDataLakeSettings', - 'lakeformation:GetDataLakeSettings', - 'lakeformation:GetDataAccess', - 'lakeformation:GetWorkUnits', - 'lakeformation:StartQueryPlanning', - 'lakeformation:GetWorkUnitResults', - 'lakeformation:GetQueryState', - 'lakeformation:GetQueryStatistics', - 'lakeformation:GetTableObjects', - 'lakeformation:UpdateTableObjects', - 'lakeformation:DeleteObjectsOnCancel', - ], - resources=['*'], - ), - # QuickSight - iam.PolicyStatement( - sid='QuickSight', - effect=iam.Effect.ALLOW, - actions=[ - 'quicksight:CreateGroup', - 'quicksight:DescribeGroup', - 'quicksight:ListDashboards', - 'quicksight:DescribeDataSource', - 'quicksight:DescribeDashboard', - 'quicksight:DescribeUser', - 'quicksight:SearchDashboards', - 'quicksight:GetDashboardEmbedUrl', - 'quicksight:GenerateEmbedUrlForAnonymousUser', - 'quicksight:UpdateUser', - 'quicksight:ListUserGroups', - 'quicksight:RegisterUser', - 'quicksight:DescribeDashboardPermissions', - 'quicksight:UpdateDashboardPermissions', - 'quicksight:GetAuthCode', - 'quicksight:CreateGroupMembership', - 'quicksight:DescribeAccountSubscription', - ], - resources=[ - f'arn:aws:quicksight:*:{self.account}:group/default/*', - f'arn:aws:quicksight:*:{self.account}:user/default/*', - f'arn:aws:quicksight:*:{self.account}:datasource/*', - f'arn:aws:quicksight:*:{self.account}:user/*', - f'arn:aws:quicksight:*:{self.account}:dashboard/*', - f'arn:aws:quicksight:*:{self.account}:namespace/default', 
- f'arn:aws:quicksight:*:{self.account}:account/*', - f'arn:aws:quicksight:*:{self.account}:*', - ], - ), - iam.PolicyStatement( - sid='QuickSightSession', - effect=iam.Effect.ALLOW, - actions=['quicksight:GetSessionEmbedUrl'], - resources=['*'], - ), - ], - ) - - def _create_dataall_policy3(self, env_resource_prefix: str, role_name: str) -> iam.ManagedPolicy: - """ - Creates the fourth managed IAM Policy required for the Pivot Role used by data.all - - :param str env_resource_prefix: Environment Resource Prefix provided by data.all - :param str role_name: IAM Role name - :returns: Created IAM Policy - :rtype: iam.ManagedPolicy - """ - return iam.ManagedPolicy( - self, - 'PivotRolePolicy3', - managed_policy_name=f'{env_resource_prefix}-pivotrole-cdk-policy-3', - statements=[ - # SSM Parameter Store - iam.PolicyStatement( - sid='ParameterStore', - effect=iam.Effect.ALLOW, - actions=['ssm:GetParameter'], - resources=[ - f'arn:aws:ssm:*:{self.account}:parameter/{env_resource_prefix}/*', - f'arn:aws:ssm:*:{self.account}:parameter/dataall/*', - f'arn:aws:ssm:*:{self.account}:parameter/ddk/*', - ], - ), - # IAM - needed for consumption roles and for S3 sharing - iam.PolicyStatement( - sid='IAMListGet', - effect=iam.Effect.ALLOW, - actions=[ - 'iam:ListRoles', - 'iam:Get*' - ], resources=['*'] - ), - iam.PolicyStatement( - sid='IAMRolePolicy', - effect=iam.Effect.ALLOW, - actions=[ - 'iam:PutRolePolicy', - 'iam:DeleteRolePolicy' - ], - resources=['*'], - ), - iam.PolicyStatement( - sid="PassRole", - actions=[ - 'iam:PassRole', - ], - resources=[ - f'arn:aws:iam::{self.account}:role/{role_name}', - ], - ), - iam.PolicyStatement( - sid="PassRoleGlue", - actions=[ - 'iam:PassRole', - ], - resources=[ - f'arn:aws:iam::{self.account}:role/{env_resource_prefix}*', - ], - conditions={ - "StringEquals": { - "iam:PassedToService": [ - "glue.amazonaws.com", - ] - } - } - ), - # STS - iam.PolicyStatement( - sid='STS', - effect=iam.Effect.ALLOW, - actions=['sts:AssumeRole'], - resources=[ - f'arn:aws:iam::{self.account}:role/{env_resource_prefix}*', - f'arn:aws:iam::{self.account}:role/ddk-*', - ], - ), - # CodeCommit - used in Pipelines - iam.PolicyStatement( - sid='CodeCommit', - effect=iam.Effect.ALLOW, - actions=[ - 'codecommit:GetFile', - 'codecommit:ListBranches', - 'codecommit:GetFolder', - 'codecommit:GetCommit', - 'codecommit:GitPull', - 'codecommit:GetRepository', - 'codecommit:TagResource', - 'codecommit:UntagResource', - 'codecommit:CreateBranch', - 'codecommit:CreateCommit', - 'codecommit:CreateRepository', - 'codecommit:DeleteRepository', - 'codecommit:GitPush', - 'codecommit:PutFile', - 'codecommit:GetBranch', - ], - resources=[f'arn:aws:codecommit:*:{self.account}:{env_resource_prefix}*'], - ), - ], - ) diff --git a/backend/dataall/cdkproxy/stacks/policies/_lambda.py b/backend/dataall/cdkproxy/stacks/policies/_lambda.py deleted file mode 100644 index 8da645153..000000000 --- a/backend/dataall/cdkproxy/stacks/policies/_lambda.py +++ /dev/null @@ -1,98 +0,0 @@ -from .service_policy import ServicePolicy -from aws_cdk import aws_iam as iam - - -class Lambda(ServicePolicy): - """ - Class including all permissions needed to work with AWS Lambda. 
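# Minimal sketch of the shared ServicePolicy pattern: every service policy class deleted
# below (Lambda, AwsCICD, Cloudformation, Databrew, GlueCatalog/Glue, QuickSight,
# Sagemaker, SQS, SSM, StepFunctions) subclasses ServicePolicy and returns a list of
# iam.PolicyStatement from get_statements(). "Foo" and its "foo:*" actions are
# hypothetical; the attributes used (region, account, resource_prefix, tag_key, tag_value)
# are the ones those classes rely on.
from aws_cdk import aws_iam as iam
from .service_policy import ServicePolicy


class Foo(ServicePolicy):
    def get_statements(self):
        return [
            # listing/read-only actions are typically granted on all resources
            iam.PolicyStatement(actions=['foo:List*'], resources=['*']),
            # create actions are restricted to team-prefixed resources carrying the team tag
            iam.PolicyStatement(
                actions=['foo:Create*', 'foo:TagResource'],
                resources=[f'arn:aws:foo:{self.region}:{self.account}:*/{self.resource_prefix}*'],
                conditions={'StringEquals': {f'aws:RequestTag/{self.tag_key}': [self.tag_value]}},
            ),
        ]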
- It allows data.all users to: - - List Lambda resources - - Create and manage team Lambda resources - - Log Lambda executions - """ - def get_statements(self): - statements = [ - iam.PolicyStatement( - # sid="ListLambda", - actions=[ - 'lambda:List*', - 'lambda:GetLayer*', - 'lambda:GetAccountSettings', - 'lambda:GetEventSourceMapping', - 'lambda:CreateEventSourceMapping', - 'lambda:CreateCodeSigningConfig', - ], - resources=['*'], - ), - iam.PolicyStatement( - # sid="GenericLambdaFunctions", - actions=[ - 'lambda:UpdateFunctionCodeSigningConfig', - 'lambda:UpdateEventSourceMapping', - ], - resources=[ - f'arn:aws:lambda:{self.region}:{self.account}:function:{self.resource_prefix}*', - f'arn:aws:lambda:{self.region}:{self.account}:function:{self.resource_prefix}*:*', - f'arn:aws:lambda:{self.region}:{self.account}:code-signing-config:*', - f'arn:aws:lambda:{self.region}:{self.account}:event-source-mapping:*', - ], - ), - iam.PolicyStatement( - # sid="CreateTeamLambda", - actions=[ - 'lambda:CreateFunction', - 'lambda:TagResource', - ], - resources=[ - f'arn:aws:lambda:{self.region}:{self.account}:function:{self.resource_prefix}*', - f'arn:aws:lambda:{self.region}:{self.account}:function:{self.resource_prefix}*:*', - ], - conditions={ - 'StringEquals': { - f'aws:RequestTag/{self.tag_key}': [self.tag_value] - } - }, - ), - iam.PolicyStatement( - # sid="ManageTeamLambda", - not_actions=[ - 'lambda:CreateFunction', - 'lambda:TagResource', - 'lambda:UntagResource', - ], - resources=[ - f'arn:aws:lambda:{self.region}:{self.account}:function:{self.resource_prefix}*', - f'arn:aws:lambda:{self.region}:{self.account}:function:{self.resource_prefix}*:*' - ], - conditions={ - 'StringEquals': { - f'aws:ResourceTag/{self.tag_key}': [self.tag_value] - } - }, - ), - iam.PolicyStatement( - # sid="ManageLambdaLayers", - actions=[ - 'lambda:PublishLayerVersion', - 'lambda:DeleteLayerVersion', - ], - resources=[ - f'arn:aws:lambda:{self.region}:{self.account}:layer:{self.resource_prefix}*', - f'arn:aws:lambda:{self.region}:{self.account}:layer:{self.resource_prefix}*:*', - ] - ), - iam.PolicyStatement( - # sid="LoggingLambda", - actions=[ - 'logs:CreateLogGroup', - 'logs:CreateLogStream', - 'logs:PutLogEvents', - ], - effect=iam.Effect.ALLOW, - resources=[ - f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws/lambda/*', - f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws/lambda/*:log-stream:*', - ], - ) - ] - return statements diff --git a/backend/dataall/cdkproxy/stacks/policies/aws_cicd.py b/backend/dataall/cdkproxy/stacks/policies/aws_cicd.py deleted file mode 100644 index 1f6f5dae0..000000000 --- a/backend/dataall/cdkproxy/stacks/policies/aws_cicd.py +++ /dev/null @@ -1,174 +0,0 @@ -from .service_policy import ServicePolicy -from aws_cdk import aws_iam as iam - - -class AwsCICD(ServicePolicy): - """ - Class including all permissions needed to work with AWS CICD services: CodeCommit, CodePipeline and CodeBuild. 
- It allows data.all users to: - - Create and manage CodeBuild, CodeCommit and CodePipeline resources for the team - - Create an S3 Bucket for codepipeline prefixed by "codepipeline-" - - Read/Write to and from S3 Buckets prefixed by "codepipeline-" - """ - def get_statements(self): - statements = [ - iam.PolicyStatement( - # sid="GenericCodeCommit", - actions=[ - 'codecommit:List*', - 'codecommit:CreateApprovalRuleTemplate', - 'codecommit:UpdateApprovalRuleTemplateName', - 'codecommit:GetApprovalRuleTemplate', - 'codecommit:DeleteApprovalRuleTemplate', - 'codecommit:UpdateApprovalRuleTemplateContent', - 'codecommit:UpdateApprovalRuleTemplateDescription', - ], - resources=['*'], - ), - iam.PolicyStatement( - # sid="TagCICD", - actions=[ - "codecommit:TagResource", - "codepipeline:TagResource" - ], - resources=[ - f'arn:aws:codecommit:{self.region}:{self.account}:{self.resource_prefix}*', - f'arn:aws:codepipeline:{self.region}:{self.account}:{self.resource_prefix}*', - f'arn:aws:codepipeline:{self.region}:{self.account}:actiontype:/*/*/*', - f'arn:aws:codepipeline:{self.region}:{self.account}:webhook:{self.resource_prefix}', - ], - conditions={ - 'StringEquals': { - f'aws:RequestTag/{self.tag_key}': [self.tag_value], - }, - }, - ), - iam.PolicyStatement( - # sid="AllCodecommitTeamRepo", - not_actions=[ - "codecommit:TagResource", - "codecommit:UntagResource", - ], - resources=[ - f'arn:aws:codecommit:{self.region}:{self.account}:{self.resource_prefix}*' - ], - conditions={ - 'StringEquals': { - f'aws:ResourceTag/{self.tag_key}': [self.tag_value], - }, - }, - ), - iam.PolicyStatement( - # sid="GenericCodePipeline", - actions=[ - 'codepipeline:AcknowledgeJob', - 'codepipeline:AcknowledgeThirdPartyJob', - 'codepipeline:GetThirdPartyJobDetails', - 'codepipeline:GetJobDetails', - 'codepipeline:GetActionType', - 'codepipeline:ListActionTypes', - 'codepipeline:ListPipelines', - 'codepipeline:PollForThirdPartyJobs', - 'codepipeline:PutThirdPartyJobSuccessResult', - 'codepipeline:PutThirdPartyJobFailureResult', - 'codepipeline:PutJobFailureResult', - 'codepipeline:PutJobSuccessResult', - ], - resources=['*'], - ), - iam.PolicyStatement( - # sid="AllCodepipelineTeamRepo", - not_actions=[ - "codepipeline:TagResource", - "codepipeline:UntagResource", - ], - resources=[ - f'arn:aws:codepipeline:{self.region}:{self.account}:{self.resource_prefix}*/*/*', - f'arn:aws:codepipeline:{self.region}:{self.account}:actiontype:/*/*/*', - f'arn:aws:codepipeline:{self.region}:{self.account}:{self.resource_prefix}*', - f'arn:aws:codepipeline:{self.region}:{self.account}:{self.resource_prefix}*/*', - f'arn:aws:codepipeline:{self.region}:{self.account}:webhook:{self.resource_prefix}', - ], - conditions={ - 'StringEquals': { - f'aws:ResourceTag/{self.tag_key}': [self.tag_value] - } - }, - ), - iam.PolicyStatement( - # sid="CodePipelineCreateS3Bucket", - effect=iam.Effect.ALLOW, - actions=[ - 's3:CreateBucket', - 's3:ListBucket', - 's3:PutBucketPublicAccessBlock', - 's3:GetObject', - 's3:PutObject', - 's3:DeleteObject' - ], - resources=[ - f"arn:aws:s3:::codepipeline-{self.region}-{self.account}", - f"arn:aws:s3:::codepipeline-{self.region}-{self.account}/{self.resource_prefix}*" - ], - ), - iam.PolicyStatement( - # sid="GenericCodeBuild", - actions=[ - 'codebuild:ListCuratedEnvironmentImages', - 'codebuild:ListReportGroups', - 'codebuild:ListSourceCredentials', - 'codebuild:ListRepositories', - 'codebuild:ListSharedProjects', - 'codebuild:ListBuildBatches', - 'codebuild:ListSharedReportGroups', - 
'codebuild:ImportSourceCredentials', - 'codebuild:ListReports', - 'codebuild:ListBuilds', - 'codebuild:DeleteOAuthToken', - 'codebuild:ListProjects', - 'codebuild:DeleteSourceCredentials', - 'codebuild:PersistOAuthToken', - 'codebuild:ListConnectedOAuthAccounts', - ], - resources=['*'], - ), - iam.PolicyStatement( - # sid="TagCodebuildTeamRepo", - actions=[ - 'codebuild:CreateProject', - 'codebuild:UpdateProject', - 'codebuild:UpdateProjectVisibility', - 'codebuild:CreateReportGroup', - 'codebuild:UpdateReportGroup', - ], - resources=[ - f'arn:aws:codebuild:{self.region}:{self.account}:project/{self.resource_prefix}*', - f'arn:aws:codebuild:{self.region}:{self.account}:report-group/{self.resource_prefix}*', - ], - conditions={ - 'StringEquals': { - f'aws:RequestTag/{self.tag_key}': [self.tag_value] - } - }, - ), - iam.PolicyStatement( - # sid="AllCodebuildTeamRepo", - not_actions=[ - 'codebuild:CreateProject', - 'codebuild:UpdateProject', - 'codebuild:UpdateProjectVisibility', - 'codebuild:CreateReportGroup', - 'codebuild:UpdateReportGroup', - ], - resources=[ - f'arn:aws:codebuild:{self.region}:{self.account}:project/{self.resource_prefix}*', - f'arn:aws:codebuild:{self.region}:{self.account}:report-group/{self.resource_prefix}*', - ], - conditions={ - 'StringEquals': { - f'aws:ResourceTag/{self.tag_key}': [self.tag_value] - } - }, - ) - ] - return statements diff --git a/backend/dataall/cdkproxy/stacks/policies/cloudformation.py b/backend/dataall/cdkproxy/stacks/policies/cloudformation.py deleted file mode 100644 index 468efb531..000000000 --- a/backend/dataall/cdkproxy/stacks/policies/cloudformation.py +++ /dev/null @@ -1,53 +0,0 @@ -from .service_policy import ServicePolicy -from aws_cdk import aws_iam as iam - - -class Cloudformation(ServicePolicy): - """ - Class including all permissions needed to work with AWS CloudFormation. 
- It allows data.all users to: - - Create/Delete CloudFormation team stacks - - Create an S3 Bucket for codepipeline prefixed by "cf-templates-" - - Read/Write to and from S3 Buckets prefixed by "cf-templates-" - """ - def get_statements(self): - statements = [ - iam.PolicyStatement( - # sid="GenericCloudFormation", - actions=[ - 'cloudformation:EstimateTemplateCost', - 'cloudformation:ListStacks', - 'cloudformation:ValidateTemplate', - 'cloudformation:GetTemplateSummary', - 'cloudformation:ListExports', - 'cloudformation:ListImports', - 'cloudformation:DescribeAccountLimits', - 'cloudformation:DescribeStackDriftDetectionStatus', - 'cloudformation:Cancel*', - 'cloudformation:Continue*', - 'cloudformation:CreateChangeSet', - 'cloudformation:ExecuteChangeSet', - 'cloudformation:CreateStackSet', - 'cloudformation:Get*', - 'cloudformation:Describe*', - 'cloudformation:List*', - 'cloudformation:CreateUploadBucket', - ], - resources=['*'], - ), - iam.PolicyStatement( - # sid="DeleteTeamCloudFormation", - actions=[ - 'cloudformation:DeleteStack', - ], - resources=[ - f'arn:aws:cloudformation:{self.region}:{self.account}:*/{self.resource_prefix}*' - ], - conditions={ - 'StringEquals': { - f'aws:ResourceTag/{self.tag_key}': [self.tag_value] - } - }, - ), - ] - return statements diff --git a/backend/dataall/cdkproxy/stacks/policies/data_policy.py b/backend/dataall/cdkproxy/stacks/policies/data_policy.py deleted file mode 100644 index 3508c8106..000000000 --- a/backend/dataall/cdkproxy/stacks/policies/data_policy.py +++ /dev/null @@ -1,156 +0,0 @@ -import logging -from typing import List - -from aws_cdk import aws_iam as iam -from ....aws.handlers.kms import KMS - -from ....db import models - -logger = logging.getLogger() - - -class DataPolicy: - """ - Class including all permissions needed to work with AWS Lambda. 
- It allows data.all users to: - - - """ - def __init__( - self, - stack, - id, - name, - account, - region, - tag_key, - tag_value, - resource_prefix, - environment: models.Environment, - team: models.EnvironmentGroup, - datasets: [models.Dataset], - ): - self.stack = stack - self.id = id - self.name = name - self.account = account - self.region = region - self.tag_key = tag_key - self.tag_value = tag_value - self.resource_prefix = resource_prefix - self.environment = environment - self.team = team - self.datasets = datasets - - def generate_data_access_policy(self) -> iam.Policy: - """ - Creates aws_iam.Policy based on team datasets - """ - statements: List[iam.PolicyStatement] = self.get_statements() - - policy: iam.Policy = iam.Policy( - self.stack, - self.id, - policy_name=self.name, - statements=statements, - ) - logger.debug(f'Final generated policy {policy.document.to_json()}') - - return policy - - def get_statements(self): - statements = [ - iam.PolicyStatement( - sid="ListAll", - actions=[ - "s3:ListAllMyBuckets", - "s3:ListAccessPoints", - "s3:GetBucketLocation", - 'kms:ListAliases', - 'kms:ListKeys', - ], - resources=["*"], - effect=iam.Effect.ALLOW - ) - ] - - self.set_allowed_s3_buckets_statements(statements) - self.set_allowed_kms_keys_statements(statements) - - return statements - - def set_allowed_s3_buckets_statements(self, statements): - allowed_buckets = [] - allowed_access_points = [] - if self.datasets: - dataset: models.Dataset - for dataset in self.datasets: - allowed_buckets.append(f'arn:aws:s3:::{dataset.S3BucketName}') - allowed_access_points.append(f'arn:aws:s3:{dataset.region}:{dataset.AwsAccountId}:accesspoint/{dataset.datasetUri}*') - allowed_buckets_content = [f"{bucket}/*" for bucket in allowed_buckets] - statements.extend( - [ - iam.PolicyStatement( - sid="ListDatasetsBuckets", - actions=[ - "s3:ListBucket", - "s3:GetBucketLocation" - ], - resources=allowed_buckets, - effect=iam.Effect.ALLOW, - ), - iam.PolicyStatement( - sid="ReadWriteDatasetsBuckets", - actions=[ - "s3:PutObject", - "s3:PutObjectAcl", - "s3:GetObject", - "s3:GetObjectAcl", - "s3:GetObjectVersion", - "s3:DeleteObject" - ], - effect=iam.Effect.ALLOW, - resources=allowed_buckets_content, - ), - iam.PolicyStatement( - sid="ReadAccessPointsDatasetBucket", - actions=[ - 's3:GetAccessPoint', - 's3:GetAccessPointPolicy', - 's3:GetAccessPointPolicyStatus', - ], - effect=iam.Effect.ALLOW, - resources=allowed_access_points, - ) - ] - ) - - def set_allowed_kms_keys_statements(self, statements): - allowed_buckets_kms_keys = [] - if self.datasets: - dataset: models.Dataset - for dataset in self.datasets: - if dataset.imported and dataset.importedKmsKey: - key_id = KMS.get_key_id( - account_id=dataset.AwsAccountId, - region=dataset.region, - key_alias=f"alias/{dataset.KmsAlias}" - ) - if key_id: - allowed_buckets_kms_keys.append(f"arn:aws:kms:{dataset.region}:{dataset.AwsAccountId}:key/{key_id}") - if len(allowed_buckets_kms_keys): - statements.extend( - [ - iam.PolicyStatement( - sid="KMSImportedDatasetAccess", - actions=[ - "kms:Decrypt", - "kms:Encrypt", - "kms:ReEncrypt*", - "kms:DescribeKey", - "kms:GenerateDataKey" - ], - effect=iam.Effect.ALLOW, - resources=allowed_buckets_kms_keys - ) - ] - ) diff --git a/backend/dataall/cdkproxy/stacks/policies/databrew.py b/backend/dataall/cdkproxy/stacks/policies/databrew.py deleted file mode 100644 index 0c6c81878..000000000 --- a/backend/dataall/cdkproxy/stacks/policies/databrew.py +++ /dev/null @@ -1,56 +0,0 @@ -from .service_policy import ServicePolicy 
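# The imported-dataset branch above resolves a KMS alias to a key id through the data.all
# KMS helper; a roughly equivalent direct boto3 call looks like this (sketch only, the
# alias and region are placeholders).
import boto3

kms = boto3.client('kms', region_name='eu-west-1')
key_id = kms.describe_key(KeyId='alias/example-imported-key')['KeyMetadata']['KeyId']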
-from aws_cdk import aws_iam as iam - - -class Databrew(ServicePolicy): - """ - Class including all permissions needed to work with AWS DataBrew. - """ - def get_statements(self): - statements = [ - iam.PolicyStatement( - # sid="DataBrewGeneric", - actions=['databrew:List*'], - resources=['*'] - ), - iam.PolicyStatement( - # sid="DataBrewRecipes", - actions=[ - 'databrew:BatchDeleteRecipeVersion', - 'databrew:*Recipe', - ], - resources=[ - f'arn:aws:databrew:{self.region}:{self.account}:recipe/{self.resource_prefix}*' - ], - ), - iam.PolicyStatement( - # sid="DataBrewManageTeamResources", - not_actions=[ - 'databrew:Create*', - 'databrew:TagResource', - 'databrew:UntagResource', - ], - resources=[ - f'arn:aws:databrew:{self.region}:{self.account}:*/{self.resource_prefix}*' - ], - conditions={ - 'StringEquals': { - f'aws:ResourceTag/{self.tag_key}': [self.tag_value] - } - }, - ), - iam.PolicyStatement( - # sid="DataBrewCreateTeamResources", - actions=[ - 'databrew:Create*', - 'databrew:TagResource', - ], - resources=[ - f'arn:aws:databrew:{self.region}:{self.account}:*/{self.resource_prefix}*' - ], - conditions={ - 'StringEquals': {f'aws:RequestTag/{self.tag_key}': [self.tag_value]} - }, - ), - ] - return statements diff --git a/backend/dataall/cdkproxy/stacks/policies/glue.py b/backend/dataall/cdkproxy/stacks/policies/glue.py deleted file mode 100644 index 899ca92a6..000000000 --- a/backend/dataall/cdkproxy/stacks/policies/glue.py +++ /dev/null @@ -1,194 +0,0 @@ -from .service_policy import ServicePolicy -from aws_cdk import aws_iam as iam - - -class GlueCatalog(ServicePolicy): - """ - Class including all permissions needed to work with AWS Glue Catalog. - """ - def get_statements(self): - statements = [ - iam.PolicyStatement( - # sid="GlueLFReadData", - effect=iam.Effect.ALLOW, - actions=[ - "lakeformation:GetDataAccess", - "glue:GetTable", - "glue:GetTables", - "glue:SearchTables", - "glue:GetDatabase", - "glue:GetDatabases", - "glue:GetPartitions", - "lakeformation:GetResourceLFTags", - "lakeformation:ListLFTags", - "lakeformation:GetLFTag", - "lakeformation:SearchTablesByLFTags", - "lakeformation:SearchDatabasesByLFTags" - ], - resources=["*"], - ), - iam.PolicyStatement( - # sid="GlueManageCatalog", - actions=[ - 'glue:CreateConnection', - 'glue:CreateDatabase', - 'glue:CreatePartition', - 'glue:CreateTable', - 'glue:CreateUserDefinedFunction', - 'glue:DeleteConnection', - 'glue:DeleteDatabase', - 'glue:DeleteTable', - 'glue:DeleteTableVersion', - 'glue:DeleteUserDefinedFunction', - 'glue:UpdateConnection', - 'glue:UpdateDatabase', - 'glue:UpdatePartition', - 'glue:UpdateTable', - 'glue:UpdateUserDefinedFunction', - 'glue:BatchCreatePartition', - 'glue:BatchDeleteConnection', - 'glue:BatchDeletePartition', - 'glue:BatchDeleteTable', - 'glue:BatchDeleteTableVersion', - 'glue:BatchGetPartition', - ], - resources=[ - f'arn:aws:glue:{self.region}:{self.account}:userDefinedFunction/{self.resource_prefix}*/*', - f'arn:aws:glue:{self.region}:{self.account}:database/{self.resource_prefix}*', - f'arn:aws:glue:{self.region}:{self.account}:catalog', - f'arn:aws:glue:{self.region}:{self.account}:table/{self.resource_prefix}*/*', - f'arn:aws:glue:{self.region}:{self.account}:connection/{self.resource_prefix}*', - ], - ) - ] - return statements - - -class Glue(ServicePolicy): - """ - Class including all permissions needed to work with AWS Glue ETL. 
- """ - def get_statements(self): - statements = [ - iam.PolicyStatement( - # sid="ListBucketProfilingGlue", - actions=[ - "s3:ListBucket", - ], - effect=iam.Effect.ALLOW, - resources=[f'arn:aws:s3:::{self.environment.EnvironmentDefaultBucketName}'], - conditions={"StringEquals": { - "s3:prefix": ["", "profiling/", "profiling/code/"], - "s3:delimiter": ["/"]}} - ), - iam.PolicyStatement( - # sid="ReadEnvironmentBucketProfilingGlue", - actions=[ - "s3:GetObject", - "s3:GetObjectAcl", - "s3:GetObjectVersion", - ], - resources=[ - f'arn:aws:s3:::{self.environment.EnvironmentDefaultBucketName}/profiling/code/*'], - effect=iam.Effect.ALLOW, - ), - iam.PolicyStatement( - # sid="GlueList", - effect=iam.Effect.ALLOW, - actions=[ - 'glue:Get*', - 'glue:List*', - 'glue:BatchGet*', - ], - resources=["*"], - ), - iam.PolicyStatement( - # sid="GlueCreateS3Bucket", - effect=iam.Effect.ALLOW, - actions=[ - 's3:CreateBucket', - 's3:ListBucket', - 's3:PutBucketPublicAccessBlock' - ], - resources=[f'arn:aws:s3:::aws-glue-assets-{self.account}-{self.region}'], - ), - iam.PolicyStatement( - # sid="GlueReadWriteS3Bucket", - actions=[ - 's3:GetObject', - 's3:PutObject', - 's3:DeleteObject' - ], - effect=iam.Effect.ALLOW, - resources=[ - f'arn:aws:s3:::aws-glue-assets-{self.account}-{self.region}/{self.resource_prefix}/{self.team.groupUri}/', - f'arn:aws:s3:::aws-glue-assets-{self.account}-{self.region}/{self.resource_prefix}/{self.team.groupUri}/*', - ], - ), - iam.PolicyStatement( - # sid="GlueCreate", - effect=iam.Effect.ALLOW, - actions=[ - 'glue:CreateDevEndpoint', - 'glue:CreateCrawler', - 'glue:CreateJob', - 'glue:CreateTrigger', - 'glue:TagResource' - ], - resources=[ - f'arn:aws:glue:{self.region}:{self.account}:crawler/{self.resource_prefix}*', - f'arn:aws:glue:{self.region}:{self.account}:job/{self.resource_prefix}*', - f'arn:aws:glue:{self.region}:{self.account}:devEndpoint/{self.resource_prefix}*', - f'arn:aws:glue:{self.region}:{self.account}:catalog', - f'arn:aws:glue:{self.region}:{self.account}:trigger/{self.resource_prefix}*', - f'arn:aws:glue:{self.region}:{self.account}:table/{self.resource_prefix}*/*', - ], - conditions={ - 'StringEquals': {f'aws:RequestTag/{self.tag_key}': [self.tag_value]} - } - ), - iam.PolicyStatement( - # sid="GlueManageGlueResources", - effect=iam.Effect.ALLOW, - not_actions=[ - 'glue:CreateDevEndpoint', - 'glue:CreateTrigger', - 'glue:CreateJob', - 'glue:CreateCrawler', - ], - resources=[ - f'arn:aws:glue:{self.region}:{self.account}:devEndpoint/{self.resource_prefix}*', - f'arn:aws:glue:{self.region}:{self.account}:trigger/{self.resource_prefix}*', - f'arn:aws:glue:{self.region}:{self.account}:job/{self.resource_prefix}*', - f'arn:aws:glue:{self.region}:{self.account}:crawler/{self.resource_prefix}*' - ], - conditions={ - 'StringEquals': { - f'aws:resourceTag/{self.tag_key}': [self.tag_value] - } - }, - ), - iam.PolicyStatement( - # sid="SupportGluePermissions", - effect=iam.Effect.ALLOW, - actions=[ - 'glue:*Classifier', - 'glue:CreateScript', - ], - resources=['*'], - ), - iam.PolicyStatement( - # sid="LoggingGlue", - actions=[ - 'logs:CreateLogGroup', - 'logs:CreateLogStream', - 'logs:PutLogEvents', - ], - effect=iam.Effect.ALLOW, - resources=[ - f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws-glue/*', - f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws-glue/*:log-stream:*', - ], - ) - ] - return statements diff --git a/backend/dataall/cdkproxy/stacks/policies/quicksight.py b/backend/dataall/cdkproxy/stacks/policies/quicksight.py deleted file 
mode 100644 index 3690b0f95..000000000 --- a/backend/dataall/cdkproxy/stacks/policies/quicksight.py +++ /dev/null @@ -1,28 +0,0 @@ -from aws_cdk import aws_iam as iam - -from .service_policy import ServicePolicy - - -class QuickSight(ServicePolicy): - """ - Class including all permissions needed to work with Amazon Quicksight. - It allows data.all users to: - - - """ - def get_statements(self): - return [ - iam.PolicyStatement( - # sid="QuicksightList", - effect=iam.Effect.ALLOW, - actions=['quicksight:List*'], - resources=['*'], - ), - iam.PolicyStatement( - # sid="QuicksightManageTeamResources", - effect=iam.Effect.ALLOW, - actions=['quicksight:*'], - resources=[ - f'arn:aws:quicksight:{self.region}:{self.account}:*/{self.resource_prefix}-{self.team.groupUri}*' - ], - ), - ] diff --git a/backend/dataall/cdkproxy/stacks/policies/sagemaker.py b/backend/dataall/cdkproxy/stacks/policies/sagemaker.py deleted file mode 100644 index 8d6a08d2b..000000000 --- a/backend/dataall/cdkproxy/stacks/policies/sagemaker.py +++ /dev/null @@ -1,205 +0,0 @@ -from .service_policy import ServicePolicy -from aws_cdk import aws_iam as iam - - -class Sagemaker(ServicePolicy): - """ - Class including all permissions needed to work with Amazon SageMaker. - - Allow creation and management of SageMaker Notebooks only if tagged with team tag - - DO NOT allow creation of domain because this is handled in the environment stack - - DO NOT allow creation of user-profiles because this is handled in the ML Studio stack - - Allow management of domains and user-profiles tagged with team tag - - Allow any action besides the above listed ones on resources that are not notebooks, domains, apps and user-profiles - - Allow support permissions on ECR, Service Catalog and logging - """ - def get_statements(self): - statements = [ - iam.PolicyStatement( - effect=iam.Effect.ALLOW, - actions=['sagemaker:AddTags'], - resources=['*'], - conditions={ - 'StringEquals': { - f'aws:ResourceTag/{self.tag_key}': [self.tag_value], - f'aws:RequestTag/{self.tag_key}': [self.tag_value], - }, - }, - ), - iam.PolicyStatement( - effect=iam.Effect.ALLOW, - actions=[ - 'sagemaker:List*', - 'sagemaker:Describe*', - 'sagemaker:BatchGet*', - 'sagemaker:BatchDescribe*', - 'sagemaker:Search', - 'sagemaker:RenderUiTemplate', - 'sagemaker:GetSearchSuggestions', - 'sagemaker:QueryLineage', - 'sagemaker:GetSagemakerServicecatalogPortfolioStatus', - 'sagemaker:CreateNotebookInstanceLifecycleConfig', - 'sagemaker:DeleteNotebookInstanceLifecycleConfig', - ], - resources=['*'], - ), - # SageMaker Notebooks permissions - iam.PolicyStatement( - # sid="SageMakerCreateTaggedResourcesNotebooks", - effect=iam.Effect.ALLOW, - actions=['sagemaker:CreateNotebookInstance'], - resources=[ - f'arn:aws:sagemaker:{self.region}:{self.account}:notebook-instance/{self.resource_prefix}*', - - ], - conditions={ - 'StringEquals': { - f'aws:RequestTag/{self.tag_key}': [self.tag_value], - f'aws:ResourceTag/{self.tag_key}': [self.tag_value] - }, - }, - ), - iam.PolicyStatement( - # sid="SageMakerCreatePresignedNotebookInstanceUrl", - effect=iam.Effect.ALLOW, - actions=['sagemaker:CreatePresignedNotebookInstanceUrl'], - resources=[ - f'arn:aws:sagemaker:{self.region}:{self.account}:notebook-instance/{self.resource_prefix}*', - ], - conditions={ - 'StringEquals': { - f'sagemaker:ResourceTag/{self.tag_key}': [self.tag_value] - }, - }, - ), - iam.PolicyStatement( - # sid="SageMakerManageResourcesNotebooks", - effect=iam.Effect.ALLOW, - actions=[ - 'sagemaker:*NotebookInstance', - ], - 
resources=[ - f'arn:aws:sagemaker:{self.region}:{self.account}:notebook-instance/{self.resource_prefix}*', - ], - conditions={ - 'StringEquals': { - f'aws:ResourceTag/{self.tag_key}': [self.tag_value] - }, - }, - ), - # SageMaker Studio permissions - iam.PolicyStatement( - # sid="SageMakerManageTeamResourcesMLStudio", - effect=iam.Effect.ALLOW, - actions=[ - 'sagemaker:DeleteDomain', - 'sagemaker:DeleteUserProfile', - 'sagemaker:UpdateDomain', - 'sagemaker:UpdateUserProfile', - ], - resources=[ - f'arn:aws:sagemaker:{self.region}:{self.account}:domain/*', - f'arn:aws:sagemaker:{self.region}:{self.account}:user-profile/*/*', - ], - conditions={ - 'StringEquals': { - f'aws:ResourceTag/{self.tag_key}': [self.tag_value] - } - }, - ), - # For everything that is not domains and user-profiles we allow permissions if the resource is tagged - # Deny on creation of domains and users, generic allow for prefixed and tagged resources - # allow for apps (cannot be tagged) and special tag needed for CreatePresignedDomainUrl - iam.PolicyStatement( - # sid="SageMakerDenyCreateDomainsUsers", - effect=iam.Effect.DENY, - actions=['sagemaker:Create*'], - resources=[ - f'arn:aws:sagemaker:{self.region}:{self.account}:domain/*', - f'arn:aws:sagemaker:{self.region}:{self.account}:user-profile/*/*', - ], - ), - iam.PolicyStatement( - # sid="SageMakerCreateGenericResources", - effect=iam.Effect.ALLOW, - actions=['sagemaker:Create*'], - not_resources=[ - f'arn:aws:sagemaker:{self.region}:{self.account}:*/{self.resource_prefix}*', - f'arn:aws:sagemaker:{self.region}:{self.account}:*/{self.resource_prefix}*/*', - ], - conditions={ - 'StringEquals': { - f'aws:ResourceTag/{self.tag_key}': [self.tag_value], - f'aws:RequestTag/{self.tag_key}': [self.tag_value], - }, - }, - ), - iam.PolicyStatement( - # sid="SageMakerApps", - effect=iam.Effect.ALLOW, - actions=[ - 'sagemaker:CreateApp', - 'sagemaker:DeleteApp' - ], - resources=[f'arn:aws:sagemaker:{self.region}:{self.account}:app/*/*'] - ), - iam.PolicyStatement( - # sid="SageMakerCreatePresignedDomainUrl", - effect=iam.Effect.ALLOW, - actions=['sagemaker:CreatePresignedDomainUrl'], - resources=[f'arn:aws:sagemaker:{self.region}:{self.account}:user-profile/*/*'], - conditions={ - 'StringEquals': { - f'sagemaker:ResourceTag/{self.tag_key}': [self.tag_value] - }, - }, - ), - iam.PolicyStatement( - # sid="SageMakerManageGenericResources", - effect=iam.Effect.ALLOW, - actions=[ - 'sagemaker:Delete*', - 'sagemaker:Update*', - 'sagemaker:Start*', - 'sagemaker:Stop*', - 'sagemaker:InvokeEndpoint', - 'sagemaker:InvokeEndpointAsync' - ], - resources=[ - f'arn:aws:sagemaker:{self.region}:{self.account}:*/{self.resource_prefix}*', - f'arn:aws:sagemaker:{self.region}:{self.account}:*/{self.resource_prefix}*/*', - ], - conditions={ - 'StringEquals': { - f'aws:ResourceTag/{self.tag_key}': [self.tag_value], - }, - }, - ), - # Logging and support permissions - iam.PolicyStatement( - # sid="SageMakerLogging", - effect=iam.Effect.ALLOW, - actions=[ - 'logs:CreateLogGroup', - 'logs:CreateLogStream', - 'logs:PutLogEvents' - ], - resources=[ - f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws/sagemaker/*', - f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws/sagemaker/*:log-stream:*', - ] - ), - iam.PolicyStatement( - # sid="SageMakerSupport", - effect=iam.Effect.ALLOW, - actions=[ - 'ecr:GetAuthorizationToken', - 'ecr:BatchCheckLayerAvailability', - 'ecr:GetDownloadUrlForLayer', - 'ecr:BatchGetImage', - 'servicecatalog:ListAcceptedPortfolioShares', - 
'servicecatalog:ListPrincipalsForPortfolio', - ], - resources=['*'] - ) - ] - return statements diff --git a/backend/dataall/cdkproxy/stacks/policies/sqs.py b/backend/dataall/cdkproxy/stacks/policies/sqs.py deleted file mode 100644 index fcfe43dda..000000000 --- a/backend/dataall/cdkproxy/stacks/policies/sqs.py +++ /dev/null @@ -1,57 +0,0 @@ -from .service_policy import ServicePolicy -from aws_cdk import aws_iam - - -class SQS(ServicePolicy): - """ - Class including all permissions needed to work with AWS SQS queues. - """ - def get_statements(self): - - statements = [ - aws_iam.PolicyStatement( - # sid='SQSRead', - effect=aws_iam.Effect.ALLOW, - actions=[ - "sqs:ListQueues", - ], - resources=["*"] - ), - aws_iam.PolicyStatement( - # sid='SQSCreate', - effect=aws_iam.Effect.ALLOW, - actions=[ - "sqs:CreateQueue", - "sqs:TagQueue", - ], - resources=[f"arn:aws:sqs:*:{self.account}:{self.resource_prefix}*"], - conditions={ - 'StringEquals': { - f'aws:RequestTag/{self.tag_key}': [self.tag_value] - } - } - ), - aws_iam.PolicyStatement( - # sid='SQSManageTeamQueue', - effect=aws_iam.Effect.ALLOW, - actions=[ - "sqs:GetQueueUrl", - "sqs:DeleteQueue", - "sqs:GetQueueAttributes", - "sqs:SetQueueAttributes", - "sqs:ListQueueTags", - "sqs:ListDeadLetterSourceQueues", - "sqs:SendMessage", - "sqs:ReceiveMessage", - "sqs:DeleteMessage", - "sqs:ChangeMessageVisibility", - ], - resources=[f"arn:aws:sqs:*:{self.account}:{self.resource_prefix}*"], - conditions={ - 'StringEquals': { - f'aws:ResourceTag/{self.tag_key}': [self.tag_value] - } - } - ) - ] - return statements diff --git a/backend/dataall/cdkproxy/stacks/policies/ssm.py b/backend/dataall/cdkproxy/stacks/policies/ssm.py deleted file mode 100644 index 8d86de43a..000000000 --- a/backend/dataall/cdkproxy/stacks/policies/ssm.py +++ /dev/null @@ -1,54 +0,0 @@ -from .service_policy import ServicePolicy -from aws_cdk import aws_iam - - -class SSM(ServicePolicy): - """ - Class including all permissions needed to work with AWS SSM Parameter Store. - """ - - def get_statements(self): - statements = [ - aws_iam.PolicyStatement( - # sid="SSMReadAll", - effect=aws_iam.Effect.ALLOW, - actions=[ - "ssm:DescribeParameters", - ], - resources=["*"], - ), - aws_iam.PolicyStatement( - # sid='CreateTeamParameters', - effect=aws_iam.Effect.ALLOW, - actions=[ - 'ssm:AddTagsToResource' - ], - resources=[f"arn:aws:ssm:*:{self.account}:parameter/{self.resource_prefix}*"], - conditions={ - 'StringEquals': { - f'aws:RequestTag/{self.tag_key}': [self.tag_value] - } - }, - ), - aws_iam.PolicyStatement( - # sid='ManageTeamParameters', - effect=aws_iam.Effect.ALLOW, - actions=[ - 'ssm:PutParameter', - 'ssm:DeleteParameter', - 'ssm:GetParameterHistory', - 'ssm:GetParametersByPath', - 'ssm:GetParameters', - 'ssm:GetParameter', - 'ssm:DeleteParameters', - 'ssm:ListTagsForResource', - ], - resources=[f"arn:aws:ssm:*:{self.account}:parameter/{self.resource_prefix}*"], - conditions={ - 'StringEquals': { - f'aws:ResourceTag/{self.tag_key}': [self.tag_value] - } - }, - ) - ] - return statements diff --git a/backend/dataall/cdkproxy/stacks/policies/stepfunctions.py b/backend/dataall/cdkproxy/stacks/policies/stepfunctions.py deleted file mode 100644 index a80c29f46..000000000 --- a/backend/dataall/cdkproxy/stacks/policies/stepfunctions.py +++ /dev/null @@ -1,65 +0,0 @@ -from aws_cdk import aws_iam as aws_iam - -from .service_policy import ServicePolicy - - -class StepFunctions(ServicePolicy): - """ - Class including all permissions needed to work with AWS Step Functions. 
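
The deleted service policies (SageMaker, SQS, SSM, Step Functions) all scope team access the same way: resource names must start with the environment resource prefix, creation is allowed only when the request carries the team tag, and ongoing management is allowed only on resources that already carry it. A minimal sketch of that pattern, distilled from the statements above (illustrative only; the function name, its parameters and the Step Functions actions are placeholders standing in for the ServicePolicy attributes, not part of this diff):

from aws_cdk import aws_iam as iam

def team_scoped_statements(region, account, resource_prefix, tag_key, tag_value):
    # Create only when the new resource is tagged with the team tag
    create = iam.PolicyStatement(
        effect=iam.Effect.ALLOW,
        actions=['states:CreateStateMachine', 'states:TagResource'],
        resources=[f'arn:aws:states:{region}:{account}:stateMachine:{resource_prefix}*'],
        conditions={'StringEquals': {f'aws:RequestTag/{tag_key}': [tag_value]}},
    )
    # Manage only resources that already carry the team tag
    manage = iam.PolicyStatement(
        effect=iam.Effect.ALLOW,
        actions=['states:Delete*', 'states:Describe*', 'states:Start*'],
        resources=[f'arn:aws:states:{region}:{account}:stateMachine:{resource_prefix}*'],
        conditions={'StringEquals': {f'aws:ResourceTag/{tag_key}': [tag_value]}},
    )
    return [create, manage]
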
- """ - def get_statements(self): - return [ - aws_iam.PolicyStatement( - # sid='ListMonitorStepFunctions', - effect=aws_iam.Effect.ALLOW, - actions=[ - 'states:ListStateMachines', - 'states:ListActivities', - 'states:SendTaskFailure', - 'states:SendTaskSuccess', - 'states:SendTaskHeartbeat', - ], - resources=['*'], - ), - aws_iam.PolicyStatement( - # sid='CreateTeamStepFunctions', - effect=aws_iam.Effect.ALLOW, - actions=[ - 'states:CreateStateMachine', - 'states:UpdateStateMachine', - 'states:CreateActivity', - 'states:TagResource' - ], - resources=[ - f'arn:aws:states:{self.region}:{self.account}:stateMachine:{self.resource_prefix}*', - f'arn:aws:states:{self.region}:{self.account}:activity:{self.resource_prefix}*', - ], - conditions={ - 'StringEquals': { - f'aws:RequestTag/{self.tag_key}': [self.tag_value] - } - }, - ), - aws_iam.PolicyStatement( - # sid='ManageTeamStepFunctions', - effect=aws_iam.Effect.ALLOW, - actions=[ - 'states:Delete*', - 'states:Describe*', - 'states:Get*', - 'states:List*', - 'states:Start*', - 'states:StopExecution' - ], - resources=[ - f'arn:aws:states:{self.region}:{self.account}:execution:{self.resource_prefix}*:*', - f'arn:aws:states:{self.region}:{self.account}:activity:{self.resource_prefix}*', - f'arn:aws:states:{self.region}:{self.account}:stateMachine:{self.resource_prefix}*' - ], - conditions={ - 'StringEquals': { - f'aws:ResourceTag/{self.tag_key}': [self.tag_value] - } - }, - ), - ] diff --git a/backend/dataall/cdkproxy/stacks/redshift_cluster.py b/backend/dataall/cdkproxy/stacks/redshift_cluster.py deleted file mode 100644 index ee7839fc2..000000000 --- a/backend/dataall/cdkproxy/stacks/redshift_cluster.py +++ /dev/null @@ -1,189 +0,0 @@ -import json -import logging -import os - -from aws_cdk import ( - aws_ec2 as ec2, - aws_redshift_alpha as redshift, - aws_ec2, - aws_kms, - aws_secretsmanager, - aws_iam, - aws_s3, - RemovalPolicy, - Duration, - Stack, -) -from aws_cdk.aws_secretsmanager import SecretStringGenerator - -from .manager import stack -from ... 
import db -from ...db import models -from ...db.api import Environment -from ...utils.cdk_nag_utils import CDKNagUtil -from ...utils.runtime_stacks_tagging import TagsUtil - -logger = logging.getLogger(__name__) - - -@stack(stack='redshift') -class RedshiftStack(Stack): - module_name = __file__ - - def get_engine(self) -> db.Engine: - return db.get_engine(envname=os.environ.get('envname', 'local')) - - def get_target(self, target_uri): - engine = self.get_engine() - with engine.scoped_session() as session: - cluster: models.RedshiftCluster = session.query(models.RedshiftCluster).get( - target_uri - ) - environment: models.Environment = session.query(models.Environment).get( - cluster.environmentUri - ) - return cluster, environment - - def get_env_group(self, cluster: models.RedshiftCluster) -> models.EnvironmentGroup: - engine = self.get_engine() - with engine.scoped_session() as session: - env = Environment.get_environment_group( - session, cluster.SamlGroupName, cluster.environmentUri - ) - return env - - def __init__(self, scope, id: str, target_uri: str = None, **kwargs) -> None: - super().__init__(scope, - id, - description="Cloud formation stack of REDSHIFT CLUSTER: {}; URI: {}; DESCRIPTION: {}".format( - self.get_target(target_uri=target_uri)[0].label, - target_uri, - self.get_target(target_uri=target_uri)[0].description, - )[:1024], - **kwargs) - - # Required for dynamic stack tagging - self.target_uri = target_uri - - cluster, environment = self.get_target(target_uri=target_uri) - - env_group = self.get_env_group(cluster) - - if not cluster.imported: - vpc = aws_ec2.Vpc.from_lookup( - self, 'vpcRedshiftcluster', vpc_id=cluster.vpc - ) - - security_group = aws_ec2.SecurityGroup( - self, - f'sg{cluster.name}', - vpc=vpc, - allow_all_outbound=True, - security_group_name=cluster.name, - ) - - key = aws_kms.Key( - self, - f'key{cluster.name}', - removal_policy=RemovalPolicy.RETAIN, - alias=f'{cluster.name}', - enable_key_rotation=True, - ) - - cluster_parameter_group = redshift.ClusterParameterGroup( - self, - 'RedshiftClusterParameterGroup', - description=f'{cluster.name} parameter group', - parameters={ - 'enable_user_activity_logging': 'true', - 'require_ssl': 'true', - }, - ) - - cluster_subnet_group = redshift.ClusterSubnetGroup( - self, - cluster.name, - description=f'Redshift Cluster {cluster.name} subnet group', - vpc=vpc, - removal_policy=RemovalPolicy.DESTROY, - ) - - master_secret = redshift.DatabaseSecret( - self, - f'{environment.resourcePrefix}-msredshift-{cluster.clusterUri}'[:23], - username=cluster.masterUsername, - ) - master_secret.add_rotation_schedule( - id='msRot', - automatically_after=Duration.days(90), - hosted_rotation=aws_secretsmanager.HostedRotation.redshift_single_user(), - ) - redshift_login = redshift.Login( - master_username=master_secret.secret_value_from_json( - 'username' - ).to_string(), - master_password=master_secret.secret_value_from_json('password'), - ) - redshift_role = aws_iam.Role.from_role_arn( - self, 'RedshiftRole', role_arn=env_group.environmentIAMRoleArn - ) - redshift_cluster = redshift.Cluster( - self, - 'RedshiftCluster', - cluster_name=cluster.name, - master_user=redshift_login, - vpc=vpc, - default_database_name=cluster.masterDatabaseName, - cluster_type=redshift.ClusterType.SINGLE_NODE - if cluster.numberOfNodes == 1 - else redshift.ClusterType.MULTI_NODE, - number_of_nodes=None - if cluster.numberOfNodes == 1 - else cluster.numberOfNodes, - node_type=redshift.NodeType(cluster.nodeType.replace('.', '_').upper()), - 
port=cluster.port, - roles=[redshift_role], - publicly_accessible=False, - encrypted=True, - encryption_key=key, - parameter_group=cluster_parameter_group, - security_groups=[ - security_group, - ], - subnet_group=cluster_subnet_group, - logging_bucket=aws_s3.Bucket.from_bucket_name( - self, - 'EnvLoggingBucket', - f'{environment.EnvironmentDefaultBucketName}', - ), - logging_key_prefix=f'redshift_logs/{cluster.name}/', - ) - - else: - redshift.Cluster.from_cluster_attributes( - self, - 'ImportedRedshiftCluster', - cluster_name=cluster.name, - cluster_endpoint_address=cluster.endpoint, - cluster_endpoint_port=cluster.port, - ) - - dh_user_secret = aws_secretsmanager.Secret( - self, - 'UserSecret', - secret_name=cluster.datahubSecret, - generate_secret_string=SecretStringGenerator( - secret_string_template=json.dumps({'username': cluster.databaseUser}), - generate_string_key='password', - exclude_punctuation=True, - ), - ) - dh_user_secret.add_rotation_schedule( - id='rt', - automatically_after=Duration.days(90), - hosted_rotation=aws_secretsmanager.HostedRotation.redshift_single_user(), - ) - - TagsUtil.add_tags(self) - - CDKNagUtil.check_rules(self) diff --git a/backend/dataall/cdkproxy/stacks/sagemakerstudio.py b/backend/dataall/cdkproxy/stacks/sagemakerstudio.py deleted file mode 100644 index e1767b5cd..000000000 --- a/backend/dataall/cdkproxy/stacks/sagemakerstudio.py +++ /dev/null @@ -1,305 +0,0 @@ -import logging -import os -from aws_cdk import ( - cloudformation_include as cfn_inc, - aws_ec2 as ec2, - aws_iam as iam, - aws_kms as kms, - aws_logs as logs, - aws_sagemaker as sagemaker, - aws_ssm as ssm, - RemovalPolicy, - Stack -) -from botocore.exceptions import ClientError -from .manager import stack -from ... import db -from ...db import models -from ...db.api import Environment -from ...aws.handlers.parameter_store import ParameterStoreManager -from ...aws.handlers.sts import SessionHelper -from ...aws.handlers.sagemaker_studio import SagemakerStudio -from ...aws.handlers.ec2 import EC2 -from ...utils.cdk_nag_utils import CDKNagUtil -from ...utils.runtime_stacks_tagging import TagsUtil - -logger = logging.getLogger(__name__) - - -class SageMakerDomain: - def __init__( - self, - stack, - id, - environment: models.Environment, - ): - self.stack = stack - self.id = id - self.environment = environment - - def check_existing_sagemaker_studio_domain(self): - logger.info('Check if there is an existing sagemaker studio domain in the account') - try: - logger.info('check sagemaker studio domain created as part of data.all environment stack.') - cdk_look_up_role_arn = SessionHelper.get_cdk_look_up_role_arn( - accountid=self.environment.AwsAccountId, region=self.environment.region - ) - dataall_created_domain = ParameterStoreManager.client( - AwsAccountId=self.environment.AwsAccountId, region=self.environment.region, role=cdk_look_up_role_arn - ).get_parameter(Name=f'/dataall/{self.environment.environmentUri}/sagemaker/sagemakerstudio/domain_id') - return False - except ClientError as e: - logger.info(f'check sagemaker studio domain created outside of data.all. 
Parameter data.all not found: {e}') - existing_domain = SagemakerStudio.get_sagemaker_studio_domain( - AwsAccountId=self.environment.AwsAccountId, region=self.environment.region, role=cdk_look_up_role_arn - ) - return existing_domain.get('DomainId', False) - - def create_sagemaker_domain_resources(self, sagemaker_principals): - logger.info('Creating SageMaker base resources..') - cdk_look_up_role_arn = SessionHelper.get_cdk_look_up_role_arn( - accountid=self.environment.AwsAccountId, region=self.environment.region - ) - existing_default_vpc = EC2.check_default_vpc_exists( - AwsAccountId=self.environment.AwsAccountId, region=self.environment.region, role=cdk_look_up_role_arn - ) - if existing_default_vpc: - logger.info("Using default VPC for Sagemaker Studio domain") - # Use default VPC - initial configuration (to be migrated) - vpc = ec2.Vpc.from_lookup(self.stack, 'VPCStudio', is_default=True) - subnet_ids = [private_subnet.subnet_id for private_subnet in vpc.private_subnets] - subnet_ids += [public_subnet.subnet_id for public_subnet in vpc.public_subnets] - subnet_ids += [isolated_subnet.subnet_id for isolated_subnet in vpc.isolated_subnets] - security_groups = [] - else: - logger.info("Default VPC not found, Exception. Creating a VPC for SageMaker resources...") - # Create VPC with 3 Public Subnets and 3 Private subnets wit NAT Gateways - log_group = logs.LogGroup( - self.stack, - f'SageMakerStudio{self.environment.name}', - log_group_name=f'/{self.environment.resourcePrefix}/{self.environment.name}/vpc/sagemakerstudio', - retention=logs.RetentionDays.ONE_MONTH, - removal_policy=RemovalPolicy.DESTROY, - ) - vpc_flow_role = iam.Role( - self.stack, 'FlowLog', - assumed_by=iam.ServicePrincipal('vpc-flow-logs.amazonaws.com') - ) - vpc = ec2.Vpc( - self.stack, - "SageMakerVPC", - max_azs=3, - cidr="10.10.0.0/16", - subnet_configuration=[ - ec2.SubnetConfiguration( - subnet_type=ec2.SubnetType.PUBLIC, - name="Public", - cidr_mask=24 - ), - ec2.SubnetConfiguration( - subnet_type=ec2.SubnetType.PRIVATE_WITH_NAT, - name="Private", - cidr_mask=24 - ), - ], - enable_dns_hostnames=True, - enable_dns_support=True, - ) - ec2.FlowLog( - self.stack, "StudioVPCFlowLog", - resource_type=ec2.FlowLogResourceType.from_vpc(vpc), - destination=ec2.FlowLogDestination.to_cloud_watch_logs(log_group, vpc_flow_role) - ) - # setup security group to be used for sagemaker studio domain - sagemaker_sg = ec2.SecurityGroup( - self.stack, - "SecurityGroup", - vpc=vpc, - description="Security Group for SageMaker Studio", - ) - - sagemaker_sg.add_ingress_rule(sagemaker_sg, ec2.Port.all_traffic()) - security_groups = [sagemaker_sg.security_group_id] - subnet_ids = [private_subnet.subnet_id for private_subnet in vpc.private_subnets] - - vpc_id = vpc.vpc_id - - sagemaker_domain_role = iam.Role( - self.stack, - 'RoleForSagemakerStudioUsers', - assumed_by=iam.ServicePrincipal('sagemaker.amazonaws.com'), - role_name='RoleSagemakerStudioUsers', - managed_policies=[ - iam.ManagedPolicy.from_managed_policy_arn( - self.stack, - id='SagemakerFullAccess', - managed_policy_arn='arn:aws:iam::aws:policy/AmazonSageMakerFullAccess', - ), - iam.ManagedPolicy.from_managed_policy_arn( - self.stack, id='S3FullAccess', managed_policy_arn='arn:aws:iam::aws:policy/AmazonS3FullAccess' - ), - ], - ) - - sagemaker_domain_key = kms.Key( - self.stack, - 'SagemakerDomainKmsKey', - alias='SagemakerStudioDomain', - enable_key_rotation=True, - admins=[ - iam.ArnPrincipal(self.environment.CDKRoleArn) - ], - policy=iam.PolicyDocument( - assign_sids=True, - 
statements=[ - iam.PolicyStatement( - actions=[ - "kms:Encrypt", - "kms:Decrypt", - "kms:ReEncrypt*", - "kms:GenerateDataKey*", - "kms:CreateGrant" - ], - effect=iam.Effect.ALLOW, - principals=[ - sagemaker_domain_role, - iam.ArnPrincipal(self.environment.CDKRoleArn) - ] + sagemaker_principals, - resources=["*"], - conditions={ - "StringEquals": { - "kms:ViaService": [ - f"sagemaker.{self.environment.region}.amazonaws.com", - f"elasticfilesystem.{self.environment.region}.amazonaws.com", - f"ec2.{self.environment.region}.amazonaws.com", - f"s3.{self.environment.region}.amazonaws.com" - ] - } - } - ), - iam.PolicyStatement( - actions=[ - "kms:DescribeKey", - "kms:List*", - "kms:GetKeyPolicy", - ], - effect=iam.Effect.ALLOW, - principals=[ - sagemaker_domain_role, - ] + sagemaker_principals, - resources=["*"], - ) - ], - ), - ) - - sagemaker_domain = sagemaker.CfnDomain( - self.stack, - 'SagemakerStudioDomain', - domain_name=f'SagemakerStudioDomain-{self.environment.region}-{self.environment.AwsAccountId}', - auth_mode='IAM', - default_user_settings=sagemaker.CfnDomain.UserSettingsProperty( - execution_role=sagemaker_domain_role.role_arn, - security_groups=security_groups, - sharing_settings=sagemaker.CfnDomain.SharingSettingsProperty( - notebook_output_option='Allowed', - s3_kms_key_id=sagemaker_domain_key.key_id, - s3_output_path=f's3://sagemaker-{self.environment.region}-{self.environment.AwsAccountId}', - ), - ), - vpc_id=vpc_id, - subnet_ids=subnet_ids, - app_network_access_type='VpcOnly', - kms_key_id=sagemaker_domain_key.key_id, - ) - - ssm.StringParameter( - self.stack, - 'SagemakerStudioDomainId', - string_value=sagemaker_domain.attr_domain_id, - parameter_name=f'/dataall/{self.environment.environmentUri}/sagemaker/sagemakerstudio/domain_id', - ) - return sagemaker_domain - - -@stack(stack='sagemakerstudiouserprofile') -class SagemakerStudioUserProfile(Stack): - module_name = __file__ - - def get_engine(self) -> db.Engine: - ENVNAME = os.environ.get('envname', 'local') - engine = db.get_engine(envname=ENVNAME) - return engine - - def get_target(self, target_uri) -> models.SagemakerStudioUserProfile: - engine = self.get_engine() - with engine.scoped_session() as session: - sm_user_profile = session.query(models.SagemakerStudioUserProfile).get( - target_uri - ) - return sm_user_profile - - def get_env(self, environment_uri) -> models.Environment: - engine = self.get_engine() - with engine.scoped_session() as session: - env = session.query(models.Environment).get(environment_uri) - return env - - def get_env_group( - self, sm_user_profile: models.SagemakerStudioUserProfile - ) -> models.EnvironmentGroup: - engine = self.get_engine() - with engine.scoped_session() as session: - env = Environment.get_environment_group( - session, - sm_user_profile.SamlAdminGroupName, - sm_user_profile.environmentUri, - ) - return env - - def __init__(self, scope, id: str, target_uri: str = None, **kwargs) -> None: - super().__init__(scope, - id, - description="Cloud formation stack of SM STUDIO PROFILE: {}; URI: {}; DESCRIPTION: {}".format( - self.get_target(target_uri=target_uri).label, - target_uri, - self.get_target(target_uri=target_uri).description, - )[:1024], - **kwargs) - - # Required for dynamic stack tagging - self.target_uri = target_uri - - sm_user_profile: models.SagemakerStudioUserProfile = self.get_target( - target_uri=self.target_uri - ) - - env_group = self.get_env_group(sm_user_profile) - - # SageMaker Studio User Profile - cfn_template_user_profile = os.path.join( - 
os.path.dirname(__file__), '..', 'cfnstacks', 'sagemaker-user-template.yaml' - ) - user_profile_parameters = dict( - sagemaker_domain_id=sm_user_profile.sagemakerStudioDomainID, - user_profile_name=sm_user_profile.sagemakerStudioUserProfileNameSlugify, - execution_role=env_group.environmentIAMRoleArn, - ) - logger.info(f'Creating the user profile {user_profile_parameters}') - - my_sagemaker_studio_user_template = cfn_inc.CfnInclude( - self, - f'SagemakerStudioUserProfile{self.target_uri}', - template_file=cfn_template_user_profile, - parameters=user_profile_parameters, - ) - - self.user_profile_arn = ( - my_sagemaker_studio_user_template.get_resource('SagemakerUser') - .get_att('UserProfileArn') - .to_string() - ) - - TagsUtil.add_tags(self) - - CDKNagUtil.check_rules(self) diff --git a/backend/dataall/core/__init__.py b/backend/dataall/core/__init__.py new file mode 100644 index 000000000..94060c980 --- /dev/null +++ b/backend/dataall/core/__init__.py @@ -0,0 +1,11 @@ +"""The package contains the core functionality that is required by data.all to work correctly""" +from dataall.core import ( + permissions, + stacks, + notifications, + cognito_groups, + environment, + organizations, + tasks, + vpc, +) diff --git a/backend/dataall/cdkproxy/assets/__init__.py b/backend/dataall/core/activity/__init__.py similarity index 100% rename from backend/dataall/cdkproxy/assets/__init__.py rename to backend/dataall/core/activity/__init__.py diff --git a/backend/dataall/cdkproxy/blueprints/data_pipeline_blueprint/ddk_app/__init__.py b/backend/dataall/core/activity/db/__init__.py similarity index 100% rename from backend/dataall/cdkproxy/blueprints/data_pipeline_blueprint/ddk_app/__init__.py rename to backend/dataall/core/activity/db/__init__.py diff --git a/backend/dataall/core/activity/db/activity_models.py b/backend/dataall/core/activity/db/activity_models.py new file mode 100644 index 000000000..d0a76c447 --- /dev/null +++ b/backend/dataall/core/activity/db/activity_models.py @@ -0,0 +1,13 @@ +from sqlalchemy import Column, String + +from dataall.base.db import Base +from dataall.base.db import Resource, utils + + +class Activity(Resource, Base): + __tablename__ = 'activity' + activityUri = Column(String, primary_key=True, default=utils.uuid('activity')) + targetUri = Column(String, nullable=False) + targetType = Column(String, nullable=False) + action = Column(String, nullable=False) + summary = Column(String, nullable=False) diff --git a/backend/dataall/core/cognito_groups/__init__.py b/backend/dataall/core/cognito_groups/__init__.py new file mode 100644 index 000000000..cda15ecf1 --- /dev/null +++ b/backend/dataall/core/cognito_groups/__init__.py @@ -0,0 +1 @@ +from dataall.core.cognito_groups import api diff --git a/backend/dataall/core/cognito_groups/api/__init__.py b/backend/dataall/core/cognito_groups/api/__init__.py new file mode 100644 index 000000000..b79d57556 --- /dev/null +++ b/backend/dataall/core/cognito_groups/api/__init__.py @@ -0,0 +1,3 @@ +from . 
import input_types, queries, resolvers, types + +__all__ = ['resolvers', 'input_types', 'types', 'queries'] diff --git a/backend/dataall/core/cognito_groups/api/input_types.py b/backend/dataall/core/cognito_groups/api/input_types.py new file mode 100644 index 000000000..728f353d1 --- /dev/null +++ b/backend/dataall/core/cognito_groups/api/input_types.py @@ -0,0 +1,18 @@ +from dataall.base.api import gql + +GroupFilter = gql.InputType( + name='GroupFilter', + arguments=[ + gql.Argument('term', gql.String), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + ], +) + +CognitoGroupFilter = gql.InputType( + name='CognitoGroupFilter', + arguments=[ + gql.Argument(name='type', type=gql.String), + gql.Argument(name='uri', type=gql.String), + ], +) diff --git a/backend/dataall/core/cognito_groups/api/queries.py b/backend/dataall/core/cognito_groups/api/queries.py new file mode 100644 index 000000000..c33677b1a --- /dev/null +++ b/backend/dataall/core/cognito_groups/api/queries.py @@ -0,0 +1,18 @@ +from dataall.base.api import gql +from dataall.core.cognito_groups.api.resolvers import get_group, list_cognito_groups + +getGroup = gql.QueryField( + name='getGroup', + args=[gql.Argument(name='groupUri', type=gql.NonNullableType(gql.String))], + type=gql.Ref('Group'), + resolver=get_group, +) + +listCognitoGroups = gql.QueryField( + name='listCognitoGroups', + args=[ + gql.Argument(name='filter', type=gql.Ref('CognitoGroupFilter')), + ], + type=gql.ArrayType(gql.Ref('CognitoGroup')), + resolver=list_cognito_groups +) diff --git a/backend/dataall/core/cognito_groups/api/resolvers.py b/backend/dataall/core/cognito_groups/api/resolvers.py new file mode 100644 index 000000000..c332a550b --- /dev/null +++ b/backend/dataall/core/cognito_groups/api/resolvers.py @@ -0,0 +1,73 @@ +import os +import logging + +from dataall.core.cognito_groups.aws.cognito import Cognito +from dataall.core.cognito_groups.db.cognito_group_models import Group +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.organizations.db.organization_repositories import Organization +from dataall.core.permissions.db.tenant_policy_repositories import TenantPolicy +from dataall.base.db import exceptions + +log = logging.getLogger() + + +def resolve_group_environment_permissions(context, source, environmentUri): + if not source: + return None + with context.engine.scoped_session() as session: + return EnvironmentService.list_group_permissions( + session=session, + uri=environmentUri, + group_uri=source.groupUri + ) + + +def resolve_group_tenant_permissions(context, source): + if not source: + return None + with context.engine.scoped_session() as session: + return TenantPolicy.list_group_tenant_permissions( + session=session, + username=context.username, + groups=context.groups, + uri=source.groupUri, + data=None, + check_perm=True, + ) + + +def get_group(context, source, groupUri): + if not groupUri: + exceptions.RequiredParameter('groupUri') + return Group(groupUri=groupUri, name=groupUri, label=groupUri) + + +def list_cognito_groups(context, source, filter: dict = None): + envname = os.getenv('envname', 'local') + if envname in ['dkrcompose']: + return [{"groupName": 'Engineers'}, {"groupName": 'Scientists'}, {"groupName": 'Requesters'}, {"groupName": 'Producers'}, {"groupName": 'Consumers'}] + current_region = os.getenv('AWS_REGION', 'eu-west-1') + groups = Cognito.list_cognito_groups(envname=envname, region=current_region) + category, 
category_uri = filter.get("type"), filter.get("uri") + if category and category_uri: + if category == 'environment': + with context.engine.scoped_session() as session: + invited_groups = EnvironmentService.query_all_environment_groups( + session=session, + uri=category_uri, + filter=None, + ).all() + if category == 'organization': + with context.engine.scoped_session() as session: + organization = Organization.get_organization_by_uri(session, category_uri) + invited_groups = Organization.query_organization_groups( + session=session, + uri=organization.organizationUri, + filter=None, + ).all() + invited_group_uris = [item.groupUri for item in invited_groups] + res = [] + for group in groups: + if group['GroupName'] not in invited_group_uris: + res.append({"groupName": group['GroupName']}) + return res diff --git a/backend/dataall/core/cognito_groups/api/types.py b/backend/dataall/core/cognito_groups/api/types.py new file mode 100644 index 000000000..a735298c4 --- /dev/null +++ b/backend/dataall/core/cognito_groups/api/types.py @@ -0,0 +1,54 @@ +from dataall.base.api import gql +from dataall.core.cognito_groups.api.resolvers import * + +Group = gql.ObjectType( + name='Group', + fields=[ + gql.Field(name='groupUri', type=gql.String), + gql.Field(name='invitedBy', type=gql.String), + gql.Field(name='owner', type=gql.String), + gql.Field(name='label', type=gql.String), + gql.Field(name='name', type=gql.String), + gql.Field(name='description', type=gql.String), + gql.Field(name='tags', type=gql.ArrayType(gql.String)), + gql.Field(name='created', type=gql.String), + gql.Field(name='updated', type=gql.String), + gql.Field(name='environmentIAMRoleArn', type=gql.String), + gql.Field(name='environmentIAMRoleName', type=gql.String), + gql.Field(name='environmentAthenaWorkGroup', type=gql.String), + gql.Field( + name='environmentPermissions', + args=[ + gql.Argument( + name='environmentUri', type=gql.NonNullableType(gql.String) + ) + ], + type=gql.ArrayType(gql.Ref('Permission')), + resolver=resolve_group_environment_permissions, + ), + gql.Field( + name='tenantPermissions', + type=gql.ArrayType(gql.Ref('Permission')), + resolver=resolve_group_tenant_permissions, + ), + ], +) + +GroupSearchResult = gql.ObjectType( + name='GroupSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(Group)), + ], +) + +CognitoGroup = gql.ObjectType( + name='CognitoGroup', + fields=[ + gql.Field(name='groupName', type=gql.String), + ], +) diff --git a/backend/dataall/cdkproxy/stacks/policies/__init__.py b/backend/dataall/core/cognito_groups/aws/__init__.py similarity index 100% rename from backend/dataall/cdkproxy/stacks/policies/__init__.py rename to backend/dataall/core/cognito_groups/aws/__init__.py diff --git a/backend/dataall/aws/handlers/cognito.py b/backend/dataall/core/cognito_groups/aws/cognito.py similarity index 95% rename from backend/dataall/aws/handlers/cognito.py rename to backend/dataall/core/cognito_groups/aws/cognito.py index e3c9ea7c2..00930c7f9 100644 --- a/backend/dataall/aws/handlers/cognito.py +++ b/backend/dataall/core/cognito_groups/aws/cognito.py @@ -1,7 +1,7 @@ import logging import boto3 -from .sts import SessionHelper +from dataall.base.aws.sts import SessionHelper log = logging.getLogger(__name__) diff --git 
a/backend/dataall/tasks/data_sharing/__init__.py b/backend/dataall/core/cognito_groups/db/__init__.py similarity index 100% rename from backend/dataall/tasks/data_sharing/__init__.py rename to backend/dataall/core/cognito_groups/db/__init__.py diff --git a/backend/dataall/core/cognito_groups/db/cognito_group_models.py b/backend/dataall/core/cognito_groups/db/cognito_group_models.py new file mode 100644 index 000000000..085236ccf --- /dev/null +++ b/backend/dataall/core/cognito_groups/db/cognito_group_models.py @@ -0,0 +1,8 @@ +from sqlalchemy import Column, String + +from dataall.base.db import Base, Resource, utils + + +class Group(Resource, Base): + __tablename__ = 'group' + groupUri = Column(String, primary_key=True, default=utils.uuid('group')) diff --git a/backend/dataall/core/environment/__init__.py b/backend/dataall/core/environment/__init__.py new file mode 100644 index 000000000..6a2f7b3f4 --- /dev/null +++ b/backend/dataall/core/environment/__init__.py @@ -0,0 +1,2 @@ +"""The central package of the application to work with the environment""" +from dataall.core.environment import api, cdk, tasks diff --git a/backend/dataall/core/environment/api/__init__.py b/backend/dataall/core/environment/api/__init__.py new file mode 100644 index 000000000..3206bc011 --- /dev/null +++ b/backend/dataall/core/environment/api/__init__.py @@ -0,0 +1,10 @@ +from . import ( + input_types, + mutations, + queries, + resolvers, + enums, + types, +) + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations', "enums"] diff --git a/backend/dataall/core/environment/api/enums.py b/backend/dataall/core/environment/api/enums.py new file mode 100644 index 000000000..228d1afd9 --- /dev/null +++ b/backend/dataall/core/environment/api/enums.py @@ -0,0 +1,15 @@ +from dataall.base.api.constants import GraphQLEnumMapper + + +class EnvironmentPermission(GraphQLEnumMapper): + Owner = '999' + Admin = '900' + DatasetCreator = '800' + Invited = '200' + ProjectAccess = '050' + NotInvited = '000' + + +class EnvironmentType(GraphQLEnumMapper): + Data = 'Data' + Compute = 'Compute' diff --git a/backend/dataall/core/environment/api/input_types.py b/backend/dataall/core/environment/api/input_types.py new file mode 100644 index 000000000..5603122e6 --- /dev/null +++ b/backend/dataall/core/environment/api/input_types.py @@ -0,0 +1,123 @@ +from dataall.base.api import gql +from dataall.base.api.constants import GraphQLEnumMapper, SortDirection + + +AwsEnvironmentInput = gql.InputType( + name='AwsEnvironmentInput', + arguments=[ + gql.Argument('AwsAccountId', gql.NonNullableType(gql.String)), + gql.Argument('region', gql.NonNullableType(gql.String)), + ], +) + +ModifyEnvironmentParameterInput = gql.InputType( + name='ModifyEnvironmentParameterInput', + arguments=[ + gql.Argument('key', gql.String), + gql.Argument('value', gql.String) + ] +) + +NewEnvironmentInput = gql.InputType( + name='NewEnvironmentInput', + arguments=[ + gql.Argument('label', gql.NonNullableType(gql.String)), + gql.Argument('organizationUri', gql.NonNullableType(gql.String)), + gql.Argument('SamlGroupName', gql.NonNullableType(gql.String)), + gql.Argument('tags', gql.ArrayType(gql.String)), + gql.Argument('description', gql.String), + gql.Argument('AwsAccountId', gql.NonNullableType(gql.String)), + gql.Argument('region', gql.NonNullableType(gql.String)), + gql.Argument('vpcId', gql.String), + gql.Argument('privateSubnetIds', gql.ArrayType(gql.String)), + gql.Argument('publicSubnetIds', gql.ArrayType(gql.String)), + 
gql.Argument('EnvironmentDefaultIAMRoleName', gql.String), + gql.Argument('resourcePrefix', gql.String), + gql.Argument('parameters', gql.ArrayType(ModifyEnvironmentParameterInput)) + + ], +) + +ModifyEnvironmentInput = gql.InputType( + name='ModifyEnvironmentInput', + arguments=[ + gql.Argument('label', gql.String), + gql.Argument('description', gql.String), + gql.Argument('tags', gql.ArrayType(gql.String)), + gql.Argument('SamlGroupName', gql.String), + gql.Argument('vpcId', gql.String), + gql.Argument('privateSubnetIds', gql.ArrayType(gql.String)), + gql.Argument('publicSubnetIds', gql.ArrayType(gql.String)), + gql.Argument('resourcePrefix', gql.String), + gql.Argument('parameters', gql.ArrayType(ModifyEnvironmentParameterInput)) + ], +) + +EnableDataSubscriptionsInput = gql.InputType( + name='EnableDataSubscriptionsInput', + arguments=[ + gql.Argument('producersTopicArn', gql.String), + ], +) + + +class EnvironmentSortField(GraphQLEnumMapper): + created = 'created' + label = 'label' + + +EnvironmentSortCriteria = gql.InputType( + name='EnvironmentSortCriteria', + arguments=[ + gql.Argument( + name='field', type=gql.NonNullableType(EnvironmentSortField.toGraphQLEnum()) + ), + gql.Argument( + name='direction', type=gql.NonNullableType(SortDirection.toGraphQLEnum()) + ), + ], +) + +EnvironmentFilter = gql.InputType( + name='EnvironmentFilter', + arguments=[ + gql.Argument('term', gql.String), + gql.Argument('page', gql.Integer), + gql.Argument('displayArchived', gql.Boolean), + gql.Argument('roles', gql.ArrayType(gql.Ref('EnvironmentPermission'))), + gql.Argument('quicksight', gql.Boolean), + gql.Argument('sort', gql.ArrayType(EnvironmentSortCriteria)), + gql.Argument('pageSize', gql.Integer), + ], +) + + +InviteGroupOnEnvironmentInput = gql.InputType( + name='InviteGroupOnEnvironmentInput', + arguments=[ + gql.Argument('permissions', gql.ArrayType(gql.String)), + gql.Argument('environmentUri', gql.NonNullableType(gql.String)), + gql.Argument('groupUri', gql.NonNullableType(gql.String)), + gql.Argument('environmentIAMRoleName', gql.String), + ], +) + +AddConsumptionRoleToEnvironmentInput = gql.InputType( + name='AddConsumptionRoleToEnvironmentInput', + arguments=[ + gql.Argument('consumptionRoleName', gql.NonNullableType(gql.String)), + gql.Argument('groupUri', gql.NonNullableType(gql.String)), + gql.Argument('IAMRoleArn', gql.NonNullableType(gql.String)), + gql.Argument('environmentUri', gql.NonNullableType(gql.String)), + ], +) + +ConsumptionRoleFilter = gql.InputType( + name='ConsumptionRoleFilter', + arguments=[ + gql.Argument('term', gql.String), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + gql.Argument('groupUri', gql.NonNullableType(gql.String)), + ], +) diff --git a/backend/dataall/core/environment/api/mutations.py b/backend/dataall/core/environment/api/mutations.py new file mode 100644 index 000000000..0b60a92a5 --- /dev/null +++ b/backend/dataall/core/environment/api/mutations.py @@ -0,0 +1,112 @@ +from dataall.base.api import gql + +from dataall.core.environment.api.input_types import ( + ModifyEnvironmentInput, + NewEnvironmentInput, + EnableDataSubscriptionsInput, + InviteGroupOnEnvironmentInput, + AddConsumptionRoleToEnvironmentInput +) +from dataall.core.environment.api.resolvers import * + +createEnvironment = gql.MutationField( + name='createEnvironment', + args=[gql.Argument(name='input', type=gql.NonNullableType(NewEnvironmentInput))], + type=gql.Ref('Environment'), + resolver=create_environment, + 
test_scope='Environment', +) + +updateEnvironment = gql.MutationField( + name='updateEnvironment', + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='input', type=gql.NonNullableType(ModifyEnvironmentInput)), + ], + type=gql.Ref('Environment'), + resolver=update_environment, + test_scope='Environment', +) + +inviteGroupOnEnvironment = gql.MutationField( + name='inviteGroupOnEnvironment', + args=[ + gql.Argument( + name='input', type=gql.NonNullableType(InviteGroupOnEnvironmentInput) + ) + ], + type=gql.Ref('Environment'), + resolver=invite_group, +) + +addConsumptionRoleToEnvironment = gql.MutationField( + name='addConsumptionRoleToEnvironment', + args=[ + gql.Argument( + name='input', type=gql.NonNullableType(AddConsumptionRoleToEnvironmentInput) + ) + ], + type=gql.Ref('ConsumptionRole'), + resolver=add_consumption_role, +) + +updateGroupPermission = gql.MutationField( + name='updateGroupEnvironmentPermissions', + args=[ + gql.Argument( + name='input', type=gql.NonNullableType(InviteGroupOnEnvironmentInput) + ) + ], + type=gql.Ref('Environment'), + resolver=update_group_permissions, +) + +removeGroupFromEnvironment = gql.MutationField( + name='removeGroupFromEnvironment', + args=[ + gql.Argument('environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument('groupUri', type=gql.NonNullableType(gql.String)), + ], + type=gql.Ref('Environment'), + resolver=remove_group, +) + +removeConsumptionRoleFromEnvironment = gql.MutationField( + name='removeConsumptionRoleFromEnvironment', + args=[ + gql.Argument('environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument('consumptionRoleUri', type=gql.NonNullableType(gql.String)), + ], + type=gql.Boolean, + resolver=remove_consumption_role, +) + +deleteEnvironment = gql.MutationField( + name='deleteEnvironment', + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='deleteFromAWS', type=gql.Boolean), + ], + resolver=delete_environment, + type=gql.Boolean, +) + + +EnableDataSubscriptions = gql.MutationField( + name='enableDataSubscriptions', + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='input', type=EnableDataSubscriptionsInput), + ], + resolver=enable_subscriptions, + type=gql.Boolean, +) + +DisableDataSubscriptions = gql.MutationField( + name='DisableDataSubscriptions', + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + ], + resolver=disable_subscriptions, + type=gql.Boolean, +) diff --git a/backend/dataall/core/environment/api/queries.py b/backend/dataall/core/environment/api/queries.py new file mode 100644 index 000000000..e3af67414 --- /dev/null +++ b/backend/dataall/core/environment/api/queries.py @@ -0,0 +1,161 @@ +from dataall.base.api import gql + +from dataall.core.environment.api.input_types import EnvironmentFilter +from dataall.core.environment.api.resolvers import * +from dataall.core.environment.api.types import Environment, EnvironmentSearchResult + + +getTrustAccount = gql.QueryField( + name='getTrustAccount', + type=gql.String, + resolver=get_trust_account, + test_scope='Environment', +) + +getEnvironment = gql.QueryField( + name='getEnvironment', + args=[gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String))], + type=gql.Thunk(lambda: Environment), + resolver=get_environment, + test_scope='Environment', +) + + +listEnvironments = gql.QueryField( + name='listEnvironments', + 
args=[gql.Argument('filter', EnvironmentFilter)], + type=EnvironmentSearchResult, + resolver=list_environments, + test_scope='Environment', +) + +listEnvironmentNetworks = gql.QueryField( + name='listEnvironmentNetworks', + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('VpcFilter')), + ], + resolver=list_environment_networks, + type=gql.Ref('VpcSearchResult'), + test_scope='Environment', +) + + +generateEnvironmentAccessToken = gql.QueryField( + name='generateEnvironmentAccessToken', + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='groupUri', type=gql.String), + ], + type=gql.String, + resolver=generate_environment_access_token, + test_scope='Environment', +) + + +getEnvironmentAssumeRoleUrl = gql.QueryField( + name='getEnvironmentAssumeRoleUrl', + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='groupUri', type=gql.String), + ], + type=gql.String, + resolver=get_environment_assume_role_url, + test_scope='Environment', +) + + +listEnvironmentInvitedGroups = gql.QueryField( + name='listEnvironmentInvitedGroups', + type=gql.Ref('GroupSearchResult'), + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('GroupFilter')), + ], + resolver=list_environment_invited_groups, +) + +listEnvironmentGroups = gql.QueryField( + name='listEnvironmentGroups', + type=gql.Ref('GroupSearchResult'), + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('GroupFilter')), + ], + resolver=list_environment_groups, +) + +listAllEnvironmentGroups = gql.QueryField( + name='listAllEnvironmentGroups', + type=gql.Ref('GroupSearchResult'), + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('GroupFilter')), + ], + resolver=list_all_environment_groups, +) + +listEnvironmentConsumptionRoles = gql.QueryField( + name='listEnvironmentConsumptionRoles', + type=gql.Ref('ConsumptionRoleSearchResult'), + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('ConsumptionRoleFilter')), + ], + resolver=list_environment_consumption_roles, +) + + +listAllEnvironmentConsumptionRoles = gql.QueryField( + name='listAllEnvironmentConsumptionRoles', + type=gql.Ref('ConsumptionRoleSearchResult'), + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('ConsumptionRoleFilter')), + ], + resolver=list_all_environment_consumption_roles, +) + +listEnvironmentGroupInvitationPermissions = gql.QueryField( + name='listEnvironmentGroupInvitationPermissions', + args=[ + gql.Argument(name='environmentUri', type=gql.String), + ], + type=gql.ArrayType(gql.Ref('Permission')), + resolver=list_environment_group_invitation_permissions, +) + + +getPivotRolePresignedUrl = gql.QueryField( + name='getPivotRolePresignedUrl', + args=[gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String))], + type=gql.String, + resolver=get_pivot_role_template, + test_scope='Environment', +) + +getCDKExecPolicyPresignedUrl = gql.QueryField( + name='getCDKExecPolicyPresignedUrl', + args=[gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String))], + type=gql.String, + 
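
The GraphQL schema in these modules is assembled programmatically: input and object types are declared with the gql helper and each query or mutation field is wired to a plain resolver function that receives the request context, the parent object and the field arguments. A minimal, self-contained sketch of that wiring, assuming the same gql helper shown above (the Ping type, getPing field and get_ping resolver are hypothetical, not part of this change):

from dataall.base.api import gql

def get_ping(context, source, pingUri: str = None):
    # Resolvers receive (context, source, **field_arguments) and may return plain dicts
    return {'pingUri': pingUri, 'status': 'ok'}

Ping = gql.ObjectType(
    name='Ping',
    fields=[
        gql.Field(name='pingUri', type=gql.String),
        gql.Field(name='status', type=gql.String),
    ],
)

getPing = gql.QueryField(
    name='getPing',
    args=[gql.Argument(name='pingUri', type=gql.NonNullableType(gql.String))],
    type=gql.Ref('Ping'),
    resolver=get_ping,
)
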
resolver=get_cdk_exec_policy_template, + test_scope='Environment', +) + + +getPivotRoleExternalId = gql.QueryField( + name='getPivotRoleExternalId', + args=[gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String))], + type=gql.String, + resolver=get_external_id, + test_scope='Environment', +) + + +getPivotRoleName = gql.QueryField( + name='getPivotRoleName', + args=[gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String))], + type=gql.String, + resolver=get_pivot_role_name, + test_scope='Environment', +) diff --git a/backend/dataall/core/environment/api/resolvers.py b/backend/dataall/core/environment/api/resolvers.py new file mode 100644 index 000000000..112b89ce2 --- /dev/null +++ b/backend/dataall/core/environment/api/resolvers.py @@ -0,0 +1,685 @@ +import json +import logging +import os + +import boto3 +from botocore.config import Config +from botocore.exceptions import ClientError +from sqlalchemy import and_, exc + +from dataall.base.aws.iam import IAM +from dataall.base.aws.parameter_store import ParameterStoreManager +from dataall.base.aws.sts import SessionHelper +from dataall.base.utils import Parameter +from dataall.core.environment.db.environment_models import EnvironmentGroup +from dataall.core.environment.services.environment_resource_manager import EnvironmentResourceManager +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.environment.api.enums import EnvironmentPermission +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.stacks.api import stack_helper +from dataall.core.stacks.aws.cloudformation import CloudFormation +from dataall.core.stacks.db.stack_repositories import Stack +from dataall.core.vpc.db.vpc_repositories import Vpc +from dataall.base.db import exceptions +from dataall.core.permissions import permissions +from dataall.core.feature_toggle_checker import is_feature_enabled +from dataall.base.utils.naming_convention import ( + NamingConventionService, + NamingConventionPattern, +) +from dataall.core.organizations.api.resolvers import * + +log = logging.getLogger() + + +def get_trust_account(context: Context, source, **kwargs): + current_account = SessionHelper.get_account() + print('current_account = ', current_account) + return current_account + + +def get_pivot_role_as_part_of_environment(context: Context, source, **kwargs): + ssm_param = ParameterStoreManager.get_parameter_value(region=os.getenv('AWS_REGION', 'eu-west-1'), parameter_path=f"/dataall/{os.getenv('envname', 'local')}/pivotRole/enablePivotRoleAutoCreate") + return True if ssm_param == "True" else False + + +def check_environment(context: Context, source, account_id, region): + """ Checks necessary resources for environment deployment. + - Check CDKToolkit exists in Account assuming cdk_look_up_role + - Check Pivot Role exists in Account if pivot_role_as_part_of_environment is False + Args: + input: environment creation input + """ + pivot_role_as_part_of_environment = get_pivot_role_as_part_of_environment(context, source) + log.info(f"Creating environment. 
Pivot role as part of environment = {pivot_role_as_part_of_environment}") + ENVNAME = os.environ.get('envname', 'local') + if ENVNAME == 'pytest': + return 'CdkRoleName' + + cdk_look_up_role_arn = SessionHelper.get_cdk_look_up_role_arn( + accountid=account_id, region=region + ) + cdk_role_name = CloudFormation.check_existing_cdk_toolkit_stack( + AwsAccountId=account_id, region=region + ) + if not pivot_role_as_part_of_environment: + log.info("Check if PivotRole exist in the account") + pivot_role_arn = SessionHelper.get_delegation_role_arn(accountid=account_id) + role = IAM.get_role(account_id=account_id, role_arn=pivot_role_arn, role=cdk_look_up_role_arn) + if not role: + raise exceptions.AWSResourceNotFound( + action='CHECK_PIVOT_ROLE', + message='Pivot Role has not been created in the Environment AWS Account', + ) + + return cdk_role_name + + +def create_environment(context: Context, source, input=None): + if input.get('SamlGroupName') and input.get('SamlGroupName') not in context.groups: + raise exceptions.UnauthorizedOperation( + action=permissions.LINK_ENVIRONMENT, + message=f'User: {context.username} is not a member of the group {input["SamlGroupName"]}', + ) + + with context.engine.scoped_session() as session: + cdk_role_name = check_environment(context, source, + account_id=input.get('AwsAccountId'), + region=input.get('region') + ) + input['cdk_role_name'] = cdk_role_name + env = EnvironmentService.create_environment( + session=session, + uri=input.get('organizationUri'), + data=input, + ) + Stack.create_stack( + session=session, + environment_uri=env.environmentUri, + target_type='environment', + target_uri=env.environmentUri, + target_label=env.label, + ) + stack_helper.deploy_stack(targetUri=env.environmentUri) + env.userRoleInEnvironment = EnvironmentPermission.Owner.value + return env + + +def update_environment( + context: Context, source, environmentUri: str = None, input: dict = None +): + if input.get('SamlGroupName') and input.get('SamlGroupName') not in context.groups: + raise exceptions.UnauthorizedOperation( + action=permissions.LINK_ENVIRONMENT, + message=f'User: {context.username} is not part of the group {input["SamlGroupName"]}', + ) + + with context.engine.scoped_session() as session: + + environment = EnvironmentService.get_environment_by_uri(session, environmentUri) + cdk_role_name = check_environment(context, source, + account_id=environment.AwsAccountId, + region=environment.region + ) + + previous_resource_prefix = environment.resourcePrefix + + environment = EnvironmentService.update_environment( + session, + uri=environmentUri, + data=input, + ) + + if EnvironmentResourceManager.deploy_updated_stack(session, previous_resource_prefix, environment): + stack_helper.deploy_stack(targetUri=environment.environmentUri) + + return environment + + +def invite_group(context: Context, source, input): + with context.engine.scoped_session() as session: + environment, environment_group = EnvironmentService.invite_group( + session=session, + uri=input['environmentUri'], + data=input, + ) + + stack_helper.deploy_stack(targetUri=environment.environmentUri) + + return environment + + +def add_consumption_role(context: Context, source, input): + with context.engine.scoped_session() as session: + env = EnvironmentService.get_environment_by_uri(session, input['environmentUri']) + role = IAM.get_role(env.AwsAccountId, input['IAMRoleArn']) + if not role: + raise exceptions.AWSResourceNotFound( + action='ADD_CONSUMPTION_ROLE', + message=f"{input['IAMRoleArn']} does not exist in 
this account", + ) + consumption_role = EnvironmentService.add_consumption_role( + session=session, + uri=input['environmentUri'], + data=input, + ) + + return consumption_role + + +def update_group_permissions(context, source, input): + with context.engine.scoped_session() as session: + environment = EnvironmentService.update_group_permissions( + session=session, + uri=input['environmentUri'], + data=input, + ) + + stack_helper.deploy_stack(targetUri=environment.environmentUri) + + return environment + + +def remove_group(context: Context, source, environmentUri=None, groupUri=None): + with context.engine.scoped_session() as session: + environment = EnvironmentService.remove_group( + session=session, + uri=environmentUri, + group=groupUri, + ) + + stack_helper.deploy_stack(targetUri=environment.environmentUri) + + return environment + + +def remove_consumption_role(context: Context, source, environmentUri=None, consumptionRoleUri=None): + with context.engine.scoped_session() as session: + status = EnvironmentService.remove_consumption_role( + session=session, + uri=consumptionRoleUri, + env_uri=environmentUri, + ) + + return status + + +def list_environment_invited_groups( + context: Context, source, environmentUri=None, filter=None +): + if filter is None: + filter = {} + with context.engine.scoped_session() as session: + return EnvironmentService.paginated_environment_invited_groups( + session=session, + uri=environmentUri, + data=filter, + ) + + +def list_environment_groups(context: Context, source, environmentUri=None, filter=None): + if filter is None: + filter = {} + with context.engine.scoped_session() as session: + return EnvironmentService.paginated_user_environment_groups( + session=session, + uri=environmentUri, + data=filter, + ) + + +def list_all_environment_groups( + context: Context, source, environmentUri=None, filter=None +): + if filter is None: + filter = {} + with context.engine.scoped_session() as session: + return EnvironmentService.paginated_all_environment_groups( + session=session, + uri=environmentUri, + data=filter, + ) + + +def list_environment_consumption_roles( + context: Context, source, environmentUri=None, filter=None +): + if filter is None: + filter = {} + with context.engine.scoped_session() as session: + return EnvironmentService.paginated_user_environment_consumption_roles( + session=session, + uri=environmentUri, + data=filter, + ) + + +def list_all_environment_consumption_roles( + context: Context, source, environmentUri=None, filter=None +): + if filter is None: + filter = {} + with context.engine.scoped_session() as session: + return EnvironmentService.paginated_all_environment_consumption_roles( + session=session, + uri=environmentUri, + data=filter, + ) + + +def list_environment_group_invitation_permissions( + context: Context, + source, + environmentUri=None, +): + with context.engine.scoped_session() as session: + return EnvironmentService.list_group_invitation_permissions( + session=session, + username=context.username, + groups=context.groups, + uri=environmentUri, + ) + + +def list_environments(context: Context, source, filter=None): + if filter is None: + filter = {} + with context.engine.scoped_session() as session: + return EnvironmentService.paginated_user_environments(session, filter) + + +def list_environment_networks( + context: Context, source, environmentUri=None, filter=None +): + if filter is None: + filter = {} + with context.engine.scoped_session() as session: + return EnvironmentService.paginated_environment_networks( + 
session=session, + uri=environmentUri, + data=filter, + ) + + +def get_parent_organization(context: Context, source, **kwargs): + org = get_organization(context, source, organizationUri=source.organizationUri) + return org + + +def resolve_vpc_list(context: Context, source, **kwargs): + with context.engine.scoped_session() as session: + return Vpc.get_environment_vpc_list( + session=session, environment_uri=source.environmentUri + ) + + +def get_environment(context: Context, source, environmentUri: str = None): + with context.engine.scoped_session() as session: + return EnvironmentService.find_environment_by_uri(session, uri=environmentUri) + + +def resolve_user_role(context: Context, source: Environment): + if source.owner == context.username: + return EnvironmentPermission.Owner.value + elif source.SamlGroupName in context.groups: + return EnvironmentPermission.Admin.value + else: + with context.engine.scoped_session() as session: + env_group = ( + session.query(EnvironmentGroup) + .filter( + and_( + EnvironmentGroup.environmentUri == source.environmentUri, + EnvironmentGroup.groupUri.in_(context.groups), + ) + ) + .first() + ) + if env_group: + return EnvironmentPermission.Invited.value + return EnvironmentPermission.NotInvited.value + + +def list_environment_group_permissions( + context, source, environmentUri: str = None, groupUri: str = None +): + with context.engine.scoped_session() as session: + return EnvironmentService.list_group_permissions( + session=session, + uri=environmentUri, + group_uri=groupUri + ) + + +@is_feature_enabled('core.features.env_aws_actions') +def _get_environment_group_aws_session( + session, username, groups, environment, groupUri=None +): + if groupUri and groupUri not in groups: + raise exceptions.UnauthorizedOperation( + action='ENVIRONMENT_AWS_ACCESS', + message=f'User: {username} is not member of the team {groupUri}', + ) + pivot_session = SessionHelper.remote_session(environment.AwsAccountId) + if not groupUri: + if environment.SamlGroupName in groups: + aws_session = SessionHelper.get_session( + base_session=pivot_session, + role_arn=environment.EnvironmentDefaultIAMRoleArn, + ) + else: + raise exceptions.UnauthorizedOperation( + action='ENVIRONMENT_AWS_ACCESS', + message=f'User: {username} is not member of the environment admins team {environment.SamlGroupName}', + ) + else: + env_group: EnvironmentGroup = ( + session.query(EnvironmentGroup) + .filter( + EnvironmentGroup.environmentUri == environment.environmentUri, + EnvironmentGroup.groupUri == groupUri, + ) + .first() + ) + if not env_group: + raise exceptions.UnauthorizedOperation( + action='ENVIRONMENT_AWS_ACCESS', + message=f'Team {groupUri} is not invited to the environment {environment.name}', + ) + else: + aws_session = SessionHelper.get_session( + base_session=pivot_session, + role_arn=env_group.environmentIAMRoleArn, + ) + if not aws_session: + raise exceptions.AWSResourceNotFound( + action='ENVIRONMENT_AWS_ACCESS', + message=f'Failed to start an AWS session on environment {environment.AwsAccountId}', + ) + return aws_session + + +@is_feature_enabled('core.features.env_aws_actions') +def get_environment_assume_role_url( + context: Context, + source, + environmentUri: str = None, + groupUri: str = None, +): + with context.engine.scoped_session() as session: + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=environmentUri, + permission_name=permissions.CREDENTIALS_ENVIRONMENT, + ) + environment = 
EnvironmentService.get_environment_by_uri(session, environmentUri) + url = SessionHelper.get_console_access_url( + _get_environment_group_aws_session( + session=session, + username=context.username, + groups=context.groups, + environment=environment, + groupUri=groupUri, + ), + region=environment.region, + ) + return url + + +@is_feature_enabled('core.features.env_aws_actions') +def generate_environment_access_token( + context, source, environmentUri: str = None, groupUri: str = None +): + with context.engine.scoped_session() as session: + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=environmentUri, + permission_name=permissions.CREDENTIALS_ENVIRONMENT, + ) + environment = EnvironmentService.get_environment_by_uri(session, environmentUri) + c = _get_environment_group_aws_session( + session=session, + username=context.username, + groups=context.groups, + environment=environment, + groupUri=groupUri, + ).get_credentials() + credentials = { + 'AccessKey': c.access_key, + 'SessionKey': c.secret_key, + 'sessionToken': c.token, + } + return json.dumps(credentials) + + +def get_environment_stack(context: Context, source: Environment, **kwargs): + return stack_helper.get_stack_with_cfn_resources( + targetUri=source.environmentUri, + environmentUri=source.environmentUri, + ) + + +def delete_environment( + context: Context, source, environmentUri: str = None, deleteFromAWS: bool = False +): + with context.engine.scoped_session() as session: + environment = EnvironmentService.get_environment_by_uri(session, environmentUri) + EnvironmentService.delete_environment( + session, + uri=environmentUri, + environment=environment + ) + + if deleteFromAWS: + stack_helper.delete_stack( + target_uri=environmentUri, + accountid=environment.AwsAccountId, + cdk_role_arn=environment.CDKRoleArn, + region=environment.region, + ) + + return True + + +def enable_subscriptions( + context: Context, source, environmentUri: str = None, input: dict = None +): + with context.engine.scoped_session() as session: + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=environmentUri, + permission_name=permissions.ENABLE_ENVIRONMENT_SUBSCRIPTIONS, + ) + environment = EnvironmentService.get_environment_by_uri(session, environmentUri) + if input.get('producersTopicArn'): + environment.subscriptionsProducersTopicName = input.get('producersTopicArn') + environment.subscriptionsProducersTopicImported = True + + else: + environment.subscriptionsProducersTopicName = NamingConventionService( + target_label=f'{environment.label}-producers-topic', + target_uri=environment.environmentUri, + pattern=NamingConventionPattern.DEFAULT, + resource_prefix=environment.resourcePrefix, + ).build_compliant_name() + + environment.subscriptionsConsumersTopicName = NamingConventionService( + target_label=f'{environment.label}-consumers-topic', + target_uri=environment.environmentUri, + pattern=NamingConventionPattern.DEFAULT, + resource_prefix=environment.resourcePrefix, + ).build_compliant_name() + environment.subscriptionsConsumersTopicImported = False + environment.subscriptionsEnabled = True + session.commit() + stack_helper.deploy_stack(targetUri=environment.environmentUri) + return True + + +def disable_subscriptions(context: Context, source, environmentUri: str = None): + with context.engine.scoped_session() as session: + ResourcePolicy.check_user_resource_permission( + 
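# Note: generate_environment_access_token above returns a JSON string with 'AccessKey', 'SessionKey'
# and 'sessionToken'; a caller could pass those values to
# boto3.Session(aws_access_key_id=..., aws_secret_access_key=..., aws_session_token=...)
# to use the returned short-lived credentials.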
session=session, + username=context.username, + groups=context.groups, + resource_uri=environmentUri, + permission_name=permissions.ENABLE_ENVIRONMENT_SUBSCRIPTIONS, + ) + environment = EnvironmentService.get_environment_by_uri(session, environmentUri) + + environment.subscriptionsConsumersTopicName = None + environment.subscriptionsConsumersTopicImported = False + environment.subscriptionsProducersTopicName = None + environment.subscriptionsProducersTopicImported = False + environment.subscriptionsEnabled = False + session.commit() + stack_helper.deploy_stack(targetUri=environment.environmentUri) + return True + + +def get_pivot_role_template(context: Context, source, organizationUri=None): + from dataall.base.utils import Parameter + + with context.engine.scoped_session() as session: + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=organizationUri, + permission_name=permissions.GET_ORGANIZATION, + ) + pivot_role_bucket = Parameter().get_parameter( + env=os.getenv('envname', 'local'), path='s3/resources_bucket_name' + ) + pivot_role_bucket_key = Parameter().get_parameter( + env=os.getenv('envname', 'local'), path='s3/pivot_role_prefix' + ) + if not pivot_role_bucket or not pivot_role_bucket_key: + raise exceptions.AWSResourceNotFound( + action='GET_PIVOT_ROLE_TEMPLATE', + message='Pivot Yaml template file could not be found on Amazon S3 bucket', + ) + try: + s3_client = boto3.client( + 's3', + region_name=os.getenv('AWS_REGION', 'eu-central-1'), + config=Config( + signature_version='s3v4', s3={'addressing_style': 'virtual'} + ), + ) + presigned_url = s3_client.generate_presigned_url( + 'get_object', + Params=dict( + Bucket=pivot_role_bucket, + Key=pivot_role_bucket_key, + ), + ExpiresIn=15 * 60, + ) + return presigned_url + except ClientError as e: + log.error( + f'Failed to get presigned URL for pivot role template due to: {e}' + ) + raise e + + +def get_cdk_exec_policy_template(context: Context, source, organizationUri=None): + with context.engine.scoped_session() as session: + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=organizationUri, + permission_name=permissions.GET_ORGANIZATION, + ) + cdk_exec_policy_bucket = Parameter().get_parameter( + env=os.getenv('envname', 'local'), path='s3/resources_bucket_name' + ) + cdk_exec_policy_bucket_key = Parameter().get_parameter( + env=os.getenv('envname', 'local'), path='s3/cdk_exec_policy_prefix' + ) + if not cdk_exec_policy_bucket or not cdk_exec_policy_bucket_key: + raise exceptions.AWSResourceNotFound( + action='GET_CDK_EXEC_POLICY_TEMPLATE', + message='CDK Exec Yaml template file could not be found on Amazon S3 bucket', + ) + try: + s3_client = boto3.client( + 's3', + region_name=os.getenv('AWS_REGION', 'eu-central-1'), + config=Config( + signature_version='s3v4', s3={'addressing_style': 'virtual'} + ), + ) + presigned_url = s3_client.generate_presigned_url( + 'get_object', + Params=dict( + Bucket=cdk_exec_policy_bucket, + Key=cdk_exec_policy_bucket_key, + ), + ExpiresIn=15 * 60, + ) + return presigned_url + except ClientError as e: + log.error( + f'Failed to get presigned URL for CDK Exec role template due to: {e}' + ) + raise e + + +def get_external_id(context: Context, source, organizationUri=None): + with context.engine.scoped_session() as session: + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + 
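# get_pivot_role_template, get_cdk_exec_policy_template, get_external_id and get_pivot_role_name (below)
# all check the same GET_ORGANIZATION permission on the target organization before returning
# account-linking details (CloudFormation templates, external id, pivot role name).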
groups=context.groups, + resource_uri=organizationUri, + permission_name=permissions.GET_ORGANIZATION, + ) + external_id = SessionHelper.get_external_id_secret() + if not external_id: + raise exceptions.AWSResourceNotFound( + action='GET_EXTERNAL_ID', + message='External Id could not be found on AWS Secretsmanager', + ) + return external_id + + +def get_pivot_role_name(context: Context, source, organizationUri=None): + with context.engine.scoped_session() as session: + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=organizationUri, + permission_name=permissions.GET_ORGANIZATION, + ) + pivot_role_name = SessionHelper.get_delegation_role_name() + if not pivot_role_name: + raise exceptions.AWSResourceNotFound( + action='GET_PIVOT_ROLE_NAME', + message='Pivot role name could not be found on AWS Systems Manager - Parameter Store', + ) + return pivot_role_name + + +def resolve_environment(context, source, **kwargs): + """Resolves the environment for a environmental resource""" + if not source: + return None + with context.engine.scoped_session() as session: + return session.query(Environment).get(source.environmentUri) + + +def resolve_parameters(context, source: Environment, **kwargs): + """Resolves a parameters for the environment""" + if not source: + return None + with context.engine.scoped_session() as session: + return EnvironmentService.get_environment_parameters(session, source.environmentUri) diff --git a/backend/dataall/core/environment/api/types.py b/backend/dataall/core/environment/api/types.py new file mode 100644 index 000000000..fad61d7c3 --- /dev/null +++ b/backend/dataall/core/environment/api/types.py @@ -0,0 +1,154 @@ +from dataall.base.api import gql + +from dataall.core.environment.api.resolvers import * +from dataall.core.environment.api.enums import EnvironmentPermission + + +EnvironmentUserPermission = gql.ObjectType( + name='EnvironmentUserPermission', + fields=[ + gql.Field(name='userName', type=gql.String), + gql.Field(name='created', type=gql.String), + gql.Field(name='updated', type=gql.String), + gql.Field(name='userRoleInEnvironment', type=gql.Ref('EnvironmentPermission')), + ], +) + +EnvironmentUserPermissionSearchResult = gql.ObjectType( + name='EnvironmentUserPermissionSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='nodes', type=gql.ArrayType(EnvironmentUserPermission)), + ], +) + + +EnvironmentGroupPermission = gql.ObjectType( + name='EnvironmentGroupPermission', + fields=[ + gql.Field(name='groupUri', type=gql.String), + gql.Field(name='created', type=gql.String), + gql.Field(name='updated', type=gql.String), + gql.Field( + name='groupRoleInEnvironment', type=EnvironmentPermission.toGraphQLEnum() + ), + gql.Field(name='Group', type=gql.Ref('Group')), + ], +) + +EnvironmentGroupPermissionSearchResult = gql.ObjectType( + name='EnvironmentGroupPermissionSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='nodes', type=gql.ArrayType(EnvironmentGroupPermission)), + ], +) + + +EnvironmentParameter = gql.ObjectType( + name='EnvironmentParameter', + fields=[ + gql.Field(name='key', type=gql.String), + gql.Field(name='value', type=gql.String), + ] +) + +Environment = gql.ObjectType( + name='Environment', + fields=[ + gql.Field(name='environmentUri', type=gql.ID), + gql.Field(name='label', type=gql.String), + gql.Field(name='name', type=gql.String), + gql.Field(name='description', type=gql.String), + 
gql.Field(name='owner', type=gql.String), + gql.Field(name='created', type=gql.String), + gql.Field(name='updated', type=gql.String), + gql.Field(name='deleted', type=gql.String), + gql.Field(name='tags', type=gql.ArrayType(gql.String)), + gql.Field(name='admins', type=gql.ArrayType(gql.String)), + gql.Field(name='environmentType', type=gql.String), + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='region', type=gql.String), + gql.Field(name='SamlGroupName', type=gql.String), + gql.Field(name='resourcePrefix', type=gql.String), + gql.Field(name='EnvironmentDefaultIAMRoleArn', type=gql.String), + gql.Field(name='EnvironmentDefaultIAMRoleName', type=gql.String), + gql.Field(name='EnvironmentDefaultIAMRoleImported', type=gql.Boolean), + gql.Field(name='datasets', type=gql.String), + gql.Field( + name='organization', + type=gql.Ref('Organization'), + resolver=get_parent_organization, + ), + gql.Field( + 'userRoleInEnvironment', + type=EnvironmentPermission.toGraphQLEnum(), + resolver=resolve_user_role, + ), + gql.Field('validated', type=gql.Boolean), + gql.Field('roleCreated', type=gql.Boolean), + gql.Field('isOrganizationDefaultEnvironment', type=gql.Boolean), + gql.Field('stack', type=gql.Ref('Stack'), resolver=get_environment_stack), + gql.Field('subscriptionsEnabled', type=gql.Boolean), + gql.Field('subscriptionsProducersTopicImported', type=gql.Boolean), + gql.Field('subscriptionsConsumersTopicImported', type=gql.Boolean), + gql.Field('subscriptionsConsumersTopicName', type=gql.String), + gql.Field('subscriptionsProducersTopicName', type=gql.String), + gql.Field('EnvironmentDefaultBucketName', type=gql.String), + gql.Field('EnvironmentDefaultAthenaWorkGroup', type=gql.String), + gql.Field( + name='networks', + type=gql.ArrayType(gql.Ref('Vpc')), + resolver=resolve_vpc_list, + ), + gql.Field( + name='parameters', + resolver=resolve_parameters, + type=gql.ArrayType(gql.Ref('EnvironmentParameter')), + ), + ], +) + + +EnvironmentSearchResult = gql.ObjectType( + name='EnvironmentSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='nodes', type=gql.ArrayType(Environment)), + gql.Field(name='pageSize', type=gql.Integer), + gql.Field(name='nextPage', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='previousPage', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + ], +) + + +ConsumptionRole = gql.ObjectType( + name='ConsumptionRole', + fields=[ + gql.Field(name='consumptionRoleUri', type=gql.String), + gql.Field(name='consumptionRoleName', type=gql.String), + gql.Field(name='groupUri', type=gql.String), + gql.Field(name='environmentUri', type=gql.String), + gql.Field(name='IAMRoleArn', type=gql.String), + gql.Field(name='IAMRoleName', type=gql.String), + gql.Field(name='created', type=gql.String), + gql.Field(name='updated', type=gql.String), + gql.Field(name='deleted', type=gql.String), + ], +) + +ConsumptionRoleSearchResult = gql.ObjectType( + name='ConsumptionRoleSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(ConsumptionRole)), + ], +) diff --git a/backend/dataall/tasks/data_sharing/share_processors/__init__.py 
b/backend/dataall/core/environment/cdk/__init__.py similarity index 100% rename from backend/dataall/tasks/data_sharing/share_processors/__init__.py rename to backend/dataall/core/environment/cdk/__init__.py diff --git a/backend/dataall/core/environment/cdk/env_role_core_policies/__init__.py b/backend/dataall/core/environment/cdk/env_role_core_policies/__init__.py new file mode 100644 index 000000000..1019f7f2d --- /dev/null +++ b/backend/dataall/core/environment/cdk/env_role_core_policies/__init__.py @@ -0,0 +1,7 @@ +"""Contains the code for creating environment policies""" + +from dataall.core.environment.cdk.env_role_core_policies import ( + cloudformation, data_policy, service_policy, athena, secretsmanager, sqs, ssm +) + +__all__ = ["cloudformation", "data_policy", "service_policy", "athena", "secretsmanager", "sqs", "ssm"] diff --git a/backend/dataall/cdkproxy/stacks/policies/athena.py b/backend/dataall/core/environment/cdk/env_role_core_policies/athena.py similarity index 97% rename from backend/dataall/cdkproxy/stacks/policies/athena.py rename to backend/dataall/core/environment/cdk/env_role_core_policies/athena.py index 938029262..c60ec87a4 100644 --- a/backend/dataall/cdkproxy/stacks/policies/athena.py +++ b/backend/dataall/core/environment/cdk/env_role_core_policies/athena.py @@ -9,7 +9,7 @@ class Athena(ServicePolicy): - Work with team workgroup - Store query results in environment S3 Bucket location for the team workgroup (access to other S3 locations is restricted) """ - def get_statements(self): + def get_statements(self, group_permissions, **kwargs): statements = [ iam.PolicyStatement( # sid="ListAthena", diff --git a/backend/dataall/core/environment/cdk/env_role_core_policies/cloudformation.py b/backend/dataall/core/environment/cdk/env_role_core_policies/cloudformation.py new file mode 100644 index 000000000..50b751927 --- /dev/null +++ b/backend/dataall/core/environment/cdk/env_role_core_policies/cloudformation.py @@ -0,0 +1,53 @@ +from .service_policy import ServicePolicy +from aws_cdk import aws_iam as iam + + +class Cloudformation(ServicePolicy): + """ + Class including all permissions needed to work with AWS CloudFormation. 
+ It allows data.all users to: + - Create/Delete CloudFormation team stacks + - Create an S3 Bucket for codepipeline prefixed by "cf-templates-" + - Read/Write to and from S3 Buckets prefixed by "cf-templates-" + """ + def get_statements(self, group_permissions, **kwargs): + statements = [ + iam.PolicyStatement( + # sid="GenericCloudFormation", + actions=[ + 'cloudformation:EstimateTemplateCost', + 'cloudformation:ListStacks', + 'cloudformation:ValidateTemplate', + 'cloudformation:GetTemplateSummary', + 'cloudformation:ListExports', + 'cloudformation:ListImports', + 'cloudformation:DescribeAccountLimits', + 'cloudformation:DescribeStackDriftDetectionStatus', + 'cloudformation:Cancel*', + 'cloudformation:Continue*', + 'cloudformation:CreateChangeSet', + 'cloudformation:ExecuteChangeSet', + 'cloudformation:CreateStackSet', + 'cloudformation:Get*', + 'cloudformation:Describe*', + 'cloudformation:List*', + 'cloudformation:CreateUploadBucket', + ], + resources=['*'], + ), + iam.PolicyStatement( + # sid="DeleteTeamCloudFormation", + actions=[ + 'cloudformation:DeleteStack', + ], + resources=[ + f'arn:aws:cloudformation:{self.region}:{self.account}:*/{self.resource_prefix}*' + ], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_value] + } + }, + ), + ] + return statements diff --git a/backend/dataall/core/environment/cdk/env_role_core_policies/data_policy.py b/backend/dataall/core/environment/cdk/env_role_core_policies/data_policy.py new file mode 100644 index 000000000..224413507 --- /dev/null +++ b/backend/dataall/core/environment/cdk/env_role_core_policies/data_policy.py @@ -0,0 +1,76 @@ +import logging +from typing import List + +from aws_cdk import aws_iam as iam + +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup + +logger = logging.getLogger() + + +class S3Policy: + """ + Class including all permissions needed to work with AWS Lambda. 
+ It allows data.all users to: + - + """ + def __init__( + self, + stack, + id, + name, + account, + region, + tag_key, + tag_value, + resource_prefix, + environment: Environment, + team: EnvironmentGroup, + ): + self.stack = stack + self.id = id + self.name = name + self.account = account + self.region = region + self.tag_key = tag_key + self.tag_value = tag_value + self.resource_prefix = resource_prefix + self.environment = environment + self.team = team + + def generate_data_access_policy(self, session) -> iam.Policy: + """ + Creates aws_iam.Policy based on team datasets + """ + statements: List[iam.PolicyStatement] = self.get_statements(session) + + for extension in S3Policy.__subclasses__(): + statements.extend(extension.get_statements(self, session=session)) + + policy: iam.Policy = iam.Policy( + self.stack, + self.id, + policy_name=self.name, + statements=statements, + ) + logger.debug(f'Final generated policy {policy.document.to_json()}') + + return policy + + def get_statements(self, *args, **kwargs): + statements = [ + iam.PolicyStatement( + sid="ListAll", + actions=[ + "s3:ListAllMyBuckets", + "s3:ListAccessPoints", + "s3:GetBucketLocation", + 'kms:ListAliases', + 'kms:ListKeys', + ], + resources=["*"], + effect=iam.Effect.ALLOW + ) + ] + + return statements diff --git a/backend/dataall/cdkproxy/stacks/policies/secretsmanager.py b/backend/dataall/core/environment/cdk/env_role_core_policies/secretsmanager.py similarity index 96% rename from backend/dataall/cdkproxy/stacks/policies/secretsmanager.py rename to backend/dataall/core/environment/cdk/env_role_core_policies/secretsmanager.py index 54604542a..91c95e2d1 100644 --- a/backend/dataall/cdkproxy/stacks/policies/secretsmanager.py +++ b/backend/dataall/core/environment/cdk/env_role_core_policies/secretsmanager.py @@ -9,7 +9,7 @@ class SecretsManager(ServicePolicy): - """ - def get_statements(self): + def get_statements(self, group_permissions, **kwargs): statements = [ aws_iam.PolicyStatement( # sid="SecretsReadAll", diff --git a/backend/dataall/cdkproxy/stacks/policies/service_policy.py b/backend/dataall/core/environment/cdk/env_role_core_policies/service_policy.py similarity index 75% rename from backend/dataall/cdkproxy/stacks/policies/service_policy.py rename to backend/dataall/core/environment/cdk/env_role_core_policies/service_policy.py index abdc36ddf..c15ac0e0e 100644 --- a/backend/dataall/cdkproxy/stacks/policies/service_policy.py +++ b/backend/dataall/core/environment/cdk/env_role_core_policies/service_policy.py @@ -3,8 +3,7 @@ from aws_cdk import aws_iam -from ....db import permissions -from ....db import models +from dataall.core.environment.db.environment_models import EnvironmentGroup, Environment logger = logging.getLogger() @@ -24,8 +23,8 @@ def __init__( tag_key, tag_value, resource_prefix, - environment: models.Environment, - team: models.EnvironmentGroup, + environment: Environment, + team: EnvironmentGroup, permissions, ): self.stack = stack @@ -45,19 +44,6 @@ def generate_policies(self) -> [aws_iam.ManagedPolicy]: """ Creates aws_iam.Policy based on declared subclasses of Policy object """ - from ._lambda import Lambda - from .athena import Athena - from .cloudformation import Cloudformation - from .aws_cicd import AwsCICD - from .databrew import Databrew - from .glue import Glue, GlueCatalog - from .quicksight import QuickSight - from .sagemaker import Sagemaker - from .secretsmanager import SecretsManager - from .sqs import SQS - from .ssm import SSM - from .stepfunctions import StepFunctions - policies: 
[aws_iam.ManagedPolicy] = [ # This policy adds some minimum actions required independent from the services enabled for the group aws_iam.ManagedPolicy( @@ -123,27 +109,12 @@ def generate_policies(self) -> [aws_iam.ManagedPolicy]: services = ServicePolicy.__subclasses__() - if permissions.CREATE_DATASET not in self.permissions: - services.remove(Databrew) - services.remove(Glue) - if ( - permissions.CREATE_NOTEBOOK not in self.permissions - and permissions.CREATE_SGMSTUDIO_NOTEBOOK not in self.permissions - ): - services.remove(Sagemaker) - if permissions.CREATE_PIPELINE not in self.permissions: - services.remove(Lambda) - services.remove(AwsCICD) - services.remove(StepFunctions) - if permissions.CREATE_DASHBOARD not in self.permissions: - services.remove(QuickSight) - statements = [] for service in services: - statements.extend(service.get_statements(self)) + statements.extend(service.get_statements(self, self.permissions)) statements_chunks: list = [ - statements[i : i + 10] for i in range(0, len(statements), 10) + statements[i: i + 10] for i in range(0, len(statements), 10) ] for index, chunk in enumerate(statements_chunks): @@ -157,7 +128,7 @@ def generate_policies(self) -> [aws_iam.ManagedPolicy]: ) return policies - def get_statements(self, **kwargs) -> List[aws_iam.PolicyStatement]: + def get_statements(self, group_permissions, **kwargs) -> List[aws_iam.PolicyStatement]: """ This method implements a policy based on a tag key and optionally a resource prefix :return: list diff --git a/backend/dataall/core/environment/cdk/env_role_core_policies/sqs.py b/backend/dataall/core/environment/cdk/env_role_core_policies/sqs.py new file mode 100644 index 000000000..56679be1d --- /dev/null +++ b/backend/dataall/core/environment/cdk/env_role_core_policies/sqs.py @@ -0,0 +1,57 @@ +from .service_policy import ServicePolicy +from aws_cdk import aws_iam + + +class SQS(ServicePolicy): + """ + Class including all permissions needed to work with AWS SQS queues. 
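Worth noting before the SQS statements below: the refactored service_policy.py above no longer removes per-module services (Databrew, Glue, SageMaker, QuickSight, ...) centrally; every ServicePolicy subclass now receives the team's permission names and decides for itself what to emit. A minimal sketch of how a module-owned subclass might use that hook (class name, permission name and action are purely illustrative, not part of this change):

    from aws_cdk import aws_iam
    from dataall.core.environment.cdk.env_role_core_policies.service_policy import ServicePolicy

    class MyModuleServicePolicy(ServicePolicy):
        """Hypothetical module-owned policy, shown only to illustrate the new signature."""

        def get_statements(self, group_permissions, **kwargs):
            # Gate the whole statement set on a (hypothetical) module permission name.
            if 'CREATE_MY_RESOURCE' not in group_permissions:
                return []
            return [
                aws_iam.PolicyStatement(
                    # sid='MyModuleExample',
                    actions=['states:ListStateMachines'],
                    resources=['*'],
                ),
            ]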
+ """ + def get_statements(self, group_permissions, **kwargs): + + statements = [ + aws_iam.PolicyStatement( + # sid='SQSRead', + effect=aws_iam.Effect.ALLOW, + actions=[ + "sqs:ListQueues", + ], + resources=["*"] + ), + aws_iam.PolicyStatement( + # sid='SQSCreate', + effect=aws_iam.Effect.ALLOW, + actions=[ + "sqs:CreateQueue", + "sqs:TagQueue", + ], + resources=[f"arn:aws:sqs:*:{self.account}:{self.resource_prefix}*"], + conditions={ + 'StringEquals': { + f'aws:RequestTag/{self.tag_key}': [self.tag_value] + } + } + ), + aws_iam.PolicyStatement( + # sid='SQSManageTeamQueue', + effect=aws_iam.Effect.ALLOW, + actions=[ + "sqs:GetQueueUrl", + "sqs:DeleteQueue", + "sqs:GetQueueAttributes", + "sqs:SetQueueAttributes", + "sqs:ListQueueTags", + "sqs:ListDeadLetterSourceQueues", + "sqs:SendMessage", + "sqs:ReceiveMessage", + "sqs:DeleteMessage", + "sqs:ChangeMessageVisibility", + ], + resources=[f"arn:aws:sqs:*:{self.account}:{self.resource_prefix}*"], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_value] + } + } + ) + ] + return statements diff --git a/backend/dataall/core/environment/cdk/env_role_core_policies/ssm.py b/backend/dataall/core/environment/cdk/env_role_core_policies/ssm.py new file mode 100644 index 000000000..c05bd296b --- /dev/null +++ b/backend/dataall/core/environment/cdk/env_role_core_policies/ssm.py @@ -0,0 +1,54 @@ +from .service_policy import ServicePolicy +from aws_cdk import aws_iam + + +class SSM(ServicePolicy): + """ + Class including all permissions needed to work with AWS SSM Parameter Store. + """ + + def get_statements(self, group_permissions, **kwargs): + statements = [ + aws_iam.PolicyStatement( + # sid="SSMReadAll", + effect=aws_iam.Effect.ALLOW, + actions=[ + "ssm:DescribeParameters", + ], + resources=["*"], + ), + aws_iam.PolicyStatement( + # sid='CreateTeamParameters', + effect=aws_iam.Effect.ALLOW, + actions=[ + 'ssm:AddTagsToResource' + ], + resources=[f"arn:aws:ssm:*:{self.account}:parameter/{self.resource_prefix}*"], + conditions={ + 'StringEquals': { + f'aws:RequestTag/{self.tag_key}': [self.tag_value] + } + }, + ), + aws_iam.PolicyStatement( + # sid='ManageTeamParameters', + effect=aws_iam.Effect.ALLOW, + actions=[ + 'ssm:PutParameter', + 'ssm:DeleteParameter', + 'ssm:GetParameterHistory', + 'ssm:GetParametersByPath', + 'ssm:GetParameters', + 'ssm:GetParameter', + 'ssm:DeleteParameters', + 'ssm:ListTagsForResource', + ], + resources=[f"arn:aws:ssm:*:{self.account}:parameter/{self.resource_prefix}*"], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_value] + } + }, + ) + ] + return statements diff --git a/backend/dataall/core/environment/cdk/environment_stack.py b/backend/dataall/core/environment/cdk/environment_stack.py new file mode 100644 index 000000000..64cb00953 --- /dev/null +++ b/backend/dataall/core/environment/cdk/environment_stack.py @@ -0,0 +1,540 @@ +import logging +import os +import pathlib +from abc import abstractmethod +from typing import List, Type + +from aws_cdk import ( + custom_resources as cr, + aws_s3 as s3, + aws_iam as iam, + aws_sns as sns, + aws_sqs as sqs, + aws_sns_subscriptions as sns_subs, + aws_kms as kms, + aws_athena, + RemovalPolicy, + CfnOutput, + Stack, + Duration, + Tags, +) + +from dataall.core.stacks.services.runtime_stacks_tagging import TagsUtil +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup +from dataall.core.environment.services.environment_service import EnvironmentService +from 
dataall.base.cdkproxy.stacks.manager import stack +from dataall.core.environment.cdk.pivot_role_stack import PivotRole +from dataall.core.environment.cdk.env_role_core_policies.data_policy import S3Policy +from dataall.core.environment.cdk.env_role_core_policies.service_policy import ServicePolicy +from dataall.base import db +from dataall.base.aws.parameter_store import ParameterStoreManager +from dataall.base.aws.sts import SessionHelper +from dataall.base.utils.cdk_nag_utils import CDKNagUtil + +logger = logging.getLogger(__name__) + + +class EnvironmentStackExtension: + @staticmethod + @abstractmethod + def extent(setup: 'EnvironmentSetup'): + raise NotImplementedError + + +@stack(stack='environment') +class EnvironmentSetup(Stack): + """Deploy common environment resources: + - default environment S3 Bucket + - SSM parameters for the Lambdas and Providers + - pivotRole (if configured) + - SNS topic (if subscriptions are enabled) + - Module extension stacks (if module is enabled and has an associated extension stack) + - Deploy team specific resources: teams IAM roles, Athena workgroups + - Set PivotRole as Lake formation data lake Admin - lakeformationdefaultsettings custom resource + """ + module_name = __file__ + _EXTENSIONS: List[Type[EnvironmentStackExtension]] = [] + + @staticmethod + def register(extension: Type[EnvironmentStackExtension]): + EnvironmentSetup._EXTENSIONS.append(extension) + + def environment(self) -> Environment: + return self._environment + + @staticmethod + def get_env_name(): + return os.environ.get('envname', 'local') + + def get_engine(self): + engine = db.get_engine(envname=self.get_env_name()) + return engine + + def get_target(self, target_uri) -> Environment: + engine = self.get_engine() + with engine.scoped_session() as session: + target = session.query(Environment).get(target_uri) + if not target: + raise Exception('ObjectNotFound') + return target + + @staticmethod + def get_environment_group_permissions(engine, environmentUri, group): + with engine.scoped_session() as session: + group_permissions = EnvironmentService.list_group_permissions_internal( + session=session, + uri=environmentUri, + group_uri=group + ) + permission_names = [permission.name for permission in group_permissions] + return permission_names + + @staticmethod + def get_environment_groups(engine, environment: Environment) -> [EnvironmentGroup]: + with engine.scoped_session() as session: + return EnvironmentService.list_environment_invited_groups( + session, + uri=environment.environmentUri, + ) + + @staticmethod + def get_environment_admins_group(engine, environment: Environment) -> [EnvironmentGroup]: + with engine.scoped_session() as session: + return EnvironmentService.get_environment_group( + session, + environment_uri=environment.environmentUri, + group_uri=environment.SamlGroupName, + ) + + def __init__(self, scope, id, target_uri: str = None, **kwargs): + super().__init__( + scope, + id, + description='Cloud formation stack of ENVIRONMENT: {}; URI: {}; DESCRIPTION: {}'.format( + self.get_target(target_uri=target_uri).label, + target_uri, + self.get_target(target_uri=target_uri).description, + )[:1024], + **kwargs, + ) + # Read input + self.target_uri = target_uri + self.pivot_role_name = SessionHelper.get_delegation_role_name() + self.external_id = SessionHelper.get_external_id_secret() + self.dataall_central_account = SessionHelper.get_account() + + pivot_role_as_part_of_environment_stack = ParameterStoreManager.get_parameter_value( + region=os.getenv('AWS_REGION', 
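# The '/dataall/{envname}/pivotRole/enablePivotRoleAutoCreate' SSM parameter read here decides whether
# the environment stack creates a PivotRole nested stack (CDK-managed pivot role) or imports an existing,
# manually created pivot role by ARN further below.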
'eu-west-1'), + parameter_path=f"/dataall/{os.getenv('envname', 'local')}/pivotRole/enablePivotRoleAutoCreate" + ) + self.create_pivot_role = True if pivot_role_as_part_of_environment_stack == "True" else False + self.engine = self.get_engine() + + self._environment = self.get_target(target_uri=target_uri) + + self.environment_groups: [EnvironmentGroup] = self.get_environment_groups( + self.engine, environment=self._environment + ) + + self.environment_admins_group: EnvironmentGroup = self.get_environment_admins_group( + self.engine, self._environment + ) + + # Create or import Pivot role + if self.create_pivot_role is True: + config = { + 'roleName': self.pivot_role_name, + 'accountId': self.dataall_central_account, + 'externalId': self.external_id, + 'resourcePrefix': self._environment.resourcePrefix, + } + pivot_role_stack = PivotRole(self, 'PivotRoleStack', config) + self.pivot_role = iam.Role.from_role_arn( + self, + f'PivotRole{self._environment.environmentUri}', + pivot_role_stack.pivot_role.role_arn, + ) + else: + self.pivot_role = iam.Role.from_role_arn( + self, + f'PivotRole{self._environment.environmentUri}', + f'arn:aws:iam::{self._environment.AwsAccountId}:role/{self.pivot_role_name}', + ) + + # Environment S3 Bucket + default_environment_bucket = s3.Bucket( + self, + 'EnvironmentDefaultBucket', + bucket_name=self._environment.EnvironmentDefaultBucketName, + encryption=s3.BucketEncryption.S3_MANAGED, + removal_policy=RemovalPolicy.RETAIN, + block_public_access=s3.BlockPublicAccess.BLOCK_ALL, + versioned=True, + enforce_ssl=True, + ) + self.default_environment_bucket = default_environment_bucket + + default_environment_bucket.add_to_resource_policy( + iam.PolicyStatement( + sid='AWSLogDeliveryWrite', + effect=iam.Effect.ALLOW, + principals=[iam.ServicePrincipal('logging.s3.amazonaws.com')], + actions=['s3:PutObject', 's3:PutObjectAcl'], + resources=[f'{default_environment_bucket.bucket_arn}/*'], + ) + ) + + default_environment_bucket.add_lifecycle_rule( + abort_incomplete_multipart_upload_after=Duration.days(7), + noncurrent_version_transitions=[ + s3.NoncurrentVersionTransition( + storage_class=s3.StorageClass.INFREQUENT_ACCESS, + transition_after=Duration.days(30), + ), + s3.NoncurrentVersionTransition( + storage_class=s3.StorageClass.GLACIER, + transition_after=Duration.days(60), + ), + ], + transitions=[ + s3.Transition( + storage_class=s3.StorageClass.INTELLIGENT_TIERING, + transition_after=Duration.days(90), + ), + s3.Transition( + storage_class=s3.StorageClass.GLACIER, + transition_after=Duration.days(360), + ), + ], + enabled=True, + ) + + # Create or import team IAM roles + self.default_role = self.create_or_import_environment_admin_group_role() + self.group_roles = self.create_or_import_environment_groups_roles() + + self.create_default_athena_workgroup( + default_environment_bucket, + self._environment.EnvironmentDefaultAthenaWorkGroup, + ) + self.create_athena_workgroups(self.environment_groups, default_environment_bucket) + + # Create SNS topics for subscriptions + if self._environment.subscriptionsEnabled: + subscription_key_policy = iam.PolicyDocument( + assign_sids=True, + statements=[ + iam.PolicyStatement( + actions=[ + "kms:Encrypt", + "kms:Decrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*", + ], + effect=iam.Effect.ALLOW, + principals=[self.default_role] + self.group_roles, + resources=["*"], + conditions={ + "StringEquals": { + "kms:ViaService": [ + f"sqs.{self._environment.region}.amazonaws.com", + f"sns.{self._environment.region}.amazonaws.com", + ] + 
} + } + ), + iam.PolicyStatement( + actions=[ + "kms:DescribeKey", + "kms:List*", + "kms:GetKeyPolicy", + ], + effect=iam.Effect.ALLOW, + principals=[self.default_role] + self.group_roles, + resources=["*"], + ) + ] + ) + subscription_key = kms.Key( + self, + f'dataall-env-{self._environment.environmentUri}-subscription-key', + removal_policy=RemovalPolicy.DESTROY, + alias=f'dataall-env-{self._environment.environmentUri}-subscription-key', + enable_key_rotation=True, + admins=[ + iam.ArnPrincipal(self._environment.CDKRoleArn), + ], + policy=subscription_key_policy + ) + + dlq_queue = sqs.Queue( + self, + f'ProducersSubscriptionsQueue-{self._environment.environmentUri}-dlq', + queue_name=f'{self._environment.resourcePrefix}-producers-dlq-{self._environment.environmentUri}', + retention_period=Duration.days(14), + encryption=sqs.QueueEncryption.KMS, + encryption_master_key=subscription_key, + ) + dlq_queue.add_to_resource_policy( + iam.PolicyStatement( + sid='Enforce TLS for all principals', + effect=iam.Effect.DENY, + principals=[ + iam.AnyPrincipal(), + ], + actions=[ + 'sqs:*', + ], + resources=[dlq_queue.queue_arn], + conditions={ + 'Bool': {'aws:SecureTransport': 'false'}, + }, + ) + ) + self.dlq = sqs.DeadLetterQueue(max_receive_count=2, queue=dlq_queue) + queue = sqs.Queue( + self, + f'ProducersSubscriptionsQueue-{self._environment.environmentUri}', + queue_name=f'{self._environment.resourcePrefix}-producers-queue-{self._environment.environmentUri}', + dead_letter_queue=self.dlq, + encryption=sqs.QueueEncryption.KMS, + encryption_master_key=subscription_key, + ) + + if self._environment.subscriptionsProducersTopicImported: + topic = sns.Topic.from_topic_arn( + self, + 'ProducersTopicImported', + f'arn:aws:sns:{self._environment.region}:{self._environment.AwsAccountId}:{self._environment.subscriptionsProducersTopicName}', + ) + else: + topic = self.create_topic( + self._environment.subscriptionsProducersTopicName, + self.dataall_central_account, + self._environment, + subscription_key + ) + + topic.add_subscription(sns_subs.SqsSubscription(queue)) + + policy = sqs.QueuePolicy( + self, + f'{self._environment.resourcePrefix}ProducersSubscriptionsQueuePolicy', + queues=[queue], + ) + + policy.document.add_statements( + iam.PolicyStatement( + principals=[iam.AccountPrincipal(self.dataall_central_account)], + effect=iam.Effect.ALLOW, + actions=[ + 'sqs:ReceiveMessage', + 'sqs:DeleteMessage', + 'sqs:ChangeMessageVisibility', + 'sqs:GetQueueUrl', + 'sqs:GetQueueAttributes', + ], + resources=[queue.queue_arn], + ), + iam.PolicyStatement( + principals=[iam.ServicePrincipal('sns.amazonaws.com')], + effect=iam.Effect.ALLOW, + actions=['sqs:SendMessage'], + resources=[queue.queue_arn], + conditions={'ArnEquals': {'aws:SourceArn': topic.topic_arn}}, + ), + iam.PolicyStatement( + sid='Enforce TLS for all principals', + effect=iam.Effect.DENY, + principals=[ + iam.AnyPrincipal(), + ], + actions=[ + 'sqs:*', + ], + resources=[queue.queue_arn], + conditions={ + 'Bool': {'aws:SecureTransport': 'false'}, + }, + ), + ) + policy.node.add_dependency(topic) + + self.create_topic( + self._environment.subscriptionsConsumersTopicName, + self.dataall_central_account, + self._environment, + subscription_key + ) + + # print the IAM role arn for this service account + CfnOutput( + self, + f'pivotRoleName-{self._environment.environmentUri}', + export_name=f'pivotRoleName-{self._environment.environmentUri}', + value=self.pivot_role_name, + description='pivotRole name, helps us to distinguish between auto-created 
pivot roles (dataallPivotRole-cdk) and manually created pivot roles (dataallPivotRole)', + ) + + for extension in EnvironmentSetup._EXTENSIONS: + logger.info(f"Adding extension stack{extension.__name__}") + extension.extent(self) + + TagsUtil.add_tags(stack=self, model=Environment, target_type="environment") + + CDKNagUtil.check_rules(self) + + def create_or_import_environment_admin_group_role(self): + if self._environment.EnvironmentDefaultIAMRoleImported: + default_role = iam.Role.from_role_arn( + self, + f'EnvironmentRole{self._environment.environmentUri}Imported', + self._environment.EnvironmentDefaultIAMRoleArn, + ) + return default_role + else: + environment_admin_group_role = self.create_group_environment_role(group=self.environment_admins_group, id='DefaultEnvironmentRole') + return environment_admin_group_role + + def create_or_import_environment_groups_roles(self): + group: EnvironmentGroup + group_roles = [] + for group in self.environment_groups: + if not group.environmentIAMRoleImported: + group_role = self.create_group_environment_role(group=group, id=f'{group.environmentIAMRoleName}') + group_roles.append(group_role) + else: + iam.Role.from_role_arn( + self, + f'{group.groupUri + group.environmentIAMRoleName}', + role_arn=f'arn:aws:iam::{self._environment.AwsAccountId}:role/{group.environmentIAMRoleName}', + ) + return group_roles + + def create_group_environment_role(self, group: EnvironmentGroup, id: str): + + group_permissions = self.get_environment_group_permissions( + self.engine, self._environment.environmentUri, group.groupUri + ) + services_policies = ServicePolicy( + stack=self, + tag_key='Team', + tag_value=group.groupUri, + resource_prefix=self._environment.resourcePrefix, + name=f'{self._environment.resourcePrefix}-{group.groupUri}-{self._environment.environmentUri}-services-policy', + id=f'{self._environment.resourcePrefix}-{group.groupUri}-{self._environment.environmentUri}-services-policy', + role_name=group.environmentIAMRoleName, + account=self._environment.AwsAccountId, + region=self._environment.region, + environment=self._environment, + team=group, + permissions=group_permissions, + ).generate_policies() + + with self.engine.scoped_session() as session: + data_policy = S3Policy( + stack=self, + tag_key='Team', + tag_value=group.groupUri, + resource_prefix=self._environment.resourcePrefix, + name=f'{self._environment.resourcePrefix}-{group.groupUri}-data-policy', + id=f'{self._environment.resourcePrefix}-{group.groupUri}-data-policy', + account=self._environment.AwsAccountId, + region=self._environment.region, + environment=self._environment, + team=group, + ).generate_data_access_policy(session=session) + + group_role = iam.Role( + self, + id, + role_name=group.environmentIAMRoleName, + inline_policies={ + f'{group.environmentIAMRoleName}DataPolicy': data_policy.document, + }, + managed_policies=services_policies, + assumed_by=iam.CompositePrincipal( + iam.ServicePrincipal('glue.amazonaws.com'), + iam.ServicePrincipal('lambda.amazonaws.com'), + iam.ServicePrincipal('sagemaker.amazonaws.com'), + iam.ServicePrincipal('states.amazonaws.com'), + iam.ServicePrincipal('databrew.amazonaws.com'), + iam.ServicePrincipal('codebuild.amazonaws.com'), + iam.ServicePrincipal('codepipeline.amazonaws.com'), + self.pivot_role, + ), + ) + Tags.of(group_role).add('group', group.groupUri) + return group_role + + def create_default_athena_workgroup(self, output_bucket, workgroup_name): + return self.create_athena_workgroup(output_bucket, workgroup_name) + + def 
create_athena_workgroups(self, environment_groups, default_environment_bucket): + for group in environment_groups: + self.create_athena_workgroup(default_environment_bucket, group.environmentAthenaWorkGroup) + + def create_athena_workgroup(self, output_bucket, workgroup_name): + athena_workgroup_output_location = ''.join( + ['s3://', output_bucket.bucket_name, '/athenaqueries/', workgroup_name, '/'] + ) + athena_workgroup = aws_athena.CfnWorkGroup( + self, + f'AthenaWorkGroup{workgroup_name}', + name=workgroup_name, + state='ENABLED', + recursive_delete_option=True, + work_group_configuration=aws_athena.CfnWorkGroup.WorkGroupConfigurationProperty( + enforce_work_group_configuration=True, + result_configuration=aws_athena.CfnWorkGroup.ResultConfigurationProperty( + encryption_configuration=aws_athena.CfnWorkGroup.EncryptionConfigurationProperty( + encryption_option='SSE_S3', + ), + output_location=athena_workgroup_output_location, + ), + requester_pays_enabled=False, + publish_cloud_watch_metrics_enabled=False, + engine_version=aws_athena.CfnWorkGroup.EngineVersionProperty( + selected_engine_version='Athena engine version 2', + ), + ), + ) + return athena_workgroup + + def create_topic(self, construct_id, central_account, environment, kms_key): + actions = [ + 'SNS:GetTopicAttributes', + 'SNS:SetTopicAttributes', + 'SNS:AddPermission', + 'SNS:RemovePermission', + 'SNS:DeleteTopic', + 'SNS:Subscribe', + 'SNS:ListSubscriptionsByTopic', + 'SNS:Publish', + 'SNS:Receive', + ] + topic = sns.Topic( + self, + f'{construct_id}', + topic_name=f'{construct_id}', + master_key=kms_key + ) + + topic.add_to_resource_policy( + iam.PolicyStatement( + principals=[iam.AccountPrincipal(central_account)], + effect=iam.Effect.ALLOW, + actions=actions, + resources=[topic.topic_arn], + ) + ) + topic.add_to_resource_policy( + iam.PolicyStatement( + principals=[iam.AccountPrincipal(environment.AwsAccountId)], + effect=iam.Effect.ALLOW, + actions=actions, + resources=[topic.topic_arn], + ) + ) + return topic diff --git a/backend/dataall/core/environment/cdk/pivot_role_core_policies/__init__.py b/backend/dataall/core/environment/cdk/pivot_role_core_policies/__init__.py new file mode 100644 index 000000000..f3f259887 --- /dev/null +++ b/backend/dataall/core/environment/cdk/pivot_role_core_policies/__init__.py @@ -0,0 +1,25 @@ +"""Contains the code for creating pivot role policies""" + +from dataall.core.environment.cdk.pivot_role_core_policies import ( + cloudformation, + iam, + kms, + logging, + s3, + sns, + sqs, + ssm, + sts +) + +__all__ = [ + "cloudformation", + "iam", + "kms", + "logging", + "s3", + "sns", + "sqs", + "ssm", + "sts" +] diff --git a/backend/dataall/core/environment/cdk/pivot_role_core_policies/cloudformation.py b/backend/dataall/core/environment/cdk/pivot_role_core_policies/cloudformation.py new file mode 100644 index 000000000..0ea69f9f7 --- /dev/null +++ b/backend/dataall/core/environment/cdk/pivot_role_core_policies/cloudformation.py @@ -0,0 +1,28 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class CloudformationPivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with AWS CloudFormation. + It allows pivot role to: + - .... 
+ """ + def get_statements(self): + statements = [ + iam.PolicyStatement( + sid='CloudFormation', + effect=iam.Effect.ALLOW, + actions=[ + "cloudformation:DeleteStack", + "cloudformation:DescribeStacks", + "cloudformation:DescribeStackEvents", + "cloudformation:DescribeStackResources" + ], + resources=[ + f'arn:aws:cloudformation:*:{self.account}:stack/{self.env_resource_prefix}*/*', + f'arn:aws:cloudformation:*:{self.account}:stack/CDKToolkit/*', + ], + ), + ] + return statements diff --git a/backend/dataall/core/environment/cdk/pivot_role_core_policies/iam.py b/backend/dataall/core/environment/cdk/pivot_role_core_policies/iam.py new file mode 100644 index 000000000..5fb23fc26 --- /dev/null +++ b/backend/dataall/core/environment/cdk/pivot_role_core_policies/iam.py @@ -0,0 +1,32 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class IAMPivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with AWS IAM. + It allows pivot role to: + - .... + """ + def get_statements(self): + statements = [ + # IAM - needed for consumption roles and for S3 sharing + iam.PolicyStatement( + sid='IAMListGet', + effect=iam.Effect.ALLOW, + actions=[ + 'iam:ListRoles', + 'iam:Get*' + ], resources=['*'] + ), + iam.PolicyStatement( + sid="PassRole", + actions=[ + 'iam:PassRole', + ], + resources=[ + f'arn:aws:iam::{self.account}:role/{self.role_name}', + ], + ), + ] + return statements diff --git a/backend/dataall/core/environment/cdk/pivot_role_core_policies/kms.py b/backend/dataall/core/environment/cdk/pivot_role_core_policies/kms.py new file mode 100644 index 000000000..4d47b6afc --- /dev/null +++ b/backend/dataall/core/environment/cdk/pivot_role_core_policies/kms.py @@ -0,0 +1,37 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class KMSPivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with AWS KMS. + It allows pivot role to: + - .... + """ + def get_statements(self): + statements = [ + iam.PolicyStatement( + sid='KMS', + effect=iam.Effect.ALLOW, + actions=[ + 'kms:Decrypt', + 'kms:Encrypt', + 'kms:GenerateDataKey*', + 'kms:PutKeyPolicy', + 'kms:ReEncrypt*', + 'kms:TagResource', + 'kms:UntagResource', + ], + resources=['*'], + ), + iam.PolicyStatement( + sid='KMSList', + effect=iam.Effect.ALLOW, + actions=[ + 'kms:List*', + 'kms:DescribeKey', + ], + resources=['*'], + ), + ] + return statements diff --git a/backend/dataall/core/environment/cdk/pivot_role_core_policies/logging.py b/backend/dataall/core/environment/cdk/pivot_role_core_policies/logging.py new file mode 100644 index 000000000..7804632ac --- /dev/null +++ b/backend/dataall/core/environment/cdk/pivot_role_core_policies/logging.py @@ -0,0 +1,42 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class LoggingPivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with AWS CloudWatch. + It allows pivot role to: + - .... 
+ """ + def get_statements(self): + statements = [ + # CloudWatch Metrics + iam.PolicyStatement( + sid='CWMetrics', + effect=iam.Effect.ALLOW, + actions=[ + 'cloudwatch:PutMetricData', + 'cloudwatch:GetMetricData', + 'cloudwatch:GetMetricStatistics' + ], + resources=['*'], + ), + # Logs + iam.PolicyStatement( + sid='Logs', + effect=iam.Effect.ALLOW, + actions=[ + 'logs:CreateLogGroup', + 'logs:CreateLogStream', + ], + resources=[ + f'arn:aws:logs:*:{self.account}:log-group:/aws/lambda/*', + f'arn:aws:logs:*:{self.account}:log-group:/{self.env_resource_prefix}*', + ], + ), + # Logging + iam.PolicyStatement( + sid='Logging', effect=iam.Effect.ALLOW, actions=['logs:PutLogEvents'], resources=['*'] + ), + ] + return statements diff --git a/backend/dataall/core/environment/cdk/pivot_role_core_policies/s3.py b/backend/dataall/core/environment/cdk/pivot_role_core_policies/s3.py new file mode 100644 index 000000000..360fcc03b --- /dev/null +++ b/backend/dataall/core/environment/cdk/pivot_role_core_policies/s3.py @@ -0,0 +1,47 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class S3PivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with AWS S3. + It allows pivot role to: + - .... + """ + def get_statements(self): + statements = [ + # Read Buckets + iam.PolicyStatement( + sid='ReadBuckets', + effect=iam.Effect.ALLOW, + actions=[ + 's3:ListAllMyBuckets', + 's3:GetBucketLocation', + 's3:PutBucketTagging' + ], + resources=['*'], + ), + # S3 Managed Buckets + iam.PolicyStatement( + sid='ManagedBuckets', + effect=iam.Effect.ALLOW, + actions=[ + 's3:List*', + 's3:Delete*', + 's3:Get*', + 's3:Put*' + ], + resources=[f'arn:aws:s3:::{self.env_resource_prefix}*'], + ), + # AWS Logging Buckets + iam.PolicyStatement( + sid='AWSLoggingBuckets', + effect=iam.Effect.ALLOW, + actions=[ + 's3:PutBucketAcl', + 's3:PutBucketNotification' + ], + resources=[f'arn:aws:s3:::{self.env_resource_prefix}-logging-*'], + ), + ] + return statements diff --git a/backend/dataall/core/environment/cdk/pivot_role_core_policies/sns.py b/backend/dataall/core/environment/cdk/pivot_role_core_policies/sns.py new file mode 100644 index 000000000..14a9c2e2d --- /dev/null +++ b/backend/dataall/core/environment/cdk/pivot_role_core_policies/sns.py @@ -0,0 +1,32 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class SNSPivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with AWS SNS. + It allows pivot role to: + - .... 
+ """ + def get_statements(self): + statements = [ + iam.PolicyStatement( + sid='SNSPublish', + effect=iam.Effect.ALLOW, + actions=[ + 'sns:Publish', + 'sns:SetTopicAttributes', + 'sns:GetTopicAttributes', + 'sns:DeleteTopic', + 'sns:Subscribe', + 'sns:TagResource', + 'sns:UntagResource', + 'sns:CreateTopic', + ], + resources=[f'arn:aws:sns:*:{self.account}:{self.env_resource_prefix}*'], + ), + iam.PolicyStatement( + sid='SNSList', effect=iam.Effect.ALLOW, actions=['sns:ListTopics'], resources=['*'] + ) + ] + return statements diff --git a/backend/dataall/core/environment/cdk/pivot_role_core_policies/sqs.py b/backend/dataall/core/environment/cdk/pivot_role_core_policies/sqs.py new file mode 100644 index 000000000..ca2dff599 --- /dev/null +++ b/backend/dataall/core/environment/cdk/pivot_role_core_policies/sqs.py @@ -0,0 +1,27 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class SQSPivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with AWS SQS. + It allows pivot role to: + - .... + """ + def get_statements(self): + statements = [ + # SQS - support SQS queues + iam.PolicyStatement( + sid='SQSList', effect=iam.Effect.ALLOW, actions=['sqs:ListQueues'], resources=['*'] + ), + iam.PolicyStatement( + sid='SQS', + effect=iam.Effect.ALLOW, + actions=[ + 'sqs:ReceiveMessage', + 'sqs:SendMessage' + ], + resources=[f'arn:aws:sqs:*:{self.account}:{self.env_resource_prefix}*'], + ) + ] + return statements diff --git a/backend/dataall/core/environment/cdk/pivot_role_core_policies/ssm.py b/backend/dataall/core/environment/cdk/pivot_role_core_policies/ssm.py new file mode 100644 index 000000000..f3f9f7697 --- /dev/null +++ b/backend/dataall/core/environment/cdk/pivot_role_core_policies/ssm.py @@ -0,0 +1,24 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class SSMPivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with AWS SSM. + It allows pivot role to: + - .... + """ + def get_statements(self): + statements = [ + # SSM Parameter Store + iam.PolicyStatement( + sid='ParameterStore', + effect=iam.Effect.ALLOW, + actions=['ssm:GetParameter'], + resources=[ + f'arn:aws:ssm:*:{self.account}:parameter/{self.env_resource_prefix}/*', + f'arn:aws:ssm:*:{self.account}:parameter/dataall/*', + ], + ), + ] + return statements diff --git a/backend/dataall/core/environment/cdk/pivot_role_core_policies/sts.py b/backend/dataall/core/environment/cdk/pivot_role_core_policies/sts.py new file mode 100644 index 000000000..b9c6f8734 --- /dev/null +++ b/backend/dataall/core/environment/cdk/pivot_role_core_policies/sts.py @@ -0,0 +1,22 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class STSPivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with AWS STS. + It allows pivot role to: + - .... 
+ """ + def get_statements(self): + statements = [ + iam.PolicyStatement( + sid='STS', + effect=iam.Effect.ALLOW, + actions=['sts:AssumeRole'], + resources=[ + f'arn:aws:iam::{self.account}:role/{self.env_resource_prefix}*' + ], + ), + ] + return statements diff --git a/backend/dataall/core/environment/cdk/pivot_role_stack.py b/backend/dataall/core/environment/cdk/pivot_role_stack.py new file mode 100644 index 000000000..6329b86ca --- /dev/null +++ b/backend/dataall/core/environment/cdk/pivot_role_stack.py @@ -0,0 +1,131 @@ +import logging +from typing import List +from constructs import Construct +from aws_cdk import Duration, aws_iam as iam, NestedStack + +logger = logging.getLogger(__name__) + + +class PivotRoleStatementSet(object): + def __init__( + self, + stack, + env_resource_prefix, + role_name, + account, + region + ): + self.stack = stack + self.env_resource_prefix = env_resource_prefix + self.role_name = role_name + self.account = account + self.region = region + + def generate_policies(self) -> List[iam.ManagedPolicy]: + """ + Creates a list of aws_iam.Policy based on declared subclasses of Policy object + """ + policies = [] + statements = [] + services = PivotRoleStatementSet.__subclasses__() + logger.info(f'Found {len(services)} subclasses of PivotRoleStatementSet') + logger.info(f'PivotroleStatement variables: {self.env_resource_prefix}, {self.role_name}, {self.account}, {self.region}') + + for service in services: + statements.extend(service.get_statements(self)) + logger.info(f'Adding {service.__name__} statements to policy') + logger.info(f'statements: {str(service.get_statements(self))}') + + statements_chunks: list = [ + statements[i: i + 10] for i in range(0, len(statements), 10) + ] + + for index, chunk in enumerate(statements_chunks): + policies.append( + iam.ManagedPolicy( + self.stack, + f'PivotRolePolicy-{index+1}', + managed_policy_name=f'{self.env_resource_prefix}-pivotrole-cdk-policy-{index+1}', + statements=chunk, + ) + ) + return policies + + def get_statements(self) -> List[iam.PolicyStatement]: + """ + This method returns the list of IAM policy statements needed to be added to the pivot role policies + :return: list + """ + raise NotImplementedError( + 'PivotRoleStatementSet subclasses need to implement the get_statements class method' + ) + + +class PivotRole(NestedStack): + def __init__(self, scope: Construct, construct_id: str, config, **kwargs) -> None: + super().__init__(scope, construct_id, **kwargs) + self.env_resource_prefix = config['resourcePrefix'] + self.role_name = config['roleName'] + + from dataall.core.environment.cdk import pivot_role_core_policies + + # Create Pivot IAM Role + self.pivot_role = self.create_pivot_role( + principal_id=config['accountId'], + external_id=config['externalId'], + ) + + # Data.All IAM Lake Formation service role creation + self.lf_service_role = iam.CfnServiceLinkedRole( + self, 'LakeFormationSLR', aws_service_name='lakeformation.amazonaws.com' + ) + + def create_pivot_role(self, principal_id: str, external_id: str) -> iam.Role: + """ + Creates an IAM Role that will enable data.all to interact with this Data Account + + :param str principal_id: AWS Account ID of central data.all + :param str external_id: External ID provided by data.all + :returns: Created IAM Role + :rtype: iam.Role + """ + managed_policies = PivotRoleStatementSet( + stack=self, + env_resource_prefix=self.env_resource_prefix, + role_name=self.role_name, + account=self.account, + region=self.region + ).generate_policies() + + 
logger.info(f'Managed Policies: {managed_policies}') + role = iam.Role( + self, + 'DataAllPivotRole-cdk', + role_name=self.role_name, + assumed_by=iam.CompositePrincipal( + iam.ServicePrincipal('lakeformation.amazonaws.com'), + iam.ServicePrincipal('glue.amazonaws.com'), + iam.ServicePrincipal('lambda.amazonaws.com'), + ), + path='/', + max_session_duration=Duration.hours(12), + managed_policies=managed_policies + ) + + role.assume_role_policy.add_statements( + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + principals=[iam.AccountPrincipal(account_id=principal_id)], + actions=['sts:AssumeRole'], + conditions={ + 'StringEquals': {'sts:ExternalId': external_id}, + 'StringLike': {"aws:PrincipalArn": [ + f"arn:aws:iam::{principal_id}:role/*graphql-role", + f"arn:aws:iam::{principal_id}:role/*awsworker-role", + f"arn:aws:iam::{principal_id}:role/*ecs-tasks-role" + ]} + }, + ) + ) + + return role diff --git a/tests/api/__init__.py b/backend/dataall/core/environment/db/__init__.py similarity index 100% rename from tests/api/__init__.py rename to backend/dataall/core/environment/db/__init__.py diff --git a/backend/dataall/core/environment/db/environment_models.py b/backend/dataall/core/environment/db/environment_models.py new file mode 100644 index 000000000..a05df3259 --- /dev/null +++ b/backend/dataall/core/environment/db/environment_models.py @@ -0,0 +1,89 @@ +"""The package contains the database models that are related to the environment""" +import datetime + +from sqlalchemy import Boolean, Column, DateTime, String, ForeignKey +from sqlalchemy.orm import query_expression +from dataall.base.db import Resource, Base, utils + +from dataall.core.environment.api.enums import EnvironmentPermission + + +class Environment(Resource, Base): + __tablename__ = 'environment' + organizationUri = Column(String, nullable=False) + environmentUri = Column(String, primary_key=True, default=utils.uuid('environment')) + AwsAccountId = Column(String, nullable=False) + region = Column(String, nullable=False, default='eu-west-1') + cognitoGroupName = Column(String, nullable=True) + resourcePrefix = Column(String, nullable=False, default='dataall') + + validated = Column(Boolean, default=False) + environmentType = Column(String, nullable=False, default='Data') + isOrganizationDefaultEnvironment = Column(Boolean, default=False) + EnvironmentDefaultIAMRoleName = Column(String, nullable=False) + EnvironmentDefaultIAMRoleImported = Column(Boolean, default=False) + EnvironmentDefaultIAMRoleArn = Column(String, nullable=False) + EnvironmentDefaultBucketName = Column(String) + EnvironmentDefaultAthenaWorkGroup = Column(String) + roleCreated = Column(Boolean, nullable=False, default=False) + + userRoleInEnvironment = query_expression() + + SamlGroupName = Column(String, nullable=True) + CDKRoleArn = Column(String, nullable=False) + + subscriptionsEnabled = Column(Boolean, default=False) + subscriptionsProducersTopicName = Column(String) + subscriptionsProducersTopicImported = Column(Boolean, default=False) + subscriptionsConsumersTopicName = Column(String) + subscriptionsConsumersTopicImported = Column(Boolean, default=False) + + +class EnvironmentGroup(Base): + __tablename__ = 'environment_group_permission' + groupUri = Column(String, primary_key=True) + environmentUri = Column(String, primary_key=True) + invitedBy = Column(String, nullable=True) + environmentIAMRoleArn = Column(String, nullable=True) + environmentIAMRoleName = Column(String, nullable=True) + environmentIAMRoleImported = Column(Boolean, default=False) + 
environmentAthenaWorkGroup = Column(String, nullable=True) + description = Column(String, default='No description provided') + created = Column(DateTime, default=datetime.datetime.now) + updated = Column(DateTime, onupdate=datetime.datetime.now) + deleted = Column(DateTime) + + # environmentRole is the role of the entity (group or user) in the Environment + groupRoleInEnvironment = Column( + String, nullable=False, default=EnvironmentPermission.Invited.value + ) + + +class EnvironmentParameter(Base): + """Represent the parameter of the environment""" + __tablename__ = 'environment_parameters' + environmentUri = Column(String, ForeignKey("environment.environmentUri"), primary_key=True) + key = Column('paramKey', String, primary_key=True) + value = Column('paramValue', String, nullable=True) + + def __init__(self, env_uri, key, value): + super().__init__() + self.environmentUri = env_uri + self.key = key + self.value = value + + def __repr__(self): + return f'EnvironmentParameter(paramKey={self.key}, paramValue={self.value})' + + +class ConsumptionRole(Base): + __tablename__ = 'consumptionrole' + consumptionRoleUri = Column(String, primary_key=True, default=utils.uuid('group')) + consumptionRoleName = Column(String, nullable=False) + environmentUri = Column(String, nullable=False) + groupUri = Column(String, nullable=False) + IAMRoleName = Column(String, nullable=False) + IAMRoleArn = Column(String, nullable=False) + created = Column(DateTime, default=datetime.datetime.now) + updated = Column(DateTime, onupdate=datetime.datetime.now) + deleted = Column(DateTime) diff --git a/backend/dataall/core/environment/db/environment_repositories.py b/backend/dataall/core/environment/db/environment_repositories.py new file mode 100644 index 000000000..58e1195d4 --- /dev/null +++ b/backend/dataall/core/environment/db/environment_repositories.py @@ -0,0 +1,46 @@ +from dataall.core.environment.db.environment_models import EnvironmentParameter, Environment +from sqlalchemy.sql import and_ + +from dataall.base.db import exceptions + + +class EnvironmentParameterRepository: + """CRUD operations for EnvironmentParameter""" + + def __init__(self, session): + self._session = session + + def get_param(self, env_uri, param_key): + return self._session.query(EnvironmentParameter).filter( + and_( + EnvironmentParameter.environmentUri == env_uri, + EnvironmentParameter.key == param_key + ) + ).first() + + def get_params(self, env_uri): + return self._session.query(EnvironmentParameter).filter( + EnvironmentParameter.environmentUri == env_uri + ) + + def update_params(self, env_uri, params): + """Rewrite all parameters for the environment""" + self.delete_params(env_uri) + self._session.add_all(params) + + def delete_params(self, env_uri): + """Erase all environment parameters""" + self._session.query(EnvironmentParameter).filter( + EnvironmentParameter.environmentUri == env_uri + ).delete() + + +class EnvironmentRepository: + @staticmethod + def get_environment_by_uri(session, uri): + if not uri: + raise exceptions.RequiredParameter('environmentUri') + environment: Environment = session.query(Environment).get(uri) + if not environment: + raise exceptions.ObjectNotFound(Environment.__name__, uri) + return environment diff --git a/backend/dataall/core/environment/env_permission_checker.py b/backend/dataall/core/environment/env_permission_checker.py new file mode 100644 index 000000000..92d57b0c2 --- /dev/null +++ b/backend/dataall/core/environment/env_permission_checker.py @@ -0,0 +1,30 @@ +from dataall.base.context 
import get_context, RequestContext +from dataall.core.permissions.db.group_policy_repositories import GroupPolicy +from dataall.base.utils.decorator_utls import process_func + + +def _check_group_environment_permission(session, permission, uri, admin_group): + context: RequestContext = get_context() + GroupPolicy.check_group_environment_permission( + session=session, + username=context.username, + groups=context.groups, + uri=uri, + group=admin_group, + permission_name=permission, + ) + + +def has_group_permission(permission): + def decorator(f): + fn, fn_decorator = process_func(f) + + def decorated(*args, admin_group, uri, **kwargs): + with get_context().db_engine.scoped_session() as session: + _check_group_environment_permission(session, permission, uri, admin_group) + + return fn(*args, uri=uri, admin_group=admin_group, **kwargs) + + return fn_decorator(decorated) + + return decorator diff --git a/tests/cdkproxy/__init__.py b/backend/dataall/core/environment/services/__init__.py similarity index 100% rename from tests/cdkproxy/__init__.py rename to backend/dataall/core/environment/services/__init__.py diff --git a/backend/dataall/core/environment/services/env_stack_finder.py b/backend/dataall/core/environment/services/env_stack_finder.py new file mode 100644 index 000000000..106fec5b6 --- /dev/null +++ b/backend/dataall/core/environment/services/env_stack_finder.py @@ -0,0 +1,17 @@ +from abc import ABC +from typing import List + + +class StackFinder(ABC): + _FINDERS: List['StackFinder'] = [] + + @staticmethod + def all(): + return StackFinder._FINDERS + + def __init__(self): + StackFinder._FINDERS.append(self) + + def find_stack_uris(self, session) -> List[str]: + """Finds stacks to update""" + raise NotImplementedError("find_stack_uris is not implemented") diff --git a/backend/dataall/core/environment/services/environment_resource_manager.py b/backend/dataall/core/environment/services/environment_resource_manager.py new file mode 100644 index 000000000..bc74f01bf --- /dev/null +++ b/backend/dataall/core/environment/services/environment_resource_manager.py @@ -0,0 +1,59 @@ +from abc import ABC +from typing import List + + +class EnvironmentResource(ABC): + @staticmethod + def count_resources(session, environment, group_uri) -> int: + return 0 + + @staticmethod + def delete_env(session, environment): + pass + + @staticmethod + def update_env(session, environment): + return False + + @staticmethod + def count_role_resources(session, role_uri): + return 0 + + +class EnvironmentResourceManager: + """ + API for managing environment and environment group lifecycle. + Contains callbacks that are invoked when something is happened with the environment. 
+ """ + _resources: List[EnvironmentResource] = [] + + @classmethod + def register(cls, resource: EnvironmentResource): + cls._resources.append(resource) + + @classmethod + def count_group_resources(cls, session, environment, group_uri) -> int: + counter = 0 + for resource in cls._resources: + counter += resource.count_resources(session, environment, group_uri) + return counter + + @classmethod + def deploy_updated_stack(cls, session, prev_prefix, environment): + deploy_stack = prev_prefix != environment.resourcePrefix + for resource in cls._resources: + deploy_stack |= resource.update_env(session, environment) + + return deploy_stack + + @classmethod + def delete_env(cls, session, environment): + for resource in cls._resources: + resource.delete_env(session, environment) + + @classmethod + def count_consumption_role_resources(cls, session, role_uri): + counter = 0 + for resource in cls._resources: + counter += resource.count_role_resources(session, role_uri) + return counter diff --git a/backend/dataall/core/environment/services/environment_service.py b/backend/dataall/core/environment/services/environment_service.py new file mode 100644 index 000000000..78635cd35 --- /dev/null +++ b/backend/dataall/core/environment/services/environment_service.py @@ -0,0 +1,871 @@ +import logging +import re + +from sqlalchemy import or_ +from sqlalchemy.orm import Query +from sqlalchemy.sql import and_ + +from dataall.base.context import get_context +from dataall.core.activity.db.activity_models import Activity +from dataall.core.environment.db.environment_models import EnvironmentParameter, ConsumptionRole +from dataall.core.environment.db.environment_repositories import EnvironmentParameterRepository, EnvironmentRepository +from dataall.core.environment.services.environment_resource_manager import EnvironmentResourceManager +from dataall.core.permissions.db.permission_repositories import Permission +from dataall.core.permissions.db.permission_models import PermissionType +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.permissions.permission_checker import has_resource_permission, has_tenant_permission +from dataall.core.vpc.db.vpc_models import Vpc +from dataall.base.db.paginator import paginate +from dataall.base.utils.naming_convention import ( + NamingConventionService, + NamingConventionPattern, +) +from dataall.base.db import exceptions +from dataall.core.permissions import permissions +from dataall.core.organizations.db.organization_repositories import Organization +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup +from dataall.core.environment.api.enums import EnvironmentPermission, EnvironmentType + +from dataall.core.stacks.db.keyvaluetag_repositories import KeyValueTag +from dataall.core.stacks.db.stack_models import Stack + +log = logging.getLogger(__name__) + + +class EnvironmentService: + + @staticmethod + @has_tenant_permission(permissions.MANAGE_ENVIRONMENTS) + @has_resource_permission(permissions.LINK_ENVIRONMENT) + def create_environment(session, uri, data=None): + context = get_context() + EnvironmentService._validate_creation_params(data, uri) + organization = Organization.get_organization_by_uri(session, uri) + env = Environment( + organizationUri=data.get('organizationUri'), + label=data.get('label', 'Unnamed'), + tags=data.get('tags', []), + owner=context.username, + description=data.get('description', ''), + environmentType=data.get('type', EnvironmentType.Data.value), + 
AwsAccountId=data.get('AwsAccountId'), + region=data.get('region'), + SamlGroupName=data['SamlGroupName'], + validated=False, + isOrganizationDefaultEnvironment=False, + userRoleInEnvironment=EnvironmentPermission.Owner.value, + EnvironmentDefaultIAMRoleName=data.get( + 'EnvironmentDefaultIAMRoleName', 'unknown' + ), + EnvironmentDefaultIAMRoleArn=f'arn:aws:iam::{data.get("AwsAccountId")}:role/{data.get("EnvironmentDefaultIAMRoleName")}', + CDKRoleArn=f"arn:aws:iam::{data.get('AwsAccountId')}:role/{data['cdk_role_name']}", + resourcePrefix=data.get('resourcePrefix'), + ) + + session.add(env) + session.commit() + + EnvironmentService._update_env_parameters(session, env, data) + + env.EnvironmentDefaultBucketName = NamingConventionService( + target_uri=env.environmentUri, + target_label=env.label, + pattern=NamingConventionPattern.S3, + resource_prefix=env.resourcePrefix, + ).build_compliant_name() + + env.EnvironmentDefaultAthenaWorkGroup = NamingConventionService( + target_uri=env.environmentUri, + target_label=env.label, + pattern=NamingConventionPattern.DEFAULT, + resource_prefix=env.resourcePrefix, + ).build_compliant_name() + + if not data.get('EnvironmentDefaultIAMRoleName'): + env_role_name = NamingConventionService( + target_uri=env.environmentUri, + target_label=env.label, + pattern=NamingConventionPattern.IAM, + resource_prefix=env.resourcePrefix, + ).build_compliant_name() + env.EnvironmentDefaultIAMRoleName = env_role_name + env.EnvironmentDefaultIAMRoleArn = ( + f'arn:aws:iam::{env.AwsAccountId}:role/{env_role_name}' + ) + env.EnvironmentDefaultIAMRoleImported = False + else: + env.EnvironmentDefaultIAMRoleName = data['EnvironmentDefaultIAMRoleName'] + env.EnvironmentDefaultIAMRoleArn = f'arn:aws:iam::{env.AwsAccountId}:role/{env.EnvironmentDefaultIAMRoleName}' + env.EnvironmentDefaultIAMRoleImported = True + + if data.get('vpcId'): + vpc = Vpc( + environmentUri=env.environmentUri, + region=env.region, + AwsAccountId=env.AwsAccountId, + VpcId=data.get('vpcId'), + privateSubnetIds=data.get('privateSubnetIds', []), + publicSubnetIds=data.get('publicSubnetIds', []), + SamlGroupName=data['SamlGroupName'], + owner=context.username, + label=f"{env.name}-{data.get('vpcId')}", + name=f"{env.name}-{data.get('vpcId')}", + default=True, + ) + session.add(vpc) + session.commit() + ResourcePolicy.attach_resource_policy( + session=session, + group=data['SamlGroupName'], + permissions=permissions.NETWORK_ALL, + resource_uri=vpc.vpcUri, + resource_type=Vpc.__name__, + ) + env_group = EnvironmentGroup( + environmentUri=env.environmentUri, + groupUri=data['SamlGroupName'], + groupRoleInEnvironment=EnvironmentPermission.Owner.value, + environmentIAMRoleArn=env.EnvironmentDefaultIAMRoleArn, + environmentIAMRoleName=env.EnvironmentDefaultIAMRoleName, + environmentAthenaWorkGroup=env.EnvironmentDefaultAthenaWorkGroup, + ) + session.add(env_group) + ResourcePolicy.attach_resource_policy( + session=session, + resource_uri=env.environmentUri, + group=data['SamlGroupName'], + permissions=permissions.ENVIRONMENT_ALL, + resource_type=Environment.__name__, + ) + session.commit() + + activity = Activity( + action='ENVIRONMENT:CREATE', + label='ENVIRONMENT:CREATE', + owner=context.username, + summary=f'{context.username} linked environment {env.AwsAccountId} to organization {organization.name}', + targetUri=env.environmentUri, + targetType='env', + ) + session.add(activity) + return env + + @staticmethod + def _validate_creation_params(data, uri): + if not uri: + raise 
exceptions.RequiredParameter('organizationUri') + if not data: + raise exceptions.RequiredParameter('data') + if not data.get('label'): + raise exceptions.RequiredParameter('label') + if not data.get('SamlGroupName'): + raise exceptions.RequiredParameter('group') + EnvironmentService._validate_resource_prefix(data) + + @staticmethod + def _validate_resource_prefix(data): + if data.get('resourcePrefix') and not bool( + re.match(r'^[a-z-]+$', data.get('resourcePrefix')) + ): + raise exceptions.InvalidInput( + 'resourcePrefix', + data.get('resourcePrefix'), + 'must match the pattern ^[a-z-]+$', + ) + + @staticmethod + @has_tenant_permission(permissions.MANAGE_ENVIRONMENTS) + @has_resource_permission(permissions.UPDATE_ENVIRONMENT) + def update_environment(session, uri, data=None): + EnvironmentService._validate_resource_prefix(data) + environment = EnvironmentService.get_environment_by_uri(session, uri) + if data.get('label'): + environment.label = data.get('label') + if data.get('description'): + environment.description = data.get('description', 'No description provided') + if data.get('tags'): + environment.tags = data.get('tags') + if data.get('resourcePrefix'): + environment.resourcePrefix = data.get('resourcePrefix') + + EnvironmentService._update_env_parameters(session, environment, data) + + ResourcePolicy.attach_resource_policy( + session=session, + resource_uri=environment.environmentUri, + group=environment.SamlGroupName, + permissions=permissions.ENVIRONMENT_ALL, + resource_type=Environment.__name__, + ) + return environment + + @staticmethod + def _update_env_parameters(session, env: Environment, data): + """Removes old parameters and creates new parameters associated with the environment""" + params = data.get("parameters") + if not params: + return + + env_uri = env.environmentUri + new_params = [ + EnvironmentParameter(env_uri, param.get("key"), param.get("value")) + for param in params + ] + EnvironmentParameterRepository(session).update_params(env_uri, new_params) + + @staticmethod + @has_tenant_permission(permissions.MANAGE_ENVIRONMENTS) + @has_resource_permission(permissions.INVITE_ENVIRONMENT_GROUP) + def invite_group(session, uri, data=None) -> (Environment, EnvironmentGroup): + EnvironmentService.validate_invite_params(data) + + group: str = data['groupUri'] + + EnvironmentService.validate_permissions(session, uri, data['permissions'], group) + + environment = EnvironmentService.get_environment_by_uri(session, uri) + + group_membership = EnvironmentService.find_environment_group( + session, group, environment.environmentUri + ) + if group_membership: + raise exceptions.UnauthorizedOperation( + action='INVITE_TEAM', + message=f'Team {group} is already a member of the environment {environment.name}', + ) + + if data.get('environmentIAMRoleName'): + env_group_iam_role_name = data['environmentIAMRoleName'] + env_role_imported = True + else: + env_group_iam_role_name = NamingConventionService( + target_uri=environment.environmentUri, + target_label=group, + pattern=NamingConventionPattern.IAM, + resource_prefix=environment.resourcePrefix, + ).build_compliant_name() + env_role_imported = False + + athena_workgroup = NamingConventionService( + target_uri=environment.environmentUri, + target_label=group, + pattern=NamingConventionPattern.DEFAULT, + resource_prefix=environment.resourcePrefix, + ).build_compliant_name() + + environment_group = EnvironmentGroup( + environmentUri=environment.environmentUri, + groupUri=group, + invitedBy=get_context().username, + 
environmentIAMRoleName=env_group_iam_role_name, + environmentIAMRoleArn=f'arn:aws:iam::{environment.AwsAccountId}:role/{env_group_iam_role_name}', + environmentIAMRoleImported=env_role_imported, + environmentAthenaWorkGroup=athena_workgroup, + ) + session.add(environment_group) + session.commit() + ResourcePolicy.attach_resource_policy( + session=session, + group=group, + resource_uri=environment.environmentUri, + permissions=data['permissions'], + resource_type=Environment.__name__, + ) + return environment, environment_group + + @staticmethod + def validate_permissions(session, uri, g_permissions, group): + if permissions.INVITE_ENVIRONMENT_GROUP in g_permissions: + g_permissions.append(permissions.LIST_ENVIRONMENT_GROUPS) + g_permissions.append(permissions.REMOVE_ENVIRONMENT_GROUP) + + if permissions.ADD_ENVIRONMENT_CONSUMPTION_ROLES in g_permissions: + g_permissions.append(permissions.LIST_ENVIRONMENT_CONSUMPTION_ROLES) + + if permissions.CREATE_NETWORK in g_permissions: + g_permissions.append(permissions.LIST_ENVIRONMENT_NETWORKS) + + g_permissions.append(permissions.GET_ENVIRONMENT) + g_permissions.append(permissions.LIST_ENVIRONMENT_GROUPS) + g_permissions.append(permissions.LIST_ENVIRONMENT_GROUP_PERMISSIONS) + g_permissions.append(permissions.LIST_ENVIRONMENT_NETWORKS) + g_permissions.append(permissions.CREDENTIALS_ENVIRONMENT) + + g_permissions = list(set(g_permissions)) + + if g_permissions not in permissions.ENVIRONMENT_INVITED: + exceptions.PermissionUnauthorized( + action='INVITE_TEAM', group_name=group, resource_uri=uri + ) + + env_group_permissions = [] + for p in g_permissions: + env_group_permissions.append( + Permission.find_permission_by_name( + session=session, + permission_name=p, + permission_type=PermissionType.RESOURCE.name, + ) + ) + + @staticmethod + @has_tenant_permission(permissions.MANAGE_ENVIRONMENTS) + @has_resource_permission(permissions.REMOVE_ENVIRONMENT_GROUP) + def remove_group(session, uri, group): + environment = EnvironmentService.get_environment_by_uri(session, uri) + + if group == environment.SamlGroupName: + raise exceptions.UnauthorizedOperation( + action='REMOVE_TEAM', + message=f'Team: {group} is the owner of the environment {environment.name}', + ) + + group_env_objects_count = EnvironmentResourceManager.count_group_resources( + session=session, + environment=environment, + group_uri=group + ) + + if group_env_objects_count > 0: + raise exceptions.EnvironmentResourcesFound( + action='Remove Team', + message=f'Team: {group} has created {group_env_objects_count} resources on this environment.', + ) + + group_membership = EnvironmentService.find_environment_group( + session, group, environment.environmentUri + ) + if group_membership: + session.delete(group_membership) + session.commit() + + ResourcePolicy.delete_resource_policy( + session=session, + group=group, + resource_uri=environment.environmentUri, + resource_type=Environment.__name__, + ) + return environment + + @staticmethod + @has_tenant_permission(permissions.MANAGE_ENVIRONMENTS) + @has_resource_permission(permissions.UPDATE_ENVIRONMENT_GROUP) + def update_group_permissions(session, uri, data=None): + EnvironmentService.validate_invite_params(data) + + group = data['groupUri'] + + EnvironmentService.validate_permissions(session, uri, data['permissions'], group) + + environment = EnvironmentService.get_environment_by_uri(session, uri) + + group_membership = EnvironmentService.find_environment_group( + session, group, environment.environmentUri + ) + if not group_membership: + raise 
exceptions.UnauthorizedOperation( + action='UPDATE_TEAM_ENVIRONMENT_PERMISSIONS', + message=f'Team {group.name} is not a member of the environment {environment.name}', + ) + + ResourcePolicy.delete_resource_policy( + session=session, + group=group, + resource_uri=environment.environmentUri, + resource_type=Environment.__name__, + ) + ResourcePolicy.attach_resource_policy( + session=session, + group=group, + resource_uri=environment.environmentUri, + permissions=data['permissions'], + resource_type=Environment.__name__, + ) + return environment + + @staticmethod + @has_resource_permission(permissions.LIST_ENVIRONMENT_GROUP_PERMISSIONS) + def list_group_permissions(session, uri, group_uri): + # the permission checked + return EnvironmentService.list_group_permissions_internal(session, uri, group_uri) + + @staticmethod + def list_group_permissions_internal(session, uri, group_uri): + """No permission check, only for internal usages""" + environment = EnvironmentService.get_environment_by_uri(session, uri) + + return ResourcePolicy.get_resource_policy_permissions( + session=session, + group_uri=group_uri, + resource_uri=environment.environmentUri, + ) + + @staticmethod + def list_group_invitation_permissions( + session, username, groups, uri, data=None, check_perm=None + ): + group_invitation_permissions = [] + for p in permissions.ENVIRONMENT_INVITATION_REQUEST: + group_invitation_permissions.append( + Permission.find_permission_by_name( + session=session, + permission_name=p, + permission_type=PermissionType.RESOURCE.name, + ) + ) + return group_invitation_permissions + + @staticmethod + @has_tenant_permission(permissions.MANAGE_ENVIRONMENTS) + @has_resource_permission(permissions.ADD_ENVIRONMENT_CONSUMPTION_ROLES) + def add_consumption_role(session, uri, data=None) -> (Environment, EnvironmentGroup): + + group: str = data['groupUri'] + IAMRoleArn: str = data['IAMRoleArn'] + environment = EnvironmentService.get_environment_by_uri(session, uri) + + alreadyAdded = EnvironmentService.find_consumption_roles_by_IAMArn( + session, environment.environmentUri, IAMRoleArn + ) + if alreadyAdded: + raise exceptions.UnauthorizedOperation( + action='ADD_CONSUMPTION_ROLE', + message=f'IAM role {IAMRoleArn} is already added to the environment {environment.name}', + ) + + consumption_role = ConsumptionRole( + consumptionRoleName=data['consumptionRoleName'], + environmentUri=environment.environmentUri, + groupUri=group, + IAMRoleArn=IAMRoleArn, + IAMRoleName=IAMRoleArn.split("/")[-1], + ) + + session.add(consumption_role) + session.commit() + + ResourcePolicy.attach_resource_policy( + session=session, + group=group, + resource_uri=consumption_role.consumptionRoleUri, + permissions=permissions.CONSUMPTION_ROLE_ALL, + resource_type=ConsumptionRole.__name__, + ) + return consumption_role + + @staticmethod + @has_tenant_permission(permissions.MANAGE_ENVIRONMENTS) + @has_resource_permission(permissions.REMOVE_ENVIRONMENT_CONSUMPTION_ROLE) + def remove_consumption_role(session, uri, env_uri): + consumption_role = EnvironmentService.get_environment_consumption_role(session, uri, env_uri) + + num_resources = EnvironmentResourceManager.count_consumption_role_resources(session, uri) + if num_resources > 0: + raise exceptions.EnvironmentResourcesFound( + action='Remove Consumption Role', + message=f'Consumption role: {consumption_role.consumptionRoleName} has created {num_resources} resources on this environment.', + ) + + if consumption_role: + session.delete(consumption_role) + session.commit() + + 
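# Illustrative example (editor's sketch, not part of the generated patch):
# remove_group(), remove_consumption_role() and delete_environment() rely on
# EnvironmentResourceManager, which aggregates counts from every registered
# EnvironmentResource before allowing deletion. A feature module opts in by
# registering an implementation at import time; the "NotebookResource" name and
# its zero counts below are hypothetical placeholders for a module's real
# queries.
from dataall.core.environment.services.environment_resource_manager import (
    EnvironmentResource,
    EnvironmentResourceManager,
)


class NotebookResource(EnvironmentResource):
    """Hypothetical resource counter contributed by a feature module."""

    @staticmethod
    def count_resources(session, environment, group_uri) -> int:
        # Would count the group's objects in this environment via the session.
        return 0

    @staticmethod
    def count_role_resources(session, role_uri) -> int:
        # Would count objects created with the given consumption role.
        return 0


EnvironmentResourceManager.register(NotebookResource())
# End of illustrative example.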
ResourcePolicy.delete_resource_policy( + session=session, + group=consumption_role.groupUri, + resource_uri=consumption_role.consumptionRoleUri, + resource_type=ConsumptionRole.__name__, + ) + return True + + @staticmethod + def query_user_environments(session, username, groups, filter) -> Query: + query = ( + session.query(Environment) + .outerjoin( + EnvironmentGroup, + Environment.environmentUri + == EnvironmentGroup.environmentUri, + ) + .filter( + or_( + Environment.owner == username, + EnvironmentGroup.groupUri.in_(groups), + ) + ) + ) + if filter and filter.get('term'): + term = filter['term'] + query = query.filter( + or_( + Environment.label.ilike('%' + term + '%'), + Environment.description.ilike('%' + term + '%'), + Environment.tags.contains(f'{{{term}}}'), + Environment.region.ilike('%' + term + '%'), + ) + ) + return query + + @staticmethod + def paginated_user_environments(session, data=None) -> dict: + context = get_context() + return paginate( + query=EnvironmentService.query_user_environments(session, context.username, context.groups, data), + page=data.get('page', 1), + page_size=data.get('pageSize', 5), + ).to_dict() + + @staticmethod + def query_user_environment_groups(session, groups, uri, filter) -> Query: + query = ( + session.query(EnvironmentGroup) + .filter(EnvironmentGroup.environmentUri == uri) + .filter(EnvironmentGroup.groupUri.in_(groups)) + ) + if filter and filter.get('term'): + term = filter['term'] + query = query.filter( + or_( + EnvironmentGroup.groupUri.ilike('%' + term + '%'), + ) + ) + return query + + @staticmethod + @has_resource_permission(permissions.LIST_ENVIRONMENT_GROUPS) + def paginated_user_environment_groups(session, uri, data=None) -> dict: + return paginate( + query=EnvironmentService.query_user_environment_groups( + session, get_context().groups, uri, data + ), + page=data.get('page', 1), + page_size=data.get('pageSize', 1000), + ).to_dict() + + @staticmethod + def query_all_environment_groups(session, uri, filter) -> Query: + query = session.query(EnvironmentGroup).filter( + EnvironmentGroup.environmentUri == uri + ) + if filter and filter.get('term'): + term = filter['term'] + query = query.filter( + or_( + EnvironmentGroup.groupUri.ilike('%' + term + '%'), + ) + ) + return query + + @staticmethod + @has_resource_permission(permissions.LIST_ENVIRONMENT_GROUPS) + def paginated_all_environment_groups(session, uri, data=None) -> dict: + return paginate( + query=EnvironmentService.query_all_environment_groups( + session, uri, data + ), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + @has_resource_permission(permissions.LIST_ENVIRONMENT_GROUPS) + def list_environment_groups(session, uri) -> [str]: + return [ + g.groupUri + for g in EnvironmentService.query_user_environment_groups( + session, get_context().groups, uri, {} + ).all() + ] + + @staticmethod + def query_environment_invited_groups(session, uri, filter) -> Query: + query = ( + session.query(EnvironmentGroup) + .join( + Environment, + EnvironmentGroup.environmentUri + == Environment.environmentUri, + ) + .filter( + and_( + Environment.environmentUri == uri, + EnvironmentGroup.groupUri + != Environment.SamlGroupName, + ) + ) + ) + if filter and filter.get('term'): + term = filter['term'] + query = query.filter( + or_( + EnvironmentGroup.groupUri.ilike('%' + term + '%'), + ) + ) + return query + + @staticmethod + @has_resource_permission(permissions.LIST_ENVIRONMENT_GROUPS) + def paginated_environment_invited_groups(session, uri, 
data=None) -> dict: + return paginate( + query=EnvironmentService.query_environment_invited_groups(session, uri, data), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def list_environment_invited_groups(session, uri): + return EnvironmentService.query_environment_invited_groups(session, uri, {}).all() + + @staticmethod + def query_user_environment_consumption_roles(session, groups, uri, filter) -> Query: + query = ( + session.query(ConsumptionRole) + .filter(ConsumptionRole.environmentUri == uri) + .filter(ConsumptionRole.groupUri.in_(groups)) + ) + if filter and filter.get('term'): + term = filter['term'] + query = query.filter( + or_( + ConsumptionRole.consumptionRoleName.ilike('%' + term + '%'), + ) + ) + if filter and filter.get('groupUri'): + print("filter group") + group = filter['groupUri'] + query = query.filter( + or_( + ConsumptionRole.groupUri == group, + ) + ) + return query + + @staticmethod + @has_resource_permission(permissions.LIST_ENVIRONMENT_CONSUMPTION_ROLES) + def paginated_user_environment_consumption_roles(session, uri, data=None) -> dict: + return paginate( + query=EnvironmentService.query_user_environment_consumption_roles( + session, get_context().groups, uri, data + ), + page=data.get('page', 1), + page_size=data.get('pageSize', 1000), + ).to_dict() + + @staticmethod + def query_all_environment_consumption_roles(session, uri, filter) -> Query: + query = session.query(ConsumptionRole).filter( + ConsumptionRole.environmentUri == uri + ) + if filter and filter.get('term'): + term = filter['term'] + query = query.filter( + or_( + ConsumptionRole.consumptionRoleName.ilike('%' + term + '%'), + ) + ) + if filter and filter.get('groupUri'): + group = filter['groupUri'] + query = query.filter( + or_( + ConsumptionRole.groupUri == group, + ) + ) + return query + + @staticmethod + @has_resource_permission(permissions.LIST_ENVIRONMENT_CONSUMPTION_ROLES) + def paginated_all_environment_consumption_roles( + session, uri, data=None + ) -> dict: + return paginate( + query=EnvironmentService.query_all_environment_consumption_roles( + session, uri, data + ), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def find_consumption_roles_by_IAMArn(session, uri, arn) -> Query: + return session.query(ConsumptionRole).filter( + and_( + ConsumptionRole.environmentUri == uri, + ConsumptionRole.IAMRoleArn == arn + ) + ).first() + + @staticmethod + def query_environment_networks(session, uri, filter) -> Query: + query = session.query(Vpc).filter( + Vpc.environmentUri == uri, + ) + if filter.get('term'): + term = filter.get('term') + query = query.filter( + or_( + Vpc.label.ilike('%' + term + '%'), + Vpc.VpcId.ilike('%' + term + '%'), + ) + ) + return query + + @staticmethod + @has_resource_permission(permissions.LIST_ENVIRONMENT_NETWORKS) + def paginated_environment_networks(session, uri, data=None) -> dict: + return paginate( + query=EnvironmentService.query_environment_networks(session, uri, data), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def validate_invite_params(data): + if not data: + raise exceptions.RequiredParameter('data') + if not data.get('groupUri'): + raise exceptions.RequiredParameter('groupUri') + if not data.get('permissions'): + raise exceptions.RequiredParameter('permissions') + + @staticmethod + def find_environment_group(session, group_uri, environment_uri): + try: + env_group = 
EnvironmentService.get_environment_group(session, group_uri, environment_uri) + return env_group + except Exception: + return None + + @staticmethod + def get_environment_group(session, group_uri, environment_uri): + env_group = ( + session.query(EnvironmentGroup) + .filter( + ( + and_( + EnvironmentGroup.groupUri == group_uri, + EnvironmentGroup.environmentUri == environment_uri, + ) + ) + ) + .first() + ) + if not env_group: + raise exceptions.ObjectNotFound( + 'EnvironmentGroup', f'({group_uri},{environment_uri})' + ) + return env_group + + @staticmethod + def get_environment_consumption_role(session, role_uri, environment_uri): + role = ( + session.query(ConsumptionRole) + .filter( + ( + and_( + ConsumptionRole.consumptionRoleUri == role_uri, + ConsumptionRole.environmentUri == environment_uri, + ) + ) + ) + .first() + ) + if not role: + raise exceptions.ObjectNotFound( + 'ConsumptionRoleUri', f'({role_uri},{environment_uri})' + ) + return role + + @staticmethod + def get_environment_by_uri(session, uri) -> Environment: + return EnvironmentRepository.get_environment_by_uri(session, uri) + + @staticmethod + @has_resource_permission(permissions.GET_ENVIRONMENT) + def find_environment_by_uri(session, uri) -> Environment: + return EnvironmentService.get_environment_by_uri(session, uri) + + @staticmethod + def list_all_active_environments(session) -> [Environment]: + """ + Lists all active dataall environments + :param session: + :return: [Environment] + """ + environments: [Environment] = ( + session.query(Environment) + .filter(Environment.deleted.is_(None)) + .all() + ) + log.info( + f'Retrieved all active dataall environments {[e.AwsAccountId for e in environments]}' + ) + return environments + + @staticmethod + @has_resource_permission(permissions.GET_ENVIRONMENT) + def get_stack(session, uri, stack_uri) -> Stack: + return session.query(Stack).get(stack_uri) + + @staticmethod + @has_resource_permission(permissions.DELETE_ENVIRONMENT) + def delete_environment(session, uri, environment): + env_groups = ( + session.query(EnvironmentGroup) + .filter(EnvironmentGroup.environmentUri == uri) + .all() + ) + env_roles = ( + session.query(ConsumptionRole) + .filter(ConsumptionRole.environmentUri == uri) + .all() + ) + + env_resources = 0 + for group in env_groups: + env_resources += EnvironmentResourceManager.count_group_resources( + session, + environment, + group.groupUri + ) + for role in env_roles: + env_resources += EnvironmentResourceManager.count_consumption_role_resources( + session, + environment, + role.consumptionRoleUri + ) + + if env_resources > 0: + raise exceptions.EnvironmentResourcesFound( + action='Delete Environment', + message=f'Found {env_resources} resources on environment {environment.label} - Delete all environment related objects before proceeding', + ) + else: + EnvironmentResourceManager.delete_env(session, environment) + EnvironmentParameterRepository(session).delete_params(environment.environmentUri) + + for group in env_groups: + session.delete(group) + + ResourcePolicy.delete_resource_policy( + session=session, + resource_uri=uri, + group=group.groupUri, + ) + + for role in env_roles: + session.delete(role) + + KeyValueTag.delete_key_value_tags( + session, environment.environmentUri, 'environment' + ) + + return session.delete(environment) + + @staticmethod + def get_environment_parameters(session, env_uri): + return EnvironmentParameterRepository(session).get_params(env_uri) + + @staticmethod + def get_boolean_env_param(session, env: Environment, param: str) -> 
bool: + param = EnvironmentParameterRepository(session).get_param(env.environmentUri, param) + return param is not None and param.value.lower() == "true" diff --git a/tests/db/__init__.py b/backend/dataall/core/environment/tasks/__init__.py similarity index 100% rename from tests/db/__init__.py rename to backend/dataall/core/environment/tasks/__init__.py diff --git a/backend/dataall/core/environment/tasks/env_stacks_updater.py b/backend/dataall/core/environment/tasks/env_stacks_updater.py new file mode 100644 index 000000000..3b415e4bd --- /dev/null +++ b/backend/dataall/core/environment/tasks/env_stacks_updater.py @@ -0,0 +1,71 @@ +import logging +import os +import sys +import time + +from dataall.base.loader import ImportMode, load_modules +from dataall.core.environment.db.environment_models import Environment +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.environment.services.env_stack_finder import StackFinder +from dataall.core.stacks.aws.ecs import Ecs +from dataall.core.stacks.db.stack_repositories import Stack +from dataall.base.db import get_engine +from dataall.base.utils import Parameter + +root = logging.getLogger() +root.setLevel(logging.INFO) +if not root.hasHandlers(): + root.addHandler(logging.StreamHandler(sys.stdout)) +log = logging.getLogger(__name__) + +RETRIES = 30 +SLEEP_TIME = 30 + + +def update_stacks(engine, envname): + with engine.scoped_session() as session: + all_environments: [Environment] = EnvironmentService.list_all_active_environments(session) + additional_stacks = [] + for finder in StackFinder.all(): + additional_stacks.extend(finder.find_stack_uris(session)) + + log.info(f'Found {len(all_environments)} environments, triggering update stack tasks...') + environment: Environment + for environment in all_environments: + update_stack(session=session, envname=envname, target_uri=environment.environmentUri, wait=True) + + for stack_uri in additional_stacks: + update_stack(session=session, envname=envname, target_uri=stack_uri, wait=False) + + return len(all_environments), len(additional_stacks) + + +def update_stack(session, envname, target_uri, wait=False): + stack = Stack.get_stack_by_target_uri( + session, target_uri=target_uri + ) + cluster_name = Parameter().get_parameter(env=envname, path='ecs/cluster/name') + if not Ecs.is_task_running(cluster_name=cluster_name, started_by=f'awsworker-{stack.stackUri}'): + stack.EcsTaskArn = Ecs.run_cdkproxy_task(stack_uri=stack.stackUri) + if wait: + retries = 1 + while Ecs.is_task_running(cluster_name=cluster_name, started_by=f'awsworker-{stack.stackUri}'): + log.info(f"Update for {stack.name}//{stack.stackUri} is not complete, waiting for {SLEEP_TIME} seconds...") + time.sleep(SLEEP_TIME) + retries = retries + 1 + if retries > RETRIES: + log.info(f"Maximum number of retries exceeded ({RETRIES} retries), continuing task...") + break + log.info(f"Update for {stack.name}//{stack.stackUri} COMPLETE or maximum number of retries exceeded ({RETRIES} retries)") + else: + log.info( + f'Stack update is already running... 
Skipping stack {stack.name}//{stack.stackUri}' + ) + + +if __name__ == '__main__': + envname = os.environ.get('envname', 'local') + engine = get_engine(envname=envname) + + load_modules({ImportMode.STACK_UPDATER_TASK}) + update_stacks(engine=engine, envname=envname) diff --git a/backend/dataall/core/feature_toggle_checker.py b/backend/dataall/core/feature_toggle_checker.py new file mode 100644 index 000000000..2cb1f9ccb --- /dev/null +++ b/backend/dataall/core/feature_toggle_checker.py @@ -0,0 +1,19 @@ +""" +Contains decorators that check if a feature has been enabled or not +""" +from dataall.base.config import config +from dataall.base.utils.decorator_utls import process_func + + +def is_feature_enabled(config_property: str): + def decorator(f): + fn, fn_decorator = process_func(f) + + def decorated(*args, **kwargs): + value = config.get_property(config_property) + if not value: + raise Exception(f"Disabled by config {config_property}") + return fn(*args, **kwargs) + + return fn_decorator(decorated) + return decorator diff --git a/backend/dataall/core/notifications/__init__.py b/backend/dataall/core/notifications/__init__.py new file mode 100644 index 000000000..6966ca66f --- /dev/null +++ b/backend/dataall/core/notifications/__init__.py @@ -0,0 +1 @@ +from dataall.core.notifications import api diff --git a/backend/dataall/core/notifications/api/__init__.py b/backend/dataall/core/notifications/api/__init__.py new file mode 100644 index 000000000..ece87de96 --- /dev/null +++ b/backend/dataall/core/notifications/api/__init__.py @@ -0,0 +1,9 @@ +from . import ( + input_types, + mutations, + queries, + resolvers, + types, +) + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/core/notifications/api/input_types.py b/backend/dataall/core/notifications/api/input_types.py new file mode 100644 index 000000000..9f01dd2ea --- /dev/null +++ b/backend/dataall/core/notifications/api/input_types.py @@ -0,0 +1,14 @@ +from dataall.base.api import gql + +NotificationFilter = gql.InputType( + name='NotificationFilter', + arguments=[ + gql.Argument(name='term', type=gql.String), + gql.Argument(name='read', type=gql.Boolean), + gql.Argument(name='unread', type=gql.Boolean), + gql.Argument(name='archived', type=gql.Boolean), + gql.Argument(name='type', type=gql.String), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + ], +) diff --git a/backend/dataall/core/notifications/api/mutations.py b/backend/dataall/core/notifications/api/mutations.py new file mode 100644 index 000000000..3a5f64375 --- /dev/null +++ b/backend/dataall/core/notifications/api/mutations.py @@ -0,0 +1,19 @@ +from dataall.base.api import gql +from .resolvers import * + + +markNotificationAsRead = gql.MutationField( + name='markNotificationAsRead', + args=[ + gql.Argument(name='notificationUri', type=gql.String), + ], + type=gql.Boolean, + resolver=mark_as_read, +) + +deleteNotification = gql.MutationField( + name='deleteNotification', + args=[gql.Argument(name='notificationUri', type=gql.String)], + type=gql.Boolean, + resolver=delete, +) diff --git a/backend/dataall/core/notifications/api/queries.py b/backend/dataall/core/notifications/api/queries.py new file mode 100644 index 000000000..c4a30ba87 --- /dev/null +++ b/backend/dataall/core/notifications/api/queries.py @@ -0,0 +1,30 @@ +from dataall.base.api import gql +from .resolvers import * + + +listNotifications = gql.QueryField( + name='listNotifications', + args=[ + 
gql.Argument(name='filter', type=gql.Ref('NotificationFilter')), + ], + type=gql.Ref('NotificationSearchResult'), + resolver=list_my_notifications, +) + +countUnreadNotifications = gql.QueryField( + name='countUnreadNotifications', + type=gql.Integer, + resolver=count_unread_notifications, +) + +countReadNotifications = gql.QueryField( + name='countReadNotifications', + type=gql.Integer, + resolver=count_read_notifications, +) + +countDeletedNotifications = gql.QueryField( + name='countDeletedNotifications', + type=gql.Integer, + resolver=count_deleted_notifications, +) diff --git a/backend/dataall/core/notifications/api/resolvers.py b/backend/dataall/core/notifications/api/resolvers.py new file mode 100644 index 000000000..0c78c5ba1 --- /dev/null +++ b/backend/dataall/core/notifications/api/resolvers.py @@ -0,0 +1,48 @@ +import logging + +from dataall.base.api.context import Context +from dataall.core.notifications.db.notification_repositories import Notification + +log = logging.getLogger(__name__) + + +def list_my_notifications( + context: Context, + source, + filter: dict = None, +): + with context.engine.scoped_session() as session: + return Notification.paginated_notifications( + session=session, username=context.username, filter=filter + ) + + +def mark_as_read( + context: Context, + source, + notificationUri: str = None, +): + with context.engine.scoped_session() as session: + return Notification.read_notification(session, notificationUri) + + +def count_unread_notifications(context: Context, source): + with context.engine.scoped_session() as session: + return Notification.count_unread_notifications(session, context.username) + + +def count_deleted_notifications(context: Context, source): + with context.engine.scoped_session() as session: + return Notification.count_deleted_notifications( + session, context.username + ) + + +def count_read_notifications(context: Context, source): + with context.engine.scoped_session() as session: + return Notification.count_read_notifications(session, context.username) + + +def delete(context: Context, source, notificationUri): + with context.engine.scoped_session() as session: + return Notification.delete_notification(session, notificationUri) diff --git a/backend/dataall/core/notifications/api/types.py b/backend/dataall/core/notifications/api/types.py new file mode 100644 index 000000000..f91b8a522 --- /dev/null +++ b/backend/dataall/core/notifications/api/types.py @@ -0,0 +1,34 @@ +from dataall.base.api import gql +from dataall.core.notifications.db.notification_models import Notification + + +def resolve_enum(context, source: Notification): + return source.type.name + + +Notification = gql.ObjectType( + name='Notification', + fields=[ + gql.Field(name='notificationUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='type', type=gql.String, resolver=resolve_enum), + gql.Field(name='message', type=gql.String), + gql.Field(name='username', type=gql.NonNullableType(gql.String)), + gql.Field(name='target_uri', type=gql.NonNullableType(gql.String)), + gql.Field(name='is_read', type=gql.Boolean), + gql.Field(name='created', type=gql.String), + gql.Field(name='updated', type=gql.String), + ], +) + + +NotificationSearchResult = gql.ObjectType( + name='NotificationSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', 
type=gql.ArrayType(Notification)), + ], +) diff --git a/tests/searchproxy/__init__.py b/backend/dataall/core/notifications/db/__init__.py similarity index 100% rename from tests/searchproxy/__init__.py rename to backend/dataall/core/notifications/db/__init__.py diff --git a/backend/dataall/core/notifications/db/notification_models.py b/backend/dataall/core/notifications/db/notification_models.py new file mode 100644 index 000000000..150c54516 --- /dev/null +++ b/backend/dataall/core/notifications/db/notification_models.py @@ -0,0 +1,31 @@ +import enum +from datetime import datetime + +from sqlalchemy import Column, String, Boolean, Enum, DateTime + +from dataall.base.db import Base +from dataall.base.db import utils + + +class NotificationType(enum.Enum): + SHARE_OBJECT_SUBMITTED = 'SHARE_OBJECT_SUBMITTED' + SHARE_ITEM_REQUEST = 'SHARE_ITEM_REQUEST' + SHARE_OBJECT_APPROVED = 'SHARE_OBJECT_APPROVED' + SHARE_OBJECT_REJECTED = 'SHARE_OBJECT_REJECTED' + SHARE_OBJECT_PENDING_APPROVAL = 'SHARE_OBJECT_PENDING_APPROVAL' + DATASET_VERSION = 'DATASET_VERSION' + + +class Notification(Base): + __tablename__ = 'notification' + notificationUri = Column( + String, primary_key=True, default=utils.uuid('notificationtype') + ) + type = Column(Enum(NotificationType), nullable=True) + message = Column(String, nullable=False) + username = Column(String, nullable=False) + is_read = Column(Boolean, nullable=False, default=False) + target_uri = Column(String) + created = Column(DateTime, default=datetime.now) + updated = Column(DateTime, onupdate=datetime.now) + deleted = Column(DateTime) diff --git a/backend/dataall/core/notifications/db/notification_repositories.py b/backend/dataall/core/notifications/db/notification_repositories.py new file mode 100644 index 000000000..1f12aa215 --- /dev/null +++ b/backend/dataall/core/notifications/db/notification_repositories.py @@ -0,0 +1,103 @@ +from datetime import datetime + +from sqlalchemy import func, and_ + +from dataall.core.notifications.db import notification_models as models +from dataall.base.db import paginate + + +class Notification: + def __init__(self): + pass + + @staticmethod + def create( + session, + username, + notification_type: models.NotificationType, + target_uri, + message, + ) -> models.Notification: + notification = models.Notification( + type=notification_type, + message=message, + username=username, + target_uri=target_uri, + ) + session.add(notification) + session.commit() + return notification + + @staticmethod + def paginated_notifications(session, username, filter=None): + if not filter: + filter = {} + q = session.query(models.Notification).filter( + models.Notification.username == username + ) + if filter.get('read'): + q = q.filter( + and_( + models.Notification.is_read == True, + models.Notification.deleted.is_(None), + ) + ) + if filter.get('unread'): + q = q.filter( + and_( + models.Notification.is_read == False, + models.Notification.deleted.is_(None), + ) + ) + if filter.get('archived'): + q = q.filter(models.Notification.deleted.isnot(None)) + return paginate( + q, page=filter.get('page', 1), page_size=filter.get('pageSize', 20) + ).to_dict() + + @staticmethod + def count_unread_notifications(session, username): + count = ( + session.query(func.count(models.Notification.notificationUri)) + .filter(models.Notification.username == username) + .filter(models.Notification.is_read == False) + .filter(models.Notification.deleted.is_(None)) + .scalar() + ) + return int(count) + + @staticmethod + def count_read_notifications(session, 
username): + count = ( + session.query(func.count(models.Notification.notificationUri)) + .filter(models.Notification.username == username) + .filter(models.Notification.is_read == True) + .filter(models.Notification.deleted.is_(None)) + .scalar() + ) + return int(count) + + @staticmethod + def count_deleted_notifications(session, username): + count = ( + session.query(func.count(models.Notification.notificationUri)) + .filter(models.Notification.username == username) + .filter(models.Notification.deleted.isnot(None)) + .scalar() + ) + return int(count) + + @staticmethod + def read_notification(session, notificationUri): + notification = session.query(models.Notification).get(notificationUri) + notification.is_read = True + session.commit() + return True + + @staticmethod + def delete_notification(session, notificationUri): + notification = session.query(models.Notification).get(notificationUri) + if notification: + notification.deleted = datetime.now() + session.commit() + return True diff --git a/backend/dataall/core/organizations/__init__.py b/backend/dataall/core/organizations/__init__.py new file mode 100644 index 000000000..1376f5648 --- /dev/null +++ b/backend/dataall/core/organizations/__init__.py @@ -0,0 +1 @@ +from dataall.core.organizations import api diff --git a/backend/dataall/core/organizations/api/__init__.py b/backend/dataall/core/organizations/api/__init__.py new file mode 100644 index 000000000..ece87de96 --- /dev/null +++ b/backend/dataall/core/organizations/api/__init__.py @@ -0,0 +1,9 @@ +from . import ( + input_types, + mutations, + queries, + resolvers, + types, +) + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/core/organizations/api/enums.py b/backend/dataall/core/organizations/api/enums.py new file mode 100644 index 000000000..60713a5bf --- /dev/null +++ b/backend/dataall/core/organizations/api/enums.py @@ -0,0 +1,9 @@ +from dataall.base.api.constants import GraphQLEnumMapper + + +class OrganisationUserRole(GraphQLEnumMapper): + Owner = '999' + Admin = '900' + Member = '100' + NotMember = '000' + Invited = '800' diff --git a/backend/dataall/core/organizations/api/input_types.py b/backend/dataall/core/organizations/api/input_types.py new file mode 100644 index 000000000..87f69d1cb --- /dev/null +++ b/backend/dataall/core/organizations/api/input_types.py @@ -0,0 +1,82 @@ +from dataall.base.api.constants import GraphQLEnumMapper, SortDirection +from dataall.base.api import gql +from dataall.core.organizations.api.enums import OrganisationUserRole + +NewOrganizationInput = gql.InputType( + name='NewOrganizationInput', + arguments=[ + gql.Argument(name='label', type=gql.String), + gql.Argument(name='description', type=gql.String), + gql.Argument(name='tags', type=gql.ArrayType(gql.String)), + gql.Argument(name='SamlGroupName', type=gql.String), + ], +) + +ModifyOrganizationInput = gql.InputType( + name='ModifyOrganizationInput', + arguments=[ + gql.Argument('label', gql.String), + gql.Argument(name='description', type=gql.String), + gql.Argument(name='SamlGroupName', type=gql.String), + gql.Argument(name='tags', type=gql.ArrayType(gql.String)), + ], +) + + +class OrganizationSortField(GraphQLEnumMapper): + created = 'created' + updated = 'updated' + label = 'label' + + +OrganizationSortCriteria = gql.InputType( + name='OrganizationSortCriteria', + arguments=[ + gql.Argument( + name='field', + type=gql.NonNullableType(OrganizationSortField.toGraphQLEnum()), + ), + gql.Argument( + name='direction', 
type=gql.NonNullableType(SortDirection.toGraphQLEnum()) + ), + ], +) + +OrganizationFilter = gql.InputType( + name='OrganizationFilter', + arguments=[ + gql.Argument('term', gql.String), + gql.Argument('displayArchived', gql.Boolean), + gql.Argument('sort', gql.ArrayType(OrganizationSortCriteria)), + gql.Argument('page', gql.Integer), + gql.Argument('pageSize', gql.Integer), + gql.Argument('roles', gql.ArrayType(OrganisationUserRole.toGraphQLEnum())), + gql.Argument('tags', gql.ArrayType(gql.String)), + ], +) + + +OrganizationTopicFilter = gql.InputType( + name='OrganizationTopicFilter', + arguments=[ + gql.Argument(name='term', type=gql.String), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + ], +) + +OrganizationTopicInput = gql.InputType( + name='OrganizationTopicInput', + arguments=[ + gql.Argument(name='label', type=gql.String), + gql.Argument(name='description', type=gql.String), + ], +) + +InviteGroupToOrganizationInput = gql.InputType( + name='InviteGroupToOrganizationInput', + arguments=[ + gql.Argument('organizationUri', gql.NonNullableType(gql.String)), + gql.Argument('groupUri', gql.NonNullableType(gql.String)), + ], +) diff --git a/backend/dataall/core/organizations/api/mutations.py b/backend/dataall/core/organizations/api/mutations.py new file mode 100644 index 000000000..085777a10 --- /dev/null +++ b/backend/dataall/core/organizations/api/mutations.py @@ -0,0 +1,55 @@ +from dataall.base.api import gql +from .input_types import ( + ModifyOrganizationInput, + NewOrganizationInput, + InviteGroupToOrganizationInput, +) +from .resolvers import * +from .types import Organization + +createOrganization = gql.MutationField( + name='createOrganization', + args=[gql.Argument(name='input', type=NewOrganizationInput)], + type=gql.Thunk(lambda: Organization), + resolver=create_organization, + test_scope='Organization', +) + +updateOrganization = gql.MutationField( + name='updateOrganization', + args=[ + gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='input', type=gql.NonNullableType(ModifyOrganizationInput)), + ], + type=gql.Thunk(lambda: Organization), + resolver=update_organization, + test_scope='Organization', +) + +archiveOrganization = gql.MutationField( + name='archiveOrganization', + args=[gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String))], + resolver=archive_organization, + type=gql.Boolean, +) + +inviteGroupToOrganization = gql.MutationField( + name='inviteGroupToOrganization', + args=[ + gql.Argument( + name='input', type=gql.NonNullableType(InviteGroupToOrganizationInput) + ) + ], + type=gql.Ref('Organization'), + resolver=invite_group, +) + +removeGroupFromOrganization = gql.MutationField( + name='removeGroupFromOrganization', + args=[ + gql.Argument('organizationUri', type=gql.NonNullableType(gql.String)), + gql.Argument('groupUri', type=gql.NonNullableType(gql.String)), + ], + type=gql.Ref('Organization'), + resolver=remove_group, +) diff --git a/backend/dataall/core/organizations/api/queries.py b/backend/dataall/core/organizations/api/queries.py new file mode 100644 index 000000000..32625e8d9 --- /dev/null +++ b/backend/dataall/core/organizations/api/queries.py @@ -0,0 +1,44 @@ +from dataall.base.api import gql +from .input_types import OrganizationFilter +from .resolvers import * +from .types import ( + Organization, + OrganizationSearchResult, +) + +getOrganization = gql.QueryField( + name='getOrganization', + 
args=[gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String))], + type=gql.Thunk(lambda: Organization), + resolver=get_organization, + test_scope='Organization', +) + + +listOrganizations = gql.QueryField( + name='listOrganizations', + args=[gql.Argument('filter', OrganizationFilter)], + type=OrganizationSearchResult, + resolver=list_organizations, + test_scope='Organization', +) + +listOrganizationInvitedGroups = gql.QueryField( + name='listOrganizationInvitedGroups', + type=gql.Ref('GroupSearchResult'), + args=[ + gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('GroupFilter')), + ], + resolver=list_organization_invited_groups, +) + +listOrganizationGroups = gql.QueryField( + name='listOrganizationGroups', + type=gql.Ref('GroupSearchResult'), + args=[ + gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('GroupFilter')), + ], + resolver=list_organization_groups, +) diff --git a/backend/dataall/core/organizations/api/resolvers.py b/backend/dataall/core/organizations/api/resolvers.py new file mode 100644 index 000000000..458a2ddb4 --- /dev/null +++ b/backend/dataall/core/organizations/api/resolvers.py @@ -0,0 +1,146 @@ +from dataall.base.api.context import Context +from dataall.core.environment.db.environment_models import Environment +from dataall.core.organizations.api.enums import OrganisationUserRole +from dataall.core.organizations.db.organization_repositories import Organization +from dataall.core.organizations.db import organization_models as models + + +def create_organization(context: Context, source, input=None): + with context.engine.scoped_session() as session: + organization = Organization.create_organization( + session=session, + data=input, + ) + return organization + + +def update_organization(context, source, organizationUri=None, input=None): + with context.engine.scoped_session() as session: + return Organization.update_organization( + session=session, + uri=organizationUri, + data=input, + ) + + +def get_organization(context: Context, source, organizationUri=None): + with context.engine.scoped_session() as session: + return Organization.get_organization_by_uri( + session=session, uri=organizationUri + ) + + +def list_organizations(context: Context, source, filter=None): + if not filter: + filter = {'page': 1, 'pageSize': 5} + + with context.engine.scoped_session() as session: + return Organization.paginated_user_organizations( + session=session, + data=filter, + ) + + +def list_organization_environments(context, source, filter=None): + if not filter: + filter = {'page': 1, 'pageSize': 5} + with context.engine.scoped_session() as session: + return Organization.paginated_organization_environments( + session=session, + uri=source.organizationUri, + data=filter, + ) + + +def stats(context, source: models.Organization, **kwargs): + with context.engine.scoped_session() as session: + environments = Organization.count_organization_environments( + session=session, uri=source.organizationUri + ) + + groups = Organization.count_organization_invited_groups( + session=session, uri=source.organizationUri, group=source.SamlGroupName + ) + + return {'environments': environments, 'groups': groups, 'users': 0} + + +def resolve_user_role(context: Context, source: models.Organization): + if source.owner == context.username: + return OrganisationUserRole.Owner.value + elif source.SamlGroupName in context.groups: + return 
OrganisationUserRole.Admin.value
+    else:
+        with context.engine.scoped_session() as session:
+            if Organization.find_organization_membership(
+                session=session, uri=source.organizationUri, groups=context.groups
+            ):
+                return OrganisationUserRole.Invited.value
+    return OrganisationUserRole.NotMember.value
+
+
+def archive_organization(context: Context, source, organizationUri: str = None):
+    with context.engine.scoped_session() as session:
+        return Organization.archive_organization(
+            session=session,
+            uri=organizationUri,
+        )
+
+
+def invite_group(context: Context, source, input):
+    with context.engine.scoped_session() as session:
+        organization, organization_group = Organization.invite_group(
+            session=session,
+            uri=input['organizationUri'],
+            data=input,
+        )
+        return organization
+
+
+def remove_group(context: Context, source, organizationUri=None, groupUri=None):
+    with context.engine.scoped_session() as session:
+        organization = Organization.remove_group(
+            session=session,
+            uri=organizationUri,
+            group=groupUri
+        )
+        return organization
+
+
+def list_organization_invited_groups(
+    context: Context, source, organizationUri=None, filter=None
+):
+    if filter is None:
+        filter = {}
+    with context.engine.scoped_session() as session:
+        return Organization.paginated_organization_invited_groups(
+            session=session,
+            uri=organizationUri,
+            data=filter,
+        )
+
+
+def list_organization_groups(
+    context: Context, source, organizationUri=None, filter=None
+):
+    if filter is None:
+        filter = {}
+    with context.engine.scoped_session() as session:
+        return Organization.paginated_organization_groups(
+            session=session,
+            uri=organizationUri,
+            data=filter,
+        )
+
+
+def resolve_organization_by_env(context, source, **kwargs):
+    """
+    Resolves the organization for an environment-related resource.
+ """ + if not source: + return None + with context.engine.scoped_session() as session: + env: Environment = session.query(Environment).get( + source.environmentUri + ) + return session.query(models.Organization).get(env.organizationUri) diff --git a/backend/dataall/core/organizations/api/types.py b/backend/dataall/core/organizations/api/types.py new file mode 100644 index 000000000..22e0d216c --- /dev/null +++ b/backend/dataall/core/organizations/api/types.py @@ -0,0 +1,53 @@ +from .input_types import * +from .resolvers import * + +OrganizationStats = gql.ObjectType( + name='OrganizationStats', + fields=[ + gql.Field(name='groups', type=gql.Integer), + gql.Field(name='users', type=gql.Integer), + gql.Field(name='environments', type=gql.Integer), + ], +) +Organization = gql.ObjectType( + name='Organization', + fields=[ + gql.Field(name='organizationUri', type=gql.ID), + gql.Field(name='label', type=gql.String), + gql.Field(name='name', type=gql.String), + gql.Field(name='description', type=gql.String), + gql.Field(name='tags', type=gql.ArrayType(gql.String)), + gql.Field(name='owner', type=gql.String), + gql.Field(name='SamlGroupName', type=gql.String), + gql.Field( + name='userRoleInOrganization', + type=OrganisationUserRole.toGraphQLEnum(), + resolver=resolve_user_role, + ), + gql.Field( + name='environments', + args=[gql.Argument(name='filter', type=gql.Ref('EnvironmentFilter'))], + type=gql.Ref('EnvironmentSearchResult'), + resolver=list_organization_environments, + ), + gql.Field(name='created', type=gql.String), + gql.Field(name='updated', type=gql.String), + gql.Field(name='stats', type=OrganizationStats, resolver=stats), + ], +) + + +OrganizationSearchResult = gql.ObjectType( + name='OrganizationSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='pageSize', type=gql.Integer), + gql.Field(name='nextPage', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='previousPage', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(Organization)), + ], +) diff --git a/tests/tasks/__init__.py b/backend/dataall/core/organizations/db/__init__.py similarity index 100% rename from tests/tasks/__init__.py rename to backend/dataall/core/organizations/db/__init__.py diff --git a/backend/dataall/core/organizations/db/organization_models.py b/backend/dataall/core/organizations/db/organization_models.py new file mode 100644 index 000000000..4f6a0823e --- /dev/null +++ b/backend/dataall/core/organizations/db/organization_models.py @@ -0,0 +1,39 @@ +import datetime +from enum import Enum + +from sqlalchemy import Column, String, DateTime +from sqlalchemy.orm import query_expression + +from dataall.base.db import Base +from dataall.base.db import Resource, utils + + +class OrganisationUserRole(Enum): + Owner = '999' + Admin = '900' + Member = '100' + NotMember = '000' + Invited = '800' + + +class Organization(Resource, Base): + __tablename__ = 'organization' + organizationUri = Column( + String, primary_key=True, default=utils.uuid('organization') + ) + + # `role` is a dynamically generated SQL expression + # computing the role of the user in an organization + userRoleInOrganization = query_expression() + SamlGroupName = Column(String, nullable=True) + + +class OrganizationGroup(Base): + __tablename__ = 'organization_group' + groupUri = Column(String, primary_key=True) + organizationUri = 
Column(String, primary_key=True) + invitedBy = Column(String, nullable=True) + description = Column(String, default='No description provided') + created = Column(DateTime, default=datetime.datetime.now) + updated = Column(DateTime, onupdate=datetime.datetime.now) + deleted = Column(DateTime) diff --git a/backend/dataall/core/organizations/db/organization_repositories.py b/backend/dataall/core/organizations/db/organization_repositories.py new file mode 100644 index 000000000..9574ea24e --- /dev/null +++ b/backend/dataall/core/organizations/db/organization_repositories.py @@ -0,0 +1,374 @@ +import logging + +from sqlalchemy import or_, and_ +from sqlalchemy.orm import Query + +from dataall.core.permissions import permissions +from dataall.base.db import exceptions, paginate +from dataall.core.organizations.db.organization_models import OrganizationGroup, OrganisationUserRole +from dataall.core.organizations.db import organization_models as models +from dataall.core.environment.db.environment_models import Environment +from dataall.core.permissions.permission_checker import has_resource_permission, has_tenant_permission +from dataall.base.context import get_context +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.activity.db.activity_models import Activity + +logger = logging.getLogger(__name__) + + +class Organization: + @staticmethod + def get_organization_by_uri(session, uri: str) -> models.Organization: + if not uri: + raise exceptions.RequiredParameter(param_name='organizationUri') + org = Organization.find_organization_by_uri(session, uri) + if not org: + raise exceptions.ObjectNotFound('Organization', uri) + return org + + @staticmethod + def find_organization_by_uri(session, uri) -> models.Organization: + return session.query(models.Organization).get(uri) + + @staticmethod + @has_tenant_permission(permissions.MANAGE_ORGANIZATIONS) + def create_organization(session, data=None) -> models.Organization: + if not data: + raise exceptions.RequiredParameter(data) + if not data.get('SamlGroupName'): + raise exceptions.RequiredParameter('groupUri') + if not data.get('label'): + raise exceptions.RequiredParameter('label') + + username = get_context().username + org = models.Organization( + label=data.get('label'), + owner=username, + tags=data.get('tags', []), + description=data.get('description', 'No description provided'), + SamlGroupName=data.get('SamlGroupName'), + userRoleInOrganization=OrganisationUserRole.Owner.value, + ) + session.add(org) + session.commit() + member = models.OrganizationGroup( + organizationUri=org.organizationUri, + groupUri=data['SamlGroupName'], + ) + session.add(member) + + activity = Activity( + action='org:create', + label='org:create', + owner=username, + summary=f'{username} create organization {org.name} ', + targetUri=org.organizationUri, + targetType='org', + ) + session.add(activity) + + ResourcePolicy.attach_resource_policy( + session=session, + group=data['SamlGroupName'], + permissions=permissions.ORGANIZATION_ALL, + resource_uri=org.organizationUri, + resource_type=models.Organization.__name__, + ) + + return org + + @staticmethod + @has_resource_permission(permissions.UPDATE_ORGANIZATION) + def update_organization(session, uri, data=None): + organization = Organization.get_organization_by_uri(session, uri) + for field in data.keys(): + setattr(organization, field, data.get(field)) + session.commit() + + context = get_context() + activity = Activity( + action='org:update', + label='org:create', + 
owner=context.username, + summary=f'{context.username} updated organization {organization.name} ', + targetUri=organization.organizationUri, + targetType='org', + ) + session.add(activity) + ResourcePolicy.attach_resource_policy( + session=session, + group=organization.SamlGroupName, + permissions=permissions.ORGANIZATION_ALL, + resource_uri=organization.organizationUri, + resource_type=models.Organization.__name__, + ) + return organization + + @staticmethod + def query_user_organizations(session, username, groups, filter) -> Query: + query = ( + session.query(models.Organization) + .outerjoin( + models.OrganizationGroup, + models.Organization.organizationUri == models.OrganizationGroup.organizationUri, + ) + .filter( + or_( + models.Organization.owner == username, + models.OrganizationGroup.groupUri.in_(groups), + ) + ) + ) + if filter and filter.get('term'): + query = query.filter( + or_( + models.Organization.label.ilike('%' + filter.get('term') + '%'), + models.Organization.description.ilike('%' + filter.get('term') + '%'), + models.Organization.tags.contains(f"{{{filter.get('term')}}}"), + ) + ) + return query + + @staticmethod + def paginated_user_organizations(session, data=None) -> dict: + context = get_context() + return paginate( + query=Organization.query_user_organizations(session, context.username, context.groups, data), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def query_organization_environments(session, uri, filter) -> Query: + query = session.query(Environment).filter(Environment.organizationUri == uri) + if filter and filter.get('term'): + query = query.filter( + or_( + Environment.label.ilike('%' + filter.get('term') + '%'), + Environment.description.ilike('%' + filter.get('term') + '%'), + ) + ) + return query + + @staticmethod + @has_tenant_permission(permissions.MANAGE_ORGANIZATIONS) + @has_resource_permission(permissions.GET_ORGANIZATION) + def paginated_organization_environments(session, uri, data=None) -> dict: + return paginate( + query=Organization.query_organization_environments(session, uri, data), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + @has_tenant_permission(permissions.MANAGE_ORGANIZATIONS) + @has_resource_permission(permissions.DELETE_ORGANIZATION) + def archive_organization(session, uri) -> bool: + org = Organization.get_organization_by_uri(session, uri) + environments = session.query(Environment).filter(Environment.organizationUri == uri).count() + if environments: + raise exceptions.UnauthorizedOperation( + action='ARCHIVE_ORGANIZATION', + message='The organization you tried to delete has linked environments', + ) + session.delete(org) + ResourcePolicy.delete_resource_policy( + session=session, + group=org.SamlGroupName, + resource_uri=org.organizationUri, + resource_type=models.Organization.__name__, + ) + + return True + + @staticmethod + @has_tenant_permission(permissions.MANAGE_ORGANIZATIONS) + @has_resource_permission(permissions.INVITE_ORGANIZATION_GROUP) + def invite_group(session, uri, data=None) -> (models.Organization, models.OrganizationGroup): + Organization.validate_invite_params(data) + + group: str = data['groupUri'] + + organization = Organization.get_organization_by_uri(session, uri) + + group_membership = Organization.find_group_membership(session, group, organization) + if group_membership: + raise exceptions.UnauthorizedOperation( + action='INVITE_TEAM', + message=f'Team {group} is already admin of the organization 
{organization.name}', + ) + org_group = OrganizationGroup( + organizationUri=organization.organizationUri, + groupUri=group, + invitedBy=get_context().username, + ) + session.add(org_group) + ResourcePolicy.attach_resource_policy( + session=session, + group=group, + resource_uri=organization.organizationUri, + permissions=permissions.ORGANIZATION_INVITED, + resource_type=models.Organization.__name__, + ) + return organization, org_group + + @staticmethod + def find_group_membership(session, group, organization): + membership = ( + session.query(models.OrganizationGroup) + .filter( + ( + and_( + models.OrganizationGroup.groupUri == group, + models.OrganizationGroup.organizationUri == organization.organizationUri, + ) + ) + ) + .first() + ) + return membership + + @staticmethod + def validate_invite_params(data): + if not data: + raise exceptions.RequiredParameter(data) + if not data.get('groupUri'): + raise exceptions.RequiredParameter('groupUri') + + @staticmethod + @has_tenant_permission(permissions.MANAGE_ORGANIZATIONS) + @has_resource_permission(permissions.REMOVE_ORGANIZATION_GROUP) + def remove_group(session, uri, group): + organization = Organization.get_organization_by_uri(session, uri) + + if group == organization.SamlGroupName: + raise exceptions.UnauthorizedOperation( + action='REMOVE_TEAM', + message=f'Team: {group} is the owner of the organization {organization.name}', + ) + + group_env_objects_count = ( + session.query(Environment) + .filter( + and_( + Environment.organizationUri == organization.organizationUri, + Environment.SamlGroupName == group, + ) + ) + .count() + ) + if group_env_objects_count > 0: + raise exceptions.OrganizationResourcesFound( + action='Remove Team', + message=f'Team: {group} has {group_env_objects_count} linked environments on this environment.', + ) + + group_membership = Organization.find_group_membership(session, group, organization) + if group_membership: + session.delete(group_membership) + session.commit() + + ResourcePolicy.delete_resource_policy( + session=session, + group=group, + resource_uri=organization.organizationUri, + resource_type=models.Organization.__name__, + ) + return organization + + @staticmethod + def query_organization_groups(session, uri, filter) -> Query: + query = session.query(models.OrganizationGroup).filter(models.OrganizationGroup.organizationUri == uri) + if filter and filter.get('term'): + query = query.filter( + or_( + models.OrganizationGroup.groupUri.ilike('%' + filter.get('term') + '%'), + ) + ) + return query + + @staticmethod + @has_tenant_permission(permissions.MANAGE_ORGANIZATIONS) + @has_resource_permission(permissions.GET_ORGANIZATION) + def paginated_organization_groups(session, uri, data=None) -> dict: + return paginate( + query=Organization.query_organization_groups(session, uri, data), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def query_organization_invited_groups(session, organization, filter) -> Query: + query = ( + session.query(models.OrganizationGroup) + .join( + models.Organization, + models.OrganizationGroup.organizationUri == models.Organization.organizationUri, + ) + .filter( + and_( + models.Organization.organizationUri == organization.organizationUri, + models.OrganizationGroup.groupUri != models.Organization.SamlGroupName, + ) + ) + ) + if filter and filter.get('term'): + query = query.filter( + or_( + models.OrganizationGroup.groupUri.ilike('%' + filter.get('term') + '%'), + ) + ) + return query + + @staticmethod + 
@has_tenant_permission(permissions.MANAGE_ORGANIZATIONS) + @has_resource_permission(permissions.GET_ORGANIZATION) + def paginated_organization_invited_groups(session, uri, data=None) -> dict: + organization = Organization.get_organization_by_uri(session, uri) + return paginate( + query=Organization.query_organization_invited_groups(session, organization, data), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def count_organization_invited_groups(session, uri, group) -> int: + groups = ( + session.query(models.OrganizationGroup) + .filter( + and_( + models.OrganizationGroup.organizationUri == uri, + models.OrganizationGroup.groupUri != group, + ) + ) + .count() + ) + return groups + + @staticmethod + def count_organization_environments(session, uri) -> int: + envs = ( + session.query(Environment) + .filter( + Environment.organizationUri == uri, + ) + .count() + ) + return envs + + @staticmethod + def find_organization_membership(session, uri, groups) -> int: + groups = ( + session.query(models.OrganizationGroup) + .filter( + and_( + models.OrganizationGroup.organizationUri == uri, + models.OrganizationGroup.groupUri.in_(groups), + ) + ) + .count() + ) + if groups >= 1: + return True + else: + return False diff --git a/backend/dataall/core/permissions/__init__.py b/backend/dataall/core/permissions/__init__.py new file mode 100644 index 000000000..9e9b57c06 --- /dev/null +++ b/backend/dataall/core/permissions/__init__.py @@ -0,0 +1 @@ +from dataall.core.permissions import api diff --git a/backend/dataall/core/permissions/api/__init__.py b/backend/dataall/core/permissions/api/__init__.py new file mode 100644 index 000000000..84e4b63a9 --- /dev/null +++ b/backend/dataall/core/permissions/api/__init__.py @@ -0,0 +1,9 @@ +from dataall.core.permissions.api import ( + input_types, + queries, + resolvers, + types, + mutations +) + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/core/permissions/api/input_types.py b/backend/dataall/core/permissions/api/input_types.py new file mode 100644 index 000000000..06b26ad7a --- /dev/null +++ b/backend/dataall/core/permissions/api/input_types.py @@ -0,0 +1,10 @@ +from dataall.base.api import gql + + +UpdateGroupTenantPermissionsInput = gql.InputType( + name='UpdateGroupTenantPermissionsInput', + arguments=[ + gql.Argument('permissions', gql.ArrayType(gql.String)), + gql.Argument('groupUri', gql.NonNullableType(gql.String)), + ], +) diff --git a/backend/dataall/core/permissions/api/mutations.py b/backend/dataall/core/permissions/api/mutations.py new file mode 100644 index 000000000..1ba069ba8 --- /dev/null +++ b/backend/dataall/core/permissions/api/mutations.py @@ -0,0 +1,25 @@ +from dataall.base.api import gql +from .input_types import UpdateGroupTenantPermissionsInput +from .resolvers import * + + +updateGroupPermission = gql.MutationField( + name='updateGroupTenantPermissions', + args=[ + gql.Argument( + name='input', type=gql.NonNullableType(UpdateGroupTenantPermissionsInput) + ) + ], + type=gql.Boolean, + resolver=update_group_permissions, +) + +updateSSMParameter = gql.MutationField( + name='updateSSMParameter', + args=[ + gql.Argument(name='name', type=gql.NonNullableType(gql.String)), + gql.Argument(name='value', type=gql.NonNullableType(gql.String)) + ], + type=gql.String, + resolver=update_ssm_parameter, +) diff --git a/backend/dataall/core/permissions/api/queries.py b/backend/dataall/core/permissions/api/queries.py new file mode 100644 index 
000000000..773e6a65a --- /dev/null +++ b/backend/dataall/core/permissions/api/queries.py @@ -0,0 +1,18 @@ +from dataall.base.api import gql +from .resolvers import * + + +listTenantPermissions = gql.QueryField( + name='listTenantPermissions', + type=gql.ArrayType(gql.Ref('Permission')), + resolver=list_tenant_permissions, +) + +listTenantGroups = gql.QueryField( + name='listTenantGroups', + args=[ + gql.Argument(name='filter', type=gql.Ref('GroupFilter')), + ], + type=gql.Ref('GroupSearchResult'), + resolver=list_tenant_groups, +) diff --git a/backend/dataall/core/permissions/api/resolvers.py b/backend/dataall/core/permissions/api/resolvers.py new file mode 100644 index 000000000..70a8cf087 --- /dev/null +++ b/backend/dataall/core/permissions/api/resolvers.py @@ -0,0 +1,48 @@ +import logging +import os + +from dataall.base.aws.sts import SessionHelper +from dataall.base.aws.parameter_store import ParameterStoreManager +from dataall.core.permissions.db.tenant_policy_repositories import TenantPolicy + +log = logging.getLogger(__name__) + + +def update_group_permissions(context, source, input=None): + with context.engine.scoped_session() as session: + return TenantPolicy.update_group_permissions( + session=session, + username=context.username, + groups=context.groups, + uri=input['groupUri'], + data=input, + check_perm=True, + ) + + +def list_tenant_permissions(context, source): + with context.engine.scoped_session() as session: + return TenantPolicy.list_tenant_permissions( + session=session, username=context.username, groups=context.groups + ) + + +def list_tenant_groups(context, source, filter=None): + if not filter: + filter = {} + with context.engine.scoped_session() as session: + return TenantPolicy.list_tenant_groups( + session=session, + username=context.username, + groups=context.groups, + uri=None, + data=filter, + check_perm=True, + ) + + +def update_ssm_parameter(context, source, name: str = None, value: str = None): + current_account = SessionHelper.get_account() + region = os.getenv('AWS_REGION', 'eu-west-1') + response = ParameterStoreManager.update_parameter(AwsAccountId=current_account, region=region, parameter_name=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/{name}', parameter_value=value) + return response diff --git a/backend/dataall/core/permissions/api/types.py b/backend/dataall/core/permissions/api/types.py new file mode 100644 index 000000000..af39431f3 --- /dev/null +++ b/backend/dataall/core/permissions/api/types.py @@ -0,0 +1,41 @@ +from dataall.base.api import gql +from dataall.core.notifications.db.notification_models import Notification +from dataall.core.permissions.db.permission_models import PermissionType + + +def resolve_enum(context, source: Notification): + return source.type.name if source.type else PermissionType.TENANT.name + + +Permission = gql.ObjectType( + name='Permission', + fields=[ + gql.Field(name='permissionUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='type', type=gql.String, resolver=resolve_enum), + gql.Field(name='name', type=gql.NonNullableType(gql.String)), + gql.Field(name='description', type=gql.NonNullableType(gql.String)), + ], +) + + +PermissionSearchResult = gql.ObjectType( + name='PermissionSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', 
type=gql.ArrayType(Permission)), + ], +) + + +Tenant = gql.ObjectType( + name='Tenant', + fields=[ + gql.Field(name='tenantUri', type=gql.ID), + gql.Field(name='name', type=gql.String), + gql.Field(name='created', type=gql.String), + ], +) diff --git a/backend/dataall/core/permissions/db/__init__.py b/backend/dataall/core/permissions/db/__init__.py new file mode 100644 index 000000000..6a5c2fcef --- /dev/null +++ b/backend/dataall/core/permissions/db/__init__.py @@ -0,0 +1,14 @@ +import logging + +from dataall.core.permissions.db import permission_models +from dataall.core.permissions.db.permission_repositories import Permission +from dataall.core.permissions.db.tenant_repositories import Tenant + +log = logging.getLogger("Permissions") + + +def save_permissions_with_tenant(engine, envname=None): + with engine.scoped_session() as session: + log.info('Initiating permissions') + Tenant.save_tenant(session, name='dataall', description='Tenant dataall') + Permission.init_permissions(session) diff --git a/backend/dataall/core/permissions/db/group_policy_repositories.py b/backend/dataall/core/permissions/db/group_policy_repositories.py new file mode 100644 index 000000000..7a916bf4c --- /dev/null +++ b/backend/dataall/core/permissions/db/group_policy_repositories.py @@ -0,0 +1,50 @@ +from dataall.core.environment.db.environment_models import EnvironmentGroup +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.base.db.exceptions import UnauthorizedOperation + + +class GroupPolicy: + """Checks permission of environment group""" + @staticmethod + def check_group_environment_permission( + session, username, groups, uri, group, permission_name + ): + GroupPolicy.check_group_environment_membership( + session=session, + username=username, + user_groups=groups, + group=group, + environment_uri=uri, + permission_name=permission_name, + ) + + ResourcePolicy.check_user_resource_permission( + session=session, + username=username, + groups=[group], + resource_uri=uri, + permission_name=permission_name, + ) + + @staticmethod + def check_group_environment_membership( + session, environment_uri, group, username, user_groups, permission_name + ): + if group and group not in user_groups: + raise UnauthorizedOperation( + action=permission_name, + message=f'User: {username} is not a member of the team {group}', + ) + + belongs_to_env = ( + session.query(EnvironmentGroup) + .filter(EnvironmentGroup.environmentUri == environment_uri) + .filter(EnvironmentGroup.groupUri.in_([group])) + .count() + ) + + if not belongs_to_env: + raise UnauthorizedOperation( + action=permission_name, + message=f'Team: {group} is not a member of the environment {environment_uri}', + ) diff --git a/backend/dataall/core/permissions/db/permission_models.py b/backend/dataall/core/permissions/db/permission_models.py new file mode 100644 index 000000000..fe29d2d20 --- /dev/null +++ b/backend/dataall/core/permissions/db/permission_models.py @@ -0,0 +1,98 @@ +import datetime +import enum + +from sqlalchemy import Column, String, DateTime, ForeignKey, Enum as DBEnum +from sqlalchemy.orm import relationship + +from dataall.base.db import Base, utils + + +class PermissionType(enum.Enum): + TENANT = 'TENANT' + RESOURCE = 'RESOURCE' + + +class Permission(Base): + __tablename__ = 'permission' + permissionUri = Column(String, primary_key=True, default=utils.uuid('permission')) + name = Column(String, nullable=False, index=True) + type = Column(DBEnum(PermissionType), nullable=False) + description = 
Column(String, nullable=False) + created = Column(DateTime, default=datetime.datetime.now) + updated = Column(DateTime, onupdate=datetime.datetime.now) + + +class TenantPolicy(Base): + __tablename__ = 'tenant_policy' + + sid = Column(String, primary_key=True, default=utils.uuid('tenant_policy')) + + tenantUri = Column(String, ForeignKey('tenant.tenantUri'), nullable=False) + tenant = relationship('Tenant') + + principalId = Column(String, nullable=False, index=True) + principalType = Column( + DBEnum('USER', 'GROUP', 'SERVICE', name='tenant_principal_type'), + default='GROUP', + ) + + permissions = relationship( + 'TenantPolicyPermission', uselist=True, backref='tenant_policy' + ) + + created = Column(DateTime, default=datetime.datetime.now) + updated = Column(DateTime, onupdate=datetime.datetime.now) + + +class TenantPolicyPermission(Base): + __tablename__ = 'tenant_policy_permission' + + sid = Column(String, ForeignKey(TenantPolicy.sid), primary_key=True) + permissionUri = Column( + String, ForeignKey(Permission.permissionUri), primary_key=True + ) + permission = relationship('Permission') + created = Column(DateTime, default=datetime.datetime.now) + updated = Column(DateTime, onupdate=datetime.datetime.now) + + +class ResourcePolicy(Base): + __tablename__ = 'resource_policy' + + sid = Column(String, primary_key=True, default=utils.uuid('resource_policy')) + + resourceUri = Column(String, nullable=False, index=True) + resourceType = Column(String, nullable=False, index=True) + + principalId = Column(String, nullable=False, index=True) + principalType = Column( + DBEnum('USER', 'GROUP', 'SERVICE', name='rp_principal_type'), default='GROUP' + ) + + permissions = relationship( + 'ResourcePolicyPermission', uselist=True, backref='resource_policy' + ) + + created = Column(DateTime, default=datetime.datetime.now) + updated = Column(DateTime, onupdate=datetime.datetime.now) + + +class ResourcePolicyPermission(Base): + __tablename__ = 'resource_policy_permission' + + sid = Column(String, ForeignKey(ResourcePolicy.sid), primary_key=True) + permissionUri = Column( + String, ForeignKey(Permission.permissionUri), primary_key=True + ) + permission = relationship('Permission') + created = Column(DateTime, default=datetime.datetime.now) + updated = Column(DateTime, onupdate=datetime.datetime.now) + + +class Tenant(Base): + __tablename__ = 'tenant' + tenantUri = Column(String, primary_key=True, default=utils.uuid('tenant')) + name = Column(String, nullable=False, index=True, unique=True) + description = Column(String, default='No description provided') + created = Column(DateTime, default=datetime.datetime.now) + updated = Column(DateTime, onupdate=datetime.datetime.now) diff --git a/backend/dataall/core/permissions/db/permission_repositories.py b/backend/dataall/core/permissions/db/permission_repositories.py new file mode 100644 index 000000000..c77ae6ab9 --- /dev/null +++ b/backend/dataall/core/permissions/db/permission_repositories.py @@ -0,0 +1,134 @@ +import logging + +from dataall.core.permissions.db.permission_models import PermissionType +from dataall.base.db import exceptions +from dataall.core.permissions import permissions +from dataall.core.permissions.db import permission_models as models + + +logger = logging.getLogger(__name__) + + +class Permission: + @staticmethod + def find_permission_by_name( + session, permission_name: str, permission_type: str + ) -> models.Permission: + if permission_name: + permission = ( + session.query(models.Permission) + .filter( + models.Permission.name == 
permission_name,
+                    models.Permission.type == permission_type,
+                )
+                .first()
+            )
+            return permission
+
+    @staticmethod
+    def get_permission_by_name(
+        session, permission_name: str, permission_type: str
+    ) -> models.Permission:
+        if not permission_name:
+            raise exceptions.RequiredParameter(param_name='permission_name')
+        permission = Permission.find_permission_by_name(
+            session, permission_name, permission_type
+        )
+        if not permission:
+            raise exceptions.ObjectNotFound('Permission', permission_name)
+        return permission
+
+    @staticmethod
+    def find_permission_by_uri(
+        session, permission_uri: str, permission_type: str
+    ) -> models.Permission:
+        if permission_uri:
+            permission = (
+                session.query(models.Permission)
+                .filter(
+                    models.Permission.permissionUri == permission_uri,
+                    models.Permission.type == permission_type,
+                )
+                .first()
+            )
+            return permission
+
+    @staticmethod
+    def get_permission_by_uri(
+        session, permission_uri: str, permission_type: str
+    ) -> models.Permission:
+        if not permission_uri:
+            raise exceptions.RequiredParameter(param_name='permission_uri')
+        permission = Permission.find_permission_by_uri(
+            session, permission_uri, permission_type
+        )
+        if not permission:
+            raise exceptions.ObjectNotFound('Permission', permission_uri)
+        return permission
+
+    @staticmethod
+    def save_permission(
+        session, name: str, description: str, permission_type: str
+    ) -> models.Permission:
+        if not name:
+            raise exceptions.RequiredParameter('name')
+        if not permission_type:
+            raise exceptions.RequiredParameter('permission_type')
+        permission = Permission.find_permission_by_name(session, name, permission_type)
+        if permission:
+            logger.info(f'Permission {permission.name} already exists')
+        else:
+            permission = models.Permission(
+                name=name,
+                description=description if description else f'Allows {name}',
+                type=permission_type,
+            )
+            session.add(permission)
+        return permission
+
+    @staticmethod
+    def init_permissions(session):
+        perms = []
+        count_resource_permissions = (
+            session.query(models.Permission)
+            .filter(models.Permission.type == PermissionType.RESOURCE.name)
+            .count()
+        )
+
+        logger.debug(f'count_resource_permissions: {count_resource_permissions}, RESOURCES_ALL: {len(permissions.RESOURCES_ALL_WITH_DESC)}')
+
+        if count_resource_permissions < len(permissions.RESOURCES_ALL_WITH_DESC):
+            for name, desc in permissions.RESOURCES_ALL_WITH_DESC.items():
+                perms.append(
+                    Permission.save_permission(
+                        session,
+                        name=name,
+                        description=desc,
+                        permission_type=PermissionType.RESOURCE.name,
+                    )
+                )
+                logger.info(f'Saved permission {name} successfully')
+            logger.info(f'Saved {len(perms)} resource permissions successfully')
+
+        count_tenant_permissions = (
+            session.query(models.Permission)
+            .filter(models.Permission.type == PermissionType.TENANT.name)
+            .count()
+        )
+
+        logger.debug(f'count_tenant_permissions: {count_tenant_permissions}, TENANT_ALL: {len(permissions.TENANT_ALL_WITH_DESC)}')
+
+        if count_tenant_permissions < len(permissions.TENANT_ALL_WITH_DESC):
+            for name, desc in permissions.TENANT_ALL_WITH_DESC.items():
+                perms.append(
+                    Permission.save_permission(
+                        session,
+                        name=name,
+                        description=desc,
+                        permission_type=PermissionType.TENANT.name,
+                    )
+                )
+                logger.info(f'Saved permission {name} successfully')
+            logger.info(f'Saved {len(perms)} permissions successfully')
+        session.commit()
+        return perms
diff --git a/backend/dataall/core/permissions/db/resource_policy_repositories.py b/backend/dataall/core/permissions/db/resource_policy_repositories.py
new file mode 100644
index 
000000000..7c93011b5 --- /dev/null +++ b/backend/dataall/core/permissions/db/resource_policy_repositories.py @@ -0,0 +1,275 @@ +import logging +from typing import Optional + +from sqlalchemy.sql import and_ + +from dataall.core.permissions.db.permission_repositories import Permission +from dataall.core.permissions.db.permission_models import PermissionType +from dataall.base.db import exceptions +from dataall.core.permissions.db import permission_models as models + +logger = logging.getLogger(__name__) + + +class ResourcePolicy: + @staticmethod + def check_user_resource_permission( + session, username: str, groups: [str], resource_uri: str, permission_name: str + ): + resource_policy = ResourcePolicy.has_user_resource_permission( + session=session, + username=username, + groups=groups, + permission_name=permission_name, + resource_uri=resource_uri, + ) + if not resource_policy: + raise exceptions.ResourceUnauthorized( + username=username, + action=permission_name, + resource_uri=resource_uri, + ) + else: + return resource_policy + + @staticmethod + def has_user_resource_permission( + session, username: str, groups: [str], resource_uri: str, permission_name: str + ) -> Optional[models.ResourcePolicy]: + + if not username or not permission_name or not resource_uri: + return None + + policy: models.ResourcePolicy = ( + session.query(models.ResourcePolicy) + .join( + models.ResourcePolicyPermission, + models.ResourcePolicy.sid == models.ResourcePolicyPermission.sid, + ) + .join( + models.Permission, + models.Permission.permissionUri + == models.ResourcePolicyPermission.permissionUri, + ) + .filter( + and_( + models.ResourcePolicy.principalId.in_(groups), + models.ResourcePolicy.principalType == 'GROUP', + models.Permission.name == permission_name, + models.ResourcePolicy.resourceUri == resource_uri, + ) + ) + .first() + ) + + if not policy: + return None + else: + return policy + + @staticmethod + def has_group_resource_permission( + session, group_uri: str, resource_uri: str, permission_name: str + ) -> Optional[models.ResourcePolicy]: + + if not group_uri or not permission_name or not resource_uri: + return None + + policy: models.ResourcePolicy = ( + session.query(models.ResourcePolicy) + .join( + models.ResourcePolicyPermission, + models.ResourcePolicy.sid == models.ResourcePolicyPermission.sid, + ) + .join( + models.Permission, + models.Permission.permissionUri + == models.ResourcePolicyPermission.permissionUri, + ) + .filter( + and_( + models.ResourcePolicy.principalId == group_uri, + models.ResourcePolicy.principalType == 'GROUP', + models.Permission.name == permission_name, + models.ResourcePolicy.resourceUri == resource_uri, + ) + ) + .first() + ) + + if not policy: + return None + else: + return policy + + @staticmethod + def find_resource_policy( + session, group_uri: str, resource_uri: str + ) -> models.ResourcePolicy: + if not group_uri: + raise exceptions.RequiredParameter(param_name='group') + if not resource_uri: + raise exceptions.RequiredParameter(param_name='resource_uri') + resource_policy = ( + session.query(models.ResourcePolicy) + .filter( + and_( + models.ResourcePolicy.principalId == group_uri, + models.ResourcePolicy.resourceUri == resource_uri, + ) + ) + .first() + ) + return resource_policy + + @staticmethod + def attach_resource_policy( + session, + group: str, + permissions: [str], + resource_uri: str, + resource_type: str, + ) -> models.ResourcePolicy: + + ResourcePolicy.validate_attach_resource_policy_params( + group, permissions, resource_uri, resource_type + ) 
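+        # save_resource_policy below reuses any existing (group, resource) policy row,
+        # and add_permission_to_resource_policy links only permissions the group does
+        # not already hold, so attaching a resource policy is an idempotent upsert.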
+ + policy = ResourcePolicy.save_resource_policy( + session, group, resource_uri, resource_type + ) + + ResourcePolicy.add_permission_to_resource_policy( + session, group, permissions, resource_uri, policy + ) + + return policy + + @staticmethod + def delete_resource_policy( + session, + group: str, + resource_uri: str, + resource_type: str = None, + ) -> bool: + + ResourcePolicy.validate_delete_resource_policy_params(group, resource_uri) + policy = ResourcePolicy.find_resource_policy( + session, group_uri=group, resource_uri=resource_uri + ) + if policy: + for permission in policy.permissions: + session.delete(permission) + session.delete(policy) + session.commit() + + return True + + @staticmethod + def validate_attach_resource_policy_params( + group, permissions, resource_uri, resource_type + ): + if not group: + raise exceptions.RequiredParameter(param_name='group') + if not permissions: + raise exceptions.RequiredParameter(param_name='permissions') + if not resource_uri: + raise exceptions.RequiredParameter(param_name='resource_uri') + if not resource_type: + raise exceptions.RequiredParameter(param_name='resource_type') + + @staticmethod + def save_resource_policy(session, group, resource_uri, resource_type): + ResourcePolicy.validate_save_resource_policy_params( + group, resource_uri, resource_type + ) + policy = ResourcePolicy.find_resource_policy(session, group, resource_uri) + if not policy: + policy = models.ResourcePolicy( + principalId=group, + principalType='GROUP', + resourceUri=resource_uri, + resourceType=resource_type, + ) + session.add(policy) + session.commit() + return policy + + @staticmethod + def validate_save_resource_policy_params(group, resource_uri, resource_type): + if not group: + raise exceptions.RequiredParameter(param_name='group') + if not resource_uri: + raise exceptions.RequiredParameter(param_name='resource_uri') + if not resource_type: + raise exceptions.RequiredParameter(param_name='resource_type') + + @staticmethod + def add_permission_to_resource_policy( + session, group, permissions, resource_uri, policy + ): + ResourcePolicy.validate_add_permission_to_resource_policy_params( + group, permissions, policy, resource_uri + ) + + for permission in permissions: + if not ResourcePolicy.has_group_resource_permission( + session, + group_uri=group, + permission_name=permission, + resource_uri=resource_uri, + ): + ResourcePolicy.associate_permission_to_resource_policy( + session, policy, permission + ) + + @staticmethod + def validate_add_permission_to_resource_policy_params( + group, permissions, policy, resource_uri + ): + if not group: + raise exceptions.RequiredParameter(param_name='group') + if not permissions: + raise exceptions.RequiredParameter(param_name='permissions') + if not resource_uri: + raise exceptions.RequiredParameter(param_name='resource_uri') + if not policy: + raise exceptions.RequiredParameter(param_name='policy') + + @staticmethod + def validate_delete_resource_policy_params(group, resource_uri): + if not group: + raise exceptions.RequiredParameter(param_name='group') + if not resource_uri: + raise exceptions.RequiredParameter(param_name='resource_uri') + + @staticmethod + def associate_permission_to_resource_policy(session, policy, permission): + if not policy: + raise exceptions.RequiredParameter(param_name='policy') + if not permission: + raise exceptions.RequiredParameter(param_name='permission') + policy_permission = models.ResourcePolicyPermission( + sid=policy.sid, + permissionUri=Permission.get_permission_by_name( + session, 
permission, permission_type=PermissionType.RESOURCE.name + ).permissionUri, + ) + session.add(policy_permission) + session.commit() + + @staticmethod + def get_resource_policy_permissions(session, group_uri, resource_uri): + if not group_uri: + raise exceptions.RequiredParameter(param_name='group_uri') + if not resource_uri: + raise exceptions.RequiredParameter(param_name='resource_uri') + policy = ResourcePolicy.find_resource_policy( + session=session, + group_uri=group_uri, + resource_uri=resource_uri, + ) + permissions = [] + for p in policy.permissions: + permissions.append(p.permission) + return permissions diff --git a/backend/dataall/core/permissions/db/tenant_policy_repositories.py b/backend/dataall/core/permissions/db/tenant_policy_repositories.py new file mode 100644 index 000000000..af627bb67 --- /dev/null +++ b/backend/dataall/core/permissions/db/tenant_policy_repositories.py @@ -0,0 +1,403 @@ +import logging + +from sqlalchemy.sql import and_ + +from dataall.core.permissions.db.permission_models import PermissionType +from dataall.base.db import exceptions, paginate +from dataall.core.permissions import permissions +from dataall.core.permissions.db import permission_models as models +from dataall.core.permissions.db.permission_repositories import Permission +from dataall.core.permissions.db.tenant_repositories import Tenant as TenantService + +logger = logging.getLogger(__name__) + +TENANT_NAME = 'dataall' + + +class TenantPolicy: + @staticmethod + def is_tenant_admin(groups: [str]): + if not groups: + return False + + if 'DAAdministrators' in groups: + return True + + return False + + @staticmethod + def check_user_tenant_permission( + session, username: str, groups: [str], tenant_name: str, permission_name: str + ): + if TenantPolicy.is_tenant_admin(groups): + return True + + tenant_policy = TenantPolicy.has_user_tenant_permission( + session=session, + username=username, + groups=groups, + permission_name=permission_name, + tenant_name=tenant_name, + ) + + if not tenant_policy: + raise exceptions.TenantUnauthorized( + username=username, + action=permission_name, + tenant_name=tenant_name, + ) + + else: + return tenant_policy + + @staticmethod + def has_user_tenant_permission( + session, username: str, groups: [str], tenant_name: str, permission_name: str + ): + if not username or not permission_name: + return False + tenant_policy: models.TenantPolicy = ( + session.query(models.TenantPolicy) + .join( + models.TenantPolicyPermission, + models.TenantPolicy.sid == models.TenantPolicyPermission.sid, + ) + .join( + models.Tenant, + models.Tenant.tenantUri == models.TenantPolicy.tenantUri, + ) + .join( + models.Permission, + models.Permission.permissionUri + == models.TenantPolicyPermission.permissionUri, + ) + .filter( + models.TenantPolicy.principalId.in_(groups), + models.Permission.name == permission_name, + models.Tenant.name == tenant_name, + ) + .first() + ) + return tenant_policy + + @staticmethod + def has_group_tenant_permission( + session, group_uri: str, tenant_name: str, permission_name: str + ): + if not group_uri or not permission_name: + return False + + tenant_policy: models.TenantPolicy = ( + session.query(models.TenantPolicy) + .join( + models.TenantPolicyPermission, + models.TenantPolicy.sid == models.TenantPolicyPermission.sid, + ) + .join( + models.Tenant, + models.Tenant.tenantUri == models.TenantPolicy.tenantUri, + ) + .join( + models.Permission, + models.Permission.permissionUri + == models.TenantPolicyPermission.permissionUri, + ) + .filter( + and_( + 
models.TenantPolicy.principalId == group_uri, + models.Permission.name == permission_name, + models.Tenant.name == tenant_name, + ) + ) + .first() + ) + + if not tenant_policy: + return False + else: + return tenant_policy + + @staticmethod + def find_tenant_policy(session, group_uri: str, tenant_name: str): + TenantPolicy.validate_find_tenant_policy(group_uri, tenant_name) + + tenant_policy = ( + session.query(models.TenantPolicy) + .join( + models.Tenant, models.Tenant.tenantUri == models.TenantPolicy.tenantUri + ) + .filter( + and_( + models.TenantPolicy.principalId == group_uri, + models.Tenant.name == tenant_name, + ) + ) + .first() + ) + return tenant_policy + + @staticmethod + def validate_find_tenant_policy(group_uri, tenant_name): + if not group_uri: + raise exceptions.RequiredParameter(param_name='group_uri') + if not tenant_name: + raise exceptions.RequiredParameter(param_name='tenant_name') + + @staticmethod + def attach_group_tenant_policy( + session, + group: str, + permissions: [str], + tenant_name: str, + ) -> models.TenantPolicy: + + TenantPolicy.validate_attach_tenant_policy(group, permissions, tenant_name) + + policy = TenantPolicy.save_group_tenant_policy(session, group, tenant_name) + + TenantPolicy.add_permission_to_group_tenant_policy( + session, group, permissions, tenant_name, policy + ) + + return policy + + @staticmethod + def validate_attach_tenant_policy(group, permissions, tenant_name): + if not group: + raise exceptions.RequiredParameter(param_name='group') + if not permissions: + raise exceptions.RequiredParameter(param_name='permissions') + if not tenant_name: + raise exceptions.RequiredParameter(param_name='tenant_name') + + @staticmethod + def save_group_tenant_policy(session, group, tenant_name): + + TenantPolicy.validate_save_tenant_policy(group, tenant_name) + + policy = TenantPolicy.find_tenant_policy(session, group, tenant_name) + if not policy: + policy = models.TenantPolicy( + principalId=group, + principalType='GROUP', + tenant=TenantService.get_tenant_by_name(session, tenant_name), + ) + session.add(policy) + session.commit() + return policy + + @staticmethod + def validate_save_tenant_policy(group, tenant_name): + if not group: + raise exceptions.RequiredParameter(param_name='group') + if not tenant_name: + raise exceptions.RequiredParameter(param_name='tenant_name') + + @staticmethod + def add_permission_to_group_tenant_policy( + session, group, permissions, tenant_name, policy + ): + TenantPolicy.validate_add_permission_to_tenant_policy_params( + group, permissions, policy, tenant_name + ) + + for permission in permissions: + if not TenantPolicy.has_group_tenant_permission( + session, + group_uri=group, + permission_name=permission, + tenant_name=tenant_name, + ): + TenantPolicy.associate_permission_to_tenant_policy( + session, policy, permission + ) + + @staticmethod + def validate_add_permission_to_tenant_policy_params( + group, permissions, policy, tenant_name + ): + if not group: + raise exceptions.RequiredParameter(param_name='group') + TenantPolicy.validate_add_permissions_params(permissions, policy, tenant_name) + + @staticmethod + def validate_add_permissions_params(permissions, policy, tenant_name): + if not permissions: + raise exceptions.RequiredParameter(param_name='permissions') + if not tenant_name: + raise exceptions.RequiredParameter(param_name='tenant_name') + if not policy: + raise exceptions.RequiredParameter(param_name='policy') + + @staticmethod + def associate_permission_to_tenant_policy(session, policy, permission): + 
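+        # Assumes the named permission has already been seeded as a TENANT permission;
+        # get_permission_by_name raises ObjectNotFound otherwise.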
policy_permission = models.TenantPolicyPermission(
+            sid=policy.sid,
+            permissionUri=Permission.get_permission_by_name(
+                session, permission, PermissionType.TENANT.name
+            ).permissionUri,
+        )
+        session.add(policy_permission)
+        session.commit()
+
+    @staticmethod
+    def get_tenant_policy_permissions(session, group_uri, tenant_name):
+        if not group_uri:
+            raise exceptions.RequiredParameter(param_name='group_uri')
+        if not tenant_name:
+            raise exceptions.RequiredParameter(param_name='tenant_name')
+        policy = TenantPolicy.find_tenant_policy(
+            session=session,
+            group_uri=group_uri,
+            tenant_name=tenant_name,
+        )
+        permissions = []
+        for p in policy.permissions:
+            permissions.append(p.permission)
+        return permissions
+
+    @staticmethod
+    def delete_tenant_policy(
+        session,
+        group: str,
+        tenant_name: str,
+    ) -> bool:
+
+        policy = TenantPolicy.find_tenant_policy(
+            session, group_uri=group, tenant_name=tenant_name
+        )
+        if policy:
+            for permission in policy.permissions:
+                session.delete(permission)
+            session.delete(policy)
+            session.commit()
+
+        return True
+
+    @staticmethod
+    def list_group_tenant_permissions(
+        session, username, groups, uri, data=None, check_perm=None
+    ):
+        if not groups:
+            raise exceptions.RequiredParameter('groups')
+        if not uri:
+            raise exceptions.RequiredParameter('groupUri')
+
+        if not TenantPolicy.is_tenant_admin(groups):
+            raise exceptions.UnauthorizedOperation(
+                action='LIST_TENANT_TEAM_PERMISSIONS',
+                message=f'User: {username} is not allowed to manage tenant permissions',
+            )
+
+        return TenantPolicy.get_tenant_policy_permissions(
+            session=session,
+            group_uri=uri,
+            tenant_name='dataall',
+        )
+
+    @staticmethod
+    def list_tenant_groups(session, username, groups, uri, data=None, check_perm=None):
+        if not groups:
+            raise exceptions.RequiredParameter('groups')
+
+        if not TenantPolicy.is_tenant_admin(groups):
+            raise exceptions.UnauthorizedOperation(
+                action='LIST_TENANT_TEAMS',
+                message=f'User: {username} is not allowed to manage tenant permissions',
+            )
+
+        query = session.query(
+            models.TenantPolicy.principalId.label('name'),
+            models.TenantPolicy.principalId.label('groupUri'),
+        ).filter(
+            and_(
+                models.TenantPolicy.principalType == 'GROUP',
+                models.TenantPolicy.principalId != 'DAAdministrators',
+            )
+        )
+
+        if data and data.get('term'):
+            query = query.filter(
+                models.TenantPolicy.principalId.ilike('%' + data.get('term') + '%')
+            )
+
+        return paginate(
+            query=query,
+            page=data.get('page', 1),
+            page_size=data.get('pageSize', 10),
+        ).to_dict()
+
+    @staticmethod
+    def list_tenant_permissions(session, username, groups):
+        if not TenantPolicy.is_tenant_admin(groups):
+            raise exceptions.UnauthorizedOperation(
+                action='LIST_TENANT_TEAM_PERMISSIONS',
+                message=f'User: {username} is not allowed to manage tenant permissions',
+            )
+        group_invitation_permissions = []
+        for p in permissions.TENANT_ALL:
+            group_invitation_permissions.append(
+                Permission.find_permission_by_name(
+                    session=session,
+                    permission_name=p,
+                    permission_type=PermissionType.TENANT.name,
+                )
+            )
+        return group_invitation_permissions
+
+    @staticmethod
+    def update_group_permissions(
+        session, username, groups, uri, data=None, check_perm=None
+    ):
+        TenantPolicy.validate_params(data)
+
+        if not TenantPolicy.is_tenant_admin(groups):
+            raise exceptions.UnauthorizedOperation(
+                action='UPDATE_TENANT_TEAM_PERMISSIONS',
+                message=f'User: {username} is not allowed to manage tenant permissions',
+            )
+
+        TenantPolicy.validate_permissions(
+            session, TENANT_NAME, data['permissions'], uri
+        )
+
+        TenantPolicy.delete_tenant_policy(
+            session=session, group=uri, tenant_name=TENANT_NAME
+        )
+        TenantPolicy.attach_group_tenant_policy(
+            session=session,
+            group=uri,
+            permissions=data['permissions'],
+            tenant_name=TENANT_NAME,
+        )
+
+        return True
+
+    @staticmethod
+    def validate_permissions(session, tenant_name, g_permissions, group):
+        g_permissions = list(set(g_permissions))
+
+        if any(p not in permissions.TENANT_ALL for p in g_permissions):
+            raise exceptions.TenantPermissionUnauthorized(
+                action='UPDATE_TENANT_TEAM_PERMISSIONS',
+                group_name=group,
+                tenant_name=tenant_name,
+            )
+
+        tenant_group_permissions = []
+        for p in g_permissions:
+            tenant_group_permissions.append(
+                Permission.find_permission_by_name(
+                    session=session,
+                    permission_name=p,
+                    permission_type=PermissionType.TENANT.name,
+                )
+            )
+        return tenant_group_permissions
+
+    @staticmethod
+    def validate_params(data):
+        if not data:
+            raise exceptions.RequiredParameter('data')
+        if not data.get('permissions'):
+            raise exceptions.RequiredParameter('permissions')
diff --git a/backend/dataall/core/permissions/db/tenant_repositories.py b/backend/dataall/core/permissions/db/tenant_repositories.py
new file mode 100644
index 000000000..a3d8aa331
--- /dev/null
+++ b/backend/dataall/core/permissions/db/tenant_repositories.py
@@ -0,0 +1,42 @@
+import logging
+
+from dataall.core.permissions.db import permission_models as models
+
+logger = logging.getLogger(__name__)
+
+
+class Tenant:
+    @staticmethod
+    def find_tenant_by_name(session, tenant_name: str) -> models.Tenant:
+        if tenant_name:
+            tenant = (
+                session.query(models.Tenant)
+                .filter(models.Tenant.name == tenant_name)
+                .first()
+            )
+            return tenant
+
+    @staticmethod
+    def get_tenant_by_name(session, tenant_name: str) -> models.Tenant:
+        if not tenant_name:
+            raise Exception('Tenant name is required')
+        tenant = Tenant.find_tenant_by_name(session, tenant_name)
+        if not tenant:
+            raise Exception('TenantNotFound')
+        return tenant
+
+    @staticmethod
+    def save_tenant(session, name: str, description: str) -> models.Tenant:
+        if not name:
+            raise Exception('Tenant name is required')
+
+        tenant = Tenant.find_tenant_by_name(session, name)
+        if tenant:
+            return tenant
+        else:
+            tenant = models.Tenant(
+                name=name, description=description if description else f'Tenant {name}'
+            )
+            session.add(tenant)
+            session.commit()
+            return tenant
diff --git a/backend/dataall/core/permissions/permission_checker.py b/backend/dataall/core/permissions/permission_checker.py
new file mode 100644
index 000000000..a6a8e13bc
--- /dev/null
+++ b/backend/dataall/core/permissions/permission_checker.py
@@ -0,0 +1,101 @@
+"""
+Contains decorators that check whether a user has permission to access
+and interact with resources or to perform certain actions in the app
+"""
+from typing import Protocol, Callable
+
+from dataall.base.context import RequestContext, get_context
+from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy
+from dataall.core.permissions.db.tenant_policy_repositories import TenantPolicy
+from dataall.base.utils.decorator_utls import process_func
+
+
+class Identifiable(Protocol):
+    """Protocol to identify resources for checking permissions"""
+    def get_resource_uri(self) -> str:
+        ...
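For orientation, a minimal usage sketch of the tenant bootstrap and tenant-policy repositories defined above; the local engine setup and the 'ExampleTeam' group are illustrative assumptions rather than part of this change:

```python
# Hedged sketch: wiring together save_permissions_with_tenant and TenantPolicy.
from dataall.base.db import get_engine
from dataall.core.permissions import permissions
from dataall.core.permissions.db import save_permissions_with_tenant
from dataall.core.permissions.db.tenant_policy_repositories import TenantPolicy

engine = get_engine(envname='local')
# Creates the 'dataall' tenant (Tenant.save_tenant) and seeds resource and
# tenant permissions (Permission.init_permissions).
save_permissions_with_tenant(engine)

with engine.scoped_session() as session:
    # Grant a team every tenant-level permission on the 'dataall' tenant.
    TenantPolicy.attach_group_tenant_policy(
        session=session,
        group='ExampleTeam',  # hypothetical SAML group
        permissions=permissions.TENANT_ALL,
        tenant_name='dataall',
    )
```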
+ + +def _check_tenant_permission(session, permission): + context: RequestContext = get_context() + TenantPolicy.check_user_tenant_permission( + session=session, + username=context.username, + groups=context.groups, + tenant_name='dataall', + permission_name=permission + ) + + +def _check_resource_permission(session, uri, permission): + context: RequestContext = get_context() + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=uri, + permission_name=permission, + ) + + +def has_resource_permission( + permission: str, + param_name: str = None, + resource_name: str = None, + parent_resource: Callable = None +): + """ + Decorator that check if a user has access to the resource. + The method or function decorated with this decorator must have a URI of accessing resource + Good rule of thumb: if there is a URI that accesses a specific resource, + hence it has URI - it must be decorated with this decorator + """ + if not param_name: + param_name = "uri" + + def decorator(f): + fn, fn_decorator = process_func(f) + + def decorated(*args, **kwargs): + uri: str + if resource_name: + resource: Identifiable = kwargs[resource_name] + uri = resource.get_resource_uri() + else: + if param_name not in kwargs: + raise KeyError(f"{f.__name__} doesn't have parameter {param_name}") + uri = kwargs[param_name] + + with get_context().db_engine.scoped_session() as session: + if parent_resource: + try: + uri = parent_resource(session, uri) + except TypeError: + uri = parent_resource.__func__(session, uri) + + _check_resource_permission(session, uri, permission) + + return fn(*args, **kwargs) + + return fn_decorator(decorated) + + return decorator + + +def has_tenant_permission(permission: str): + """ + Decorator to check if a user has a permission to do some action. 
+ All the information about the user is retrieved from RequestContext + """ + def decorator(f): + fn, fn_decorator = process_func(f) + + def decorated(*args, **kwargs): + with get_context().db_engine.scoped_session() as session: + _check_tenant_permission(session, permission) + + return fn(*args, **kwargs) + + return fn_decorator(decorated) + + return decorator diff --git a/backend/dataall/core/permissions/permissions.py b/backend/dataall/core/permissions/permissions.py new file mode 100644 index 000000000..8f94cde3b --- /dev/null +++ b/backend/dataall/core/permissions/permissions.py @@ -0,0 +1,152 @@ +""" +ORGANIZATION PERMISSIONS +""" +CREATE_ORGANIZATION = 'CREATE_ORGANIZATION' +UPDATE_ORGANIZATION = 'UPDATE_ORGANIZATION' +DELETE_ORGANIZATION = 'DELETE_ORGANIZATION' +GET_ORGANIZATION = 'GET_ORGANIZATION' +LINK_ENVIRONMENT = 'LINK_ENVIRONMENT' +INVITE_ORGANIZATION_GROUP = 'INVITE_ORGANIZATION_GROUP' +REMOVE_ORGANIZATION_GROUP = 'REMOVE_ORGANIZATION_GROUP' +ORGANIZATION_ALL = [ + CREATE_ORGANIZATION, + UPDATE_ORGANIZATION, + DELETE_ORGANIZATION, + LINK_ENVIRONMENT, + GET_ORGANIZATION, + INVITE_ORGANIZATION_GROUP, + REMOVE_ORGANIZATION_GROUP, +] +ORGANIZATION_INVITED = [LINK_ENVIRONMENT, GET_ORGANIZATION] + +""" +TENANT PERMISSIONS +""" +MANAGE_GROUPS = 'MANAGE_GROUPS' +MANAGE_ENVIRONMENT = 'MANAGE_ENVIRONMENT' +MANAGE_GLOSSARIES = 'MANAGE_GLOSSARIES' +MANAGE_ENVIRONMENTS = 'MANAGE_ENVIRONMENTS' +MANAGE_ORGANIZATIONS = 'MANAGE_ORGANIZATIONS' +MANAGE_SGMSTUDIO_NOTEBOOKS = 'MANAGE_SGMSTUDIO_NOTEBOOKS' + +""" +ENVIRONMENT +""" +UPDATE_ENVIRONMENT = 'UPDATE_ENVIRONMENT' +GET_ENVIRONMENT = 'GET_ENVIRONMENT' +DELETE_ENVIRONMENT = 'DELETE_ENVIRONMENT' +INVITE_ENVIRONMENT_GROUP = 'INVITE_ENVIRONMENT_GROUP' +REMOVE_ENVIRONMENT_GROUP = 'REMOVE_ENVIRONMENT_GROUP' +UPDATE_ENVIRONMENT_GROUP = 'UPDATE_ENVIRONMENT_GROUP' +ADD_ENVIRONMENT_CONSUMPTION_ROLES = 'ADD_ENVIRONMENT_CONSUMPTION_ROLES' +LIST_ENVIRONMENT_CONSUMPTION_ROLES = 'LIST_ENVIRONMENT_CONSUMPTION_ROLES' +LIST_ENVIRONMENT_GROUP_PERMISSIONS = 'LIST_ENVIRONMENT_GROUP_PERMISSIONS' +LIST_ENVIRONMENT_GROUPS = 'LIST_ENVIRONMENT_GROUPS' +CREDENTIALS_ENVIRONMENT = 'CREDENTIALS_ENVIRONMENT' +ENABLE_ENVIRONMENT_SUBSCRIPTIONS = 'ENABLE_ENVIRONMENT_SUBSCRIPTIONS' +DISABLE_ENVIRONMENT_SUBSCRIPTIONS = 'DISABLE_ENVIRONMENT_SUBSCRIPTIONS' +CREATE_NETWORK = 'CREATE_NETWORK' +LIST_ENVIRONMENT_NETWORKS = 'LIST_ENVIRONMENT_NETWORKS' + + +ENVIRONMENT_INVITED = [ + LIST_ENVIRONMENT_GROUP_PERMISSIONS, + GET_ENVIRONMENT, + LIST_ENVIRONMENT_GROUPS, + LIST_ENVIRONMENT_CONSUMPTION_ROLES, + INVITE_ENVIRONMENT_GROUP, + ADD_ENVIRONMENT_CONSUMPTION_ROLES, + CREATE_NETWORK, + LIST_ENVIRONMENT_NETWORKS, +] +ENVIRONMENT_INVITATION_REQUEST = [ + INVITE_ENVIRONMENT_GROUP, + ADD_ENVIRONMENT_CONSUMPTION_ROLES, + CREATE_NETWORK, +] +ENVIRONMENT_ALL = [ + UPDATE_ENVIRONMENT, + GET_ENVIRONMENT, + DELETE_ENVIRONMENT, + INVITE_ENVIRONMENT_GROUP, + REMOVE_ENVIRONMENT_GROUP, + UPDATE_ENVIRONMENT_GROUP, + LIST_ENVIRONMENT_GROUP_PERMISSIONS, + ADD_ENVIRONMENT_CONSUMPTION_ROLES, + LIST_ENVIRONMENT_CONSUMPTION_ROLES, + LIST_ENVIRONMENT_GROUPS, + CREDENTIALS_ENVIRONMENT, + ENABLE_ENVIRONMENT_SUBSCRIPTIONS, + DISABLE_ENVIRONMENT_SUBSCRIPTIONS, + CREATE_NETWORK, + LIST_ENVIRONMENT_NETWORKS, +] +""" +CONSUMPTION_ROLE +""" +REMOVE_ENVIRONMENT_CONSUMPTION_ROLE = 'REMOVE_ENVIRONMENT_CONSUMPTION_ROLE' +CONSUMPTION_ENVIRONMENT_ROLE_ALL = [ + LIST_ENVIRONMENT_CONSUMPTION_ROLES, + ADD_ENVIRONMENT_CONSUMPTION_ROLES +] +CONSUMPTION_ROLE_ALL = [ + REMOVE_ENVIRONMENT_CONSUMPTION_ROLE +] + +""" +GLOSSARIES 
+""" +CREATE_CATEGORY = 'CREATE_CATEGORY' +CREATE_TERM = 'CREATE_TERM' +UPDATE_NODE = 'UPDATE_NODE' +DELETE_GLOSSARY = 'DELETE_GLOSSARY' +APPROVE_ASSOCIATION = 'APPROVE_ASSOCIATION' +GLOSSARY_ALL = [ + CREATE_CATEGORY, + CREATE_TERM, + UPDATE_NODE, + DELETE_GLOSSARY, + APPROVE_ASSOCIATION, +] +""" +TENANT ALL +""" + +TENANT_ALL = [ + MANAGE_GLOSSARIES, + MANAGE_GROUPS, + MANAGE_ENVIRONMENTS, + MANAGE_ORGANIZATIONS, + MANAGE_SGMSTUDIO_NOTEBOOKS, +] + +TENANT_ALL_WITH_DESC = {k: k for k in TENANT_ALL} +TENANT_ALL_WITH_DESC[MANAGE_GLOSSARIES] = 'Manage glossaries' +TENANT_ALL_WITH_DESC[MANAGE_ENVIRONMENTS] = 'Manage environments' +TENANT_ALL_WITH_DESC[MANAGE_GROUPS] = 'Manage teams' +TENANT_ALL_WITH_DESC[MANAGE_ORGANIZATIONS] = 'Manage organizations' +TENANT_ALL_WITH_DESC[MANAGE_SGMSTUDIO_NOTEBOOKS] = 'Manage ML studio notebooks' + +""" +NETWORKS +""" +GET_NETWORK = 'GET_NETWORK' +UPDATE_NETWORK = 'UPDATE_NETWORK' +DELETE_NETWORK = 'DELETE_NETWORK' +NETWORK_ALL = [GET_NETWORK, UPDATE_NETWORK, DELETE_NETWORK] + +""" +RESOURCES_ALL +""" +RESOURCES_ALL = ( + ORGANIZATION_ALL + + ENVIRONMENT_ALL + + CONSUMPTION_ROLE_ALL + + GLOSSARY_ALL + + NETWORK_ALL +) + +RESOURCES_ALL_WITH_DESC = {k: k for k in RESOURCES_ALL} +RESOURCES_ALL_WITH_DESC[INVITE_ENVIRONMENT_GROUP] = 'Invite other teams to this environment' +RESOURCES_ALL_WITH_DESC[ADD_ENVIRONMENT_CONSUMPTION_ROLES] = 'Add IAM consumption roles to this environment' +RESOURCES_ALL_WITH_DESC[CREATE_NETWORK] = 'Create networks on this environment' diff --git a/backend/dataall/core/stacks/__init__.py b/backend/dataall/core/stacks/__init__.py new file mode 100644 index 000000000..43caaaa2a --- /dev/null +++ b/backend/dataall/core/stacks/__init__.py @@ -0,0 +1 @@ +from dataall.core.stacks import api, handlers diff --git a/backend/dataall/core/stacks/api/__init__.py b/backend/dataall/core/stacks/api/__init__.py new file mode 100644 index 000000000..2c28a23db --- /dev/null +++ b/backend/dataall/core/stacks/api/__init__.py @@ -0,0 +1,10 @@ +from . 
import ( + input_types, + mutations, + queries, + resolvers, + stack_helper, + types, +) + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations', 'stack_helper'] diff --git a/backend/dataall/core/stacks/api/input_types.py b/backend/dataall/core/stacks/api/input_types.py new file mode 100644 index 000000000..a71d64ed8 --- /dev/null +++ b/backend/dataall/core/stacks/api/input_types.py @@ -0,0 +1,19 @@ +from dataall.base.api import gql + +KeyValueTagInput = gql.InputType( + name='KeyValueTagInput', + arguments=[ + gql.Argument(name='key', type=gql.NonNullableType(gql.String)), + gql.Argument(name='value', type=gql.NonNullableType(gql.String)), + gql.Argument(name='cascade', type=gql.NonNullableType(gql.Boolean)), + ], +) + +UpdateKeyValueTagsInput = gql.InputType( + name='UpdateKeyValueTagsInput', + arguments=[ + gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), + gql.Argument(name='tags', type=gql.ArrayType(gql.Ref('KeyValueTagInput'))), + ], +) diff --git a/backend/dataall/core/stacks/api/mutations.py b/backend/dataall/core/stacks/api/mutations.py new file mode 100644 index 000000000..a4edd500d --- /dev/null +++ b/backend/dataall/core/stacks/api/mutations.py @@ -0,0 +1,25 @@ +from dataall.base.api import gql +from dataall.core.stacks.api.resolvers import update_stack, update_key_value_tags + + +updateStack = gql.MutationField( + name='updateStack', + type=gql.Ref('Stack'), + args=[ + gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), + ], + resolver=update_stack, +) + + +updateKeyValueTags = gql.MutationField( + name='updateKeyValueTags', + type=gql.ArrayType(gql.Ref('KeyValueTag')), + args=[ + gql.Argument( + name='input', type=gql.NonNullableType(gql.Ref('UpdateKeyValueTagsInput')) + ), + ], + resolver=update_key_value_tags, +) diff --git a/backend/dataall/core/stacks/api/queries.py b/backend/dataall/core/stacks/api/queries.py new file mode 100644 index 000000000..f4eac4413 --- /dev/null +++ b/backend/dataall/core/stacks/api/queries.py @@ -0,0 +1,33 @@ +from dataall.base.api import gql +from dataall.core.stacks.api.resolvers import get_stack, get_stack_logs, list_key_value_tags + +getStack = gql.QueryField( + name='getStack', + type=gql.Ref('Stack'), + args=[ + gql.Argument(name='environmentUri', type=gql.String), + gql.Argument(name='stackUri', type=gql.NonNullableType(gql.String)), + ], + resolver=get_stack, +) + +getStackLogs = gql.QueryField( + name='getStackLogs', + type=gql.ArrayType(gql.Ref('StackLog')), + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='stackUri', type=gql.NonNullableType(gql.String)), + ], + resolver=get_stack_logs, +) + + +listKeyValueTags = gql.QueryField( + name='listKeyValueTags', + type=gql.ArrayType(gql.Ref('KeyValueTag')), + args=[ + gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), + ], + resolver=list_key_value_tags, +) diff --git a/backend/dataall/core/stacks/api/resolvers.py b/backend/dataall/core/stacks/api/resolvers.py new file mode 100644 index 000000000..8a5a92131 --- /dev/null +++ b/backend/dataall/core/stacks/api/resolvers.py @@ -0,0 +1,133 @@ +import json +import logging +import os + +from dataall.base.api.context import Context +from dataall.core.environment.db.environment_models import Environment 
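
# Illustrative sketch: modules expose their own GraphQL fields with the same gql helpers
# used above. The notebook query and its resolver below are hypothetical.
from dataall.base.api import gql


def resolve_notebook_stack(context, source, notebookUri=None):
    ...  # hypothetical resolver body


getNotebookStack = gql.QueryField(
    name='getNotebookStack',
    type=gql.Ref('Stack'),
    args=[gql.Argument(name='notebookUri', type=gql.NonNullableType(gql.String))],
    resolver=resolve_notebook_stack,
)
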
+from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.stacks.api import stack_helper +from dataall.core.stacks.aws.cloudformation import CloudFormation +from dataall.core.stacks.aws.cloudwatch import CloudWatch +from dataall.core.stacks.db.stack_models import Stack as StackModel +from dataall.core.stacks.db.keyvaluetag_repositories import KeyValueTag +from dataall.core.stacks.db.stack_repositories import Stack +from dataall.base.db import exceptions +from dataall.base.utils import Parameter + +log = logging.getLogger(__name__) + + +def get_stack( + context: Context, source, environmentUri: str = None, stackUri: str = None +): + with context.engine.scoped_session() as session: + env: Environment = session.query(Environment).get(environmentUri) + stack: StackModel = session.query(StackModel).get(stackUri) + cfn_task = stack_helper.save_describe_stack_task(session, env, stack, None) + CloudFormation.describe_stack_resources(engine=context.engine, task=cfn_task) + return EnvironmentService.get_stack( + session=session, + uri=environmentUri, + stack_uri=stackUri, + ) + + +def resolve_link(context, source, **kwargs): + if not source: + return None + return f'https://{source.region}.console.aws.amazon.com/cloudformation/home?region={source.region}#/stacks/stackinfo?stackId={source.stackid}' + + +def resolve_outputs(context, source: StackModel, **kwargs): + if not source: + return None + return json.dumps(source.outputs or {}) + + +def resolve_resources(context, source: StackModel, **kwargs): + if not source: + return None + return json.dumps(source.resources or {}) + + +def resolve_error(context, source: StackModel, **kwargs): + if not source: + return None + return json.dumps(source.error or {}) + + +def resolve_events(context, source: StackModel, **kwargs): + if not source: + return None + return json.dumps(source.events or {}) + + +def resolve_task_id(context, source: StackModel, **kwargs): + if not source: + return None + if source.EcsTaskArn: + return source.EcsTaskArn.split('/')[-1] + + +def get_stack_logs( + context: Context, source, environmentUri: str = None, stackUri: str = None +): + with context.engine.scoped_session() as session: + stack = EnvironmentService.get_stack( + session=session, + uri=environmentUri, + stack_uri=stackUri + ) + if not stack.EcsTaskArn: + raise exceptions.AWSResourceNotFound( + action='GET_STACK_LOGS', + message='Logs could not be found for this stack', + ) + + query = f"""fields @timestamp, @message, @logStream, @log as @logGroup + | sort @timestamp asc + | filter @logStream like "{stack.EcsTaskArn.split('/')[-1]}" + """ + envname = os.getenv('envname', 'local') + results = CloudWatch.run_query( + query=query, + log_group_name=f"/{Parameter().get_parameter(env=envname, path='resourcePrefix')}/{envname}/ecs/cdkproxy", + days=1, + ) + log.info(f'Running Logs query {query}') + return results + + +def update_stack( + context: Context, source, targetUri: str = None, targetType: str = None +): + with context.engine.scoped_session() as session: + stack = Stack.update_stack( + session=session, + uri=targetUri, + target_type=targetType + ) + stack_helper.deploy_stack(stack.targetUri) + return stack + + +def list_key_value_tags( + context: Context, source, targetUri: str = None, targetType: str = None +): + with context.engine.scoped_session() as session: + return KeyValueTag.list_key_value_tags( + session=session, + uri=targetUri, + target_type=targetType, + ) + + +def update_key_value_tags(context: Context, source, 
input=None): + with context.engine.scoped_session() as session: + kv_tags = KeyValueTag.update_key_value_tags( + session=session, + uri=input['targetUri'], + data=input, + ) + stack_helper.deploy_stack(targetUri=input['targetUri']) + return kv_tags diff --git a/backend/dataall/core/stacks/api/stack_helper.py b/backend/dataall/core/stacks/api/stack_helper.py new file mode 100644 index 000000000..530792b68 --- /dev/null +++ b/backend/dataall/core/stacks/api/stack_helper.py @@ -0,0 +1,110 @@ +import os + +import requests + +from dataall.core.tasks.service_handlers import Worker +from dataall.base.config import config +from dataall.base.context import get_context +from dataall.core.environment.db.environment_models import Environment +from dataall.core.stacks.aws.ecs import Ecs +from dataall.core.stacks.db.stack_repositories import Stack +from dataall.core.stacks.db.stack_models import Stack as StackModel +from dataall.core.tasks.db.task_models import Task +from dataall.base.utils import Parameter + + +def get_stack_with_cfn_resources(targetUri: str, environmentUri: str): + context = get_context() + with context.db_engine.scoped_session() as session: + env: Environment = session.query(Environment).get(environmentUri) + stack: StackModel = Stack.find_stack_by_target_uri( + session, target_uri=targetUri + ) + if not stack: + stack = StackModel( + stack='environment', + payload={}, + targetUri=targetUri, + accountid=env.AwsAccountId if env else 'UNKNOWN', + region=env.region if env else 'UNKNOWN', + resources=str({}), + error=str({}), + outputs=str({}), + ) + return stack + + cfn_task = save_describe_stack_task(session, env, stack, targetUri) + Worker.queue(engine=context.db_engine, task_ids=[cfn_task.taskUri]) + return stack + + +def save_describe_stack_task(session, environment, stack, target_uri): + cfn_task = Task( + targetUri=stack.stackUri, + action='cloudformation.stack.describe_resources', + payload={ + 'accountid': environment.AwsAccountId, + 'region': environment.region, + 'role_arn': environment.CDKRoleArn, + 'stack_name': stack.name, + 'stackUri': stack.stackUri, + 'targetUri': target_uri, + }, + ) + session.add(cfn_task) + session.commit() + return cfn_task + + +def deploy_stack(targetUri): + context = get_context() + with context.db_engine.scoped_session() as session: + stack: StackModel = Stack.get_stack_by_target_uri( + session, target_uri=targetUri + ) + envname = os.getenv('envname', 'local') + + if envname in ['local', 'pytest', 'dkrcompose']: + requests.post(f'{config.get_property("cdk_proxy_url")}/stack/{stack.stackUri}') + + else: + cluster_name = Parameter().get_parameter( + env=envname, path='ecs/cluster/name' + ) + if not Ecs.is_task_running(cluster_name, f'awsworker-{stack.stackUri}'): + stack.EcsTaskArn = Ecs.run_cdkproxy_task(stack.stackUri) + else: + task: Task = Task( + action='ecs.cdkproxy.deploy', targetUri=stack.stackUri + ) + session.add(task) + session.commit() + Worker.queue(engine=context.db_engine, task_ids=[task.taskUri]) + + return stack + + +def delete_stack( + target_uri, accountid, cdk_role_arn, region +): + context = get_context() + with context.db_engine.scoped_session() as session: + stack: StackModel = Stack.find_stack_by_target_uri( + session, target_uri=target_uri + ) + if not stack: + return + task = Task( + targetUri=target_uri, + action='cloudformation.stack.delete', + payload={ + 'accountid': accountid, + 'region': region, + 'cdk_role_arn': cdk_role_arn, + 'stack_name': stack.name, + }, + ) + session.add(task) + + 
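
# Illustrative sketch: module resolvers typically persist their change and then call
# stack_helper.deploy_stack to trigger a redeploy, as update_stack and
# update_key_value_tags do above. The notebook mutation below is hypothetical.
from dataall.core.stacks.api import stack_helper


def update_notebook(context, source, notebookUri=None, input=None):
    with context.engine.scoped_session() as session:
        ...  # persist the notebook changes here
    # Locally this POSTs to the cdkproxy container; in AWS it starts (or queues) an ECS task.
    stack_helper.deploy_stack(targetUri=notebookUri)
    return True
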
Worker.queue(context.db_engine, [task.taskUri]) + return True diff --git a/backend/dataall/core/stacks/api/types.py b/backend/dataall/core/stacks/api/types.py new file mode 100644 index 000000000..0d03faa55 --- /dev/null +++ b/backend/dataall/core/stacks/api/types.py @@ -0,0 +1,49 @@ +from dataall.base.api import gql +from dataall.core.stacks.api.resolvers import ( + resolve_link, resolve_resources, resolve_outputs, resolve_events, resolve_task_id, resolve_error +) + +Stack = gql.ObjectType( + name='Stack', + fields=[ + gql.Field(name='stackUri', type=gql.ID), + gql.Field(name='targetUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='stack', type=gql.NonNullableType(gql.String)), + gql.Field(name='environmentUri', type=gql.String), + gql.Field(name='name', type=gql.String), + gql.Field(name='accountid', type=gql.NonNullableType(gql.String)), + gql.Field(name='region', type=gql.NonNullableType(gql.String)), + gql.Field(name='status', type=gql.String), + gql.Field(name='stackid', type=gql.String), + gql.Field(name='link', type=gql.String, resolver=resolve_link), + gql.Field(name='outputs', type=gql.String, resolver=resolve_outputs), + gql.Field(name='resources', type=gql.String, resolver=resolve_resources), + gql.Field(name='error', type=gql.String, resolver=resolve_error), + gql.Field(name='events', type=gql.String, resolver=resolve_events), + gql.Field(name='EcsTaskArn', type=gql.String), + gql.Field(name='EcsTaskId', type=gql.String, resolver=resolve_task_id), + ], +) + +StackLog = gql.ObjectType( + name='StackLog', + fields=[ + gql.Field(name='logStream', type=gql.String), + gql.Field(name='logGroup', type=gql.String), + gql.Field(name='timestamp', type=gql.String), + gql.Field(name='message', type=gql.String), + ], +) + + +KeyValueTag = gql.ObjectType( + name='KeyValueTag', + fields=[ + gql.Field(name='tagUri', type=gql.ID), + gql.Field(name='targetType', type=gql.String), + gql.Field(name='targetUri', type=gql.String), + gql.Field(name='key', type=gql.String), + gql.Field(name='value', type=gql.String), + gql.Field(name='cascade', type=gql.Boolean), + ], +) diff --git a/tests/utils/__init__.py b/backend/dataall/core/stacks/aws/__init__.py similarity index 100% rename from tests/utils/__init__.py rename to backend/dataall/core/stacks/aws/__init__.py diff --git a/backend/dataall/core/stacks/aws/cloudformation.py b/backend/dataall/core/stacks/aws/cloudformation.py new file mode 100644 index 000000000..c4c859fcf --- /dev/null +++ b/backend/dataall/core/stacks/aws/cloudformation.py @@ -0,0 +1,169 @@ +import logging +import uuid + +from botocore.exceptions import ClientError + +from dataall.base.aws.sts import SessionHelper +from dataall.core.stacks.db.stack_models import Stack +from dataall.core.tasks.db.task_models import Task +from dataall.base.utils import json_utils + +log = logging.getLogger(__name__) + + +class CloudFormation: + def __init__(self): + pass + + @staticmethod + def client(AwsAccountId, region, role=None): + session = SessionHelper.remote_session(accountid=AwsAccountId, role=role) + return session.client('cloudformation', region_name=region) + + @staticmethod + def check_existing_cdk_toolkit_stack(AwsAccountId, region): + role = SessionHelper.get_cdk_look_up_role_arn(accountid=AwsAccountId, region=region) + try: + cfn = CloudFormation.client(AwsAccountId=AwsAccountId, region=region, role=role) + response = cfn.describe_stacks(StackName='CDKToolkit') + except ClientError as e: + log.exception(f'CDKToolkitNotFound: {e}') + raise Exception('CDKToolkitNotFound') + 
+ try: + response = cfn.describe_stack_resource( + StackName='CDKToolkit', LogicalResourceId='CloudFormationExecutionRole' + ) + cdk_role_name = response['StackResourceDetail']['PhysicalResourceId'] + return cdk_role_name + except ClientError as e: + log.exception(f'CDKToolkitDeploymentActionRoleNotFound: {e}') + raise Exception(f'CDKToolkitDeploymentActionRoleNotFound: {e}') + + @staticmethod + def delete_cloudformation_stack(**data): + accountid = data['accountid'] + region = data['region'] + stack_name = data['stack_name'] + try: + aws_session = SessionHelper.remote_session(accountid=accountid) + cfnclient = aws_session.client('cloudformation', region_name=region) + response = cfnclient.delete_stack( + StackName=stack_name, + ClientRequestToken=str(uuid.uuid4()), + ) + log.info(f'Stack {stack_name} deleted: {response}') + except ClientError as e: + log.error(f'Failed to delete stack {stack_name}') + raise e + + @staticmethod + def _get_stack(**data) -> dict: + try: + accountid = data['accountid'] + region = data['region'] + stack_name = data['stack_name'] + aws_session = SessionHelper.remote_session(accountid=accountid) + cfnclient = aws_session.client('cloudformation', region_name=region) + response = cfnclient.describe_stacks(StackName=stack_name) + return response['Stacks'][0] + except ClientError as e: + raise e + + @staticmethod + def describe_stack_resources(engine, task: Task): + try: + filtered_resources = [] + filtered_events = [] + filtered_outputs = {} + data = { + 'accountid': task.payload['accountid'], + 'region': task.payload['region'], + 'stack_name': task.payload['stack_name'], + } + + cfn_stack = CloudFormation._get_stack(**data) + stack_arn = cfn_stack['StackId'] + status = cfn_stack['StackStatus'] + stack_outputs = cfn_stack.get('Outputs', []) + if stack_outputs: + for output in stack_outputs: + print(output) + filtered_outputs[output['OutputKey']] = output['OutputValue'] + resources = CloudFormation._describe_stack_resources(**data)[ + 'StackResources' + ] + events = CloudFormation._describe_stack_events(**data)['StackEvents'] + with engine.scoped_session() as session: + stack: Stack = session.query(Stack).get( + task.payload['stackUri'] + ) + stack.status = status + stack.stackid = stack_arn + stack.outputs = filtered_outputs + for resource in resources: + filtered_resources.append( + { + 'ResourceStatus': resource.get('ResourceStatus'), + 'LogicalResourceId': resource.get('LogicalResourceId'), + 'PhysicalResourceId': resource.get('PhysicalResourceId'), + 'ResourceType': resource.get('ResourceType'), + 'StackName': resource.get('StackName'), + 'StackId': resource.get('StackId'), + } + ) + stack.resources = {'resources': filtered_resources} + for event in events: + filtered_events.append( + { + 'ResourceStatus': event.get('ResourceStatus'), + 'LogicalResourceId': event.get('LogicalResourceId'), + 'PhysicalResourceId': event.get('PhysicalResourceId'), + 'ResourceType': event.get('ResourceType'), + 'StackName': event.get('StackName'), + 'StackId': event.get('StackId'), + 'EventId': event.get('EventId'), + 'ResourceStatusReason': event.get('ResourceStatusReason'), + } + ) + stack.events = {'events': filtered_events} + stack.error = None + session.commit() + except ClientError as e: + with engine.scoped_session() as session: + stack: Stack = session.query(Stack).get( + task.payload['stackUri'] + ) + if not stack.error: + stack.error = { + 'error': json_utils.to_string(e.response['Error']['Message']) + } + session.commit() + + @staticmethod + def 
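
# Illustrative sketch: verifying that a target account/region is CDK-bootstrapped with the
# helper above, e.g. before deploying stacks into it. The account id is hypothetical.
from dataall.core.stacks.aws.cloudformation import CloudFormation

cdk_exec_role = CloudFormation.check_existing_cdk_toolkit_stack(
    AwsAccountId='111122223333', region='eu-west-1'
)
# Raises 'CDKToolkitNotFound' if the CDKToolkit stack is missing in that account/region;
# otherwise returns the physical id of its CloudFormationExecutionRole resource.
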
_describe_stack_resources(**data): + accountid = data['accountid'] + region = data.get('region', 'eu-west-1') + stack_name = data['stack_name'] + aws_session = SessionHelper.remote_session(accountid=accountid) + client = aws_session.client('cloudformation', region_name=region) + try: + stack_resources = client.describe_stack_resources(StackName=stack_name) + log.info(f'Stack describe resources response : {stack_resources}') + return stack_resources + except ClientError as e: + log.error(e, exc_info=True) + + @staticmethod + def _describe_stack_events(**data): + accountid = data['accountid'] + region = data.get('region', 'eu-west-1') + stack_name = data['stack_name'] + aws_session = SessionHelper.remote_session(accountid=accountid) + client = aws_session.client('cloudformation', region_name=region) + try: + stack_events = client.describe_stack_events(StackName=stack_name) + log.info(f'Stack describe events response : {stack_events}') + return stack_events + except ClientError as e: + log.error(e, exc_info=True) diff --git a/backend/dataall/aws/handlers/cloudwatch.py b/backend/dataall/core/stacks/aws/cloudwatch.py similarity index 100% rename from backend/dataall/aws/handlers/cloudwatch.py rename to backend/dataall/core/stacks/aws/cloudwatch.py diff --git a/backend/dataall/core/stacks/aws/ecs.py b/backend/dataall/core/stacks/aws/ecs.py new file mode 100644 index 000000000..a2de687be --- /dev/null +++ b/backend/dataall/core/stacks/aws/ecs.py @@ -0,0 +1,108 @@ +import logging +import os + +import boto3 +from botocore.exceptions import ClientError + +from dataall.base.utils import Parameter + +log = logging.getLogger('aws:ecs') + + +class Ecs: + def __init__(self): + pass + + @staticmethod + def run_cdkproxy_task(stack_uri): + task_arn = Ecs.run_ecs_task( + task_definition_param='ecs/task_def_arn/cdkproxy', + container_name_param='ecs/container/cdkproxy', + context=[{'name': 'stackUri', 'value': stack_uri}], + started_by=f'awsworker-{stack_uri}', + ) + log.info(f'ECS Task {task_arn} running') + return task_arn + + @staticmethod + def run_ecs_task( + task_definition_param, + container_name_param, + context, + started_by='awsworker', + ): + try: + envname = os.environ.get('envname', 'local') + cluster_name = Parameter().get_parameter(env=envname, path='ecs/cluster/name') + subnets = Parameter().get_parameter(env=envname, path='ecs/private_subnets') + security_groups = Parameter().get_parameter( + env=envname, path='ecs/security_groups' + ) + + task_definition = Parameter().get_parameter( + env=envname, path=task_definition_param + ) + container_name = Parameter().get_parameter( + env=envname, path=container_name_param + ) + + response = boto3.client('ecs').run_task( + cluster=cluster_name, + taskDefinition=task_definition, + count=1, + launchType='FARGATE', + networkConfiguration={ + 'awsvpcConfiguration': { + 'subnets': subnets.split(','), + 'securityGroups': security_groups.split(','), + } + }, + overrides={ + 'containerOverrides': [ + { + 'name': container_name, + 'environment': [ + {'name': 'config_location', 'value': '/config.json'}, + {'name': 'envname', 'value': envname}, + { + 'name': 'AWS_REGION', + 'value': os.getenv('AWS_REGION', 'eu-west-1'), + }, + *context + ], + } + ] + }, + startedBy=started_by, + ) + if response['failures']: + raise Exception( + ', '.join( + [ + 'fail to run task {0} reason: {1}'.format( + failure['arn'], failure['reason'] + ) + for failure in response['failures'] + ] + ) + ) + task_arn = response.get('tasks', [{'taskArn': None}])[0]['taskArn'] + log.info(f'Task 
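
# Illustrative sketch: Ecs.run_ecs_task above is generic and run_cdkproxy_task is one
# instance of it. A module could start its own Fargate task the same way, provided the
# referenced SSM parameters exist. The share-manager paths below are hypothetical.
from dataall.core.stacks.aws.ecs import Ecs

task_arn = Ecs.run_ecs_task(
    task_definition_param='ecs/task_def_arn/share_manager',
    container_name_param='ecs/container/share_manager',
    context=[{'name': 'shareUri', 'value': 'share-1234'}],  # extra container env vars
    started_by='awsworker-share-1234',
)
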
started {task_arn}..') + return task_arn + except ClientError as e: + log.error(e) + raise e + + @staticmethod + def is_task_running(cluster_name, started_by): + try: + client = boto3.client('ecs') + running_tasks = client.list_tasks( + cluster=cluster_name, startedBy=started_by, desiredStatus='RUNNING' + ) + if running_tasks and running_tasks.get('taskArns'): + return True + return False + except ClientError as e: + log.error(e) + raise e diff --git a/tests/utils/clients/__init__.py b/backend/dataall/core/stacks/db/__init__.py similarity index 100% rename from tests/utils/clients/__init__.py rename to backend/dataall/core/stacks/db/__init__.py diff --git a/backend/dataall/core/stacks/db/keyvaluetag_repositories.py b/backend/dataall/core/stacks/db/keyvaluetag_repositories.py new file mode 100644 index 000000000..16a051b89 --- /dev/null +++ b/backend/dataall/core/stacks/db/keyvaluetag_repositories.py @@ -0,0 +1,104 @@ +import logging + +from dataall.base.context import get_context +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.stacks.db import stack_models as models +from dataall.core.stacks.db.target_type_repositories import TargetType +from dataall.base.db import exceptions + +logger = logging.getLogger(__name__) + + +class KeyValueTag: + @staticmethod + def update_key_value_tags(session, uri: str, data: dict = None) -> [models.KeyValueTag]: + if not uri: + raise exceptions.RequiredParameter('targetUri') + if not data: + raise exceptions.RequiredParameter('data') + if not data.get('targetType'): + raise exceptions.RequiredParameter('targetType') + + context = get_context() + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=uri, + permission_name=TargetType.get_resource_update_permission_name( + data['targetType'] + ), + ) + + tag_keys = [tag['key'].lower() for tag in data.get('tags', [])] + if tag_keys and len(tag_keys) != len(set(tag_keys)): + raise exceptions.UnauthorizedOperation( + action='SAVE_KEY_VALUE_TAGS', + message='Duplicate tag keys found. 
Please note that Tag keys are case insensitive', + ) + + tags = [] + session.query(models.KeyValueTag).filter( + models.KeyValueTag.targetUri == uri, + models.KeyValueTag.targetType == data['targetType'], + ).delete() + for tag in data.get('tags'): + kv_tag: models.KeyValueTag = models.KeyValueTag( + targetUri=uri, + targetType=data['targetType'], + key=tag['key'], + value=tag['value'], + cascade=tag['cascade'] + ) + tags.append(kv_tag) + session.add(kv_tag) + + return tags + + @staticmethod + def list_key_value_tags(session, uri, target_type) -> dict: + context = get_context() + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=uri, + permission_name=TargetType.get_resource_read_permission_name( + target_type + ), + ) + return KeyValueTag.find_key_value_tags(session, uri, target_type) + + @staticmethod + def find_key_value_tags(session, target_uri, target_type) -> [models.KeyValueTag]: + return ( + session.query(models.KeyValueTag) + .filter( + models.KeyValueTag.targetUri == target_uri, + models.KeyValueTag.targetType == target_type, + ) + .all() + ) + + @staticmethod + def find_environment_cascade_key_value_tags(session, target_uri) -> [models.KeyValueTag]: + return ( + session.query(models.KeyValueTag) + .filter( + models.KeyValueTag.targetUri == target_uri, + models.KeyValueTag.targetType == 'environment', + models.KeyValueTag.cascade.is_(True), + ) + .all() + ) + + @staticmethod + def delete_key_value_tags(session, target_uri, target_type): + return ( + session.query(models.KeyValueTag) + .filter( + models.KeyValueTag.targetUri == target_uri, + models.KeyValueTag.targetType == target_type, + ) + .delete() + ) diff --git a/backend/dataall/core/stacks/db/stack_models.py b/backend/dataall/core/stacks/db/stack_models.py new file mode 100644 index 000000000..ef8e29fa1 --- /dev/null +++ b/backend/dataall/core/stacks/db/stack_models.py @@ -0,0 +1,43 @@ +import datetime + +from sqlalchemy import Column, DateTime, String, Boolean +from sqlalchemy.dialects import postgresql + +from dataall.base.db import Base +from dataall.base.db import utils + + +class Stack(Base): + __tablename__ = 'stack' + stackUri = Column( + String, nullable=False, default=utils.uuid('stack'), primary_key=True + ) + name = Column(String, nullable=True) + targetUri = Column(String, nullable=False) + accountid = Column(String, nullable=False) + region = Column(String, nullable=False) + cronexpr = Column(String, nullable=True) + status = Column(String, nullable=False, default='pending') + stack = Column(String, nullable=False) + payload = Column(postgresql.JSON, nullable=True) + created = Column(DateTime, default=datetime.datetime.now()) + updated = Column(DateTime, onupdate=datetime.datetime.now()) + stackid = Column(String) + outputs = Column(postgresql.JSON) + resources = Column(postgresql.JSON) + error = Column(postgresql.JSON) + events = Column(postgresql.JSON) + lastSeen = Column( + DateTime, default=lambda: datetime.datetime(year=1900, month=1, day=1) + ) + EcsTaskArn = Column(String, nullable=True) + + +class KeyValueTag(Base): + __tablename__ = 'keyvaluetag' + tagUri = Column(String, primary_key=True, default=utils.uuid('keyvaluetag')) + targetUri = Column(String, nullable=False) + targetType = Column(String, nullable=False) + key = Column(String, nullable=False) + value = Column(String, nullable=False) + cascade = Column(Boolean, default=False) diff --git a/backend/dataall/core/stacks/db/stack_repositories.py 
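
# Illustrative sketch: the shape of the 'data' payload accepted by
# KeyValueTag.update_key_value_tags above (and by the updateKeyValueTags mutation).
# The target URI and tag values are hypothetical.
update_input = {
    'targetUri': 'env-1234',
    'targetType': 'environment',
    'tags': [
        # cascade=True tags on an environment are inherited by stacks deployed in it
        {'key': 'CostCenter', 'value': '1234', 'cascade': True},
        {'key': 'Owner', 'value': 'data-engineers', 'cascade': False},
    ],
}
# Keys are compared case-insensitively, so 'owner' and 'Owner' cannot both be provided.
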
b/backend/dataall/core/stacks/db/stack_repositories.py new file mode 100644 index 000000000..3fa8533ca --- /dev/null +++ b/backend/dataall/core/stacks/db/stack_repositories.py @@ -0,0 +1,96 @@ +import logging + +from dataall.base.context import get_context +from dataall.core.environment.db.environment_models import Environment +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.stacks.db import stack_models as models +from dataall.core.stacks.db.target_type_repositories import TargetType +from dataall.base.db import exceptions +from dataall.base.utils.naming_convention import ( + NamingConventionService, + NamingConventionPattern, +) + +log = logging.getLogger(__name__) + + +class Stack: + @staticmethod + def get_stack_by_target_uri(session, target_uri): + stack = Stack.find_stack_by_target_uri(session, target_uri) + if not stack: + raise exceptions.ObjectNotFound('Stack', target_uri) + return stack + + @staticmethod + def find_stack_by_target_uri(session, target_uri): + stack: models.Stack = ( + session.query(models.Stack) + .filter(models.Stack.targetUri == target_uri) + .first() + ) + return stack + + @staticmethod + def get_stack_by_uri(session, stack_uri): + stack = Stack.find_stack_by_uri(session, stack_uri) + if not stack: + raise exceptions.ObjectNotFound('Stack', stack_uri) + return stack + + @staticmethod + def find_stack_by_uri(session, stack_uri): + stack: models.Stack = session.query(models.Stack).get(stack_uri) + return stack + + @staticmethod + def create_stack( + session, environment_uri, target_label, target_uri, target_type, payload=None + ) -> models.Stack: + environment: Environment = session.query(Environment).get( + environment_uri + ) + if not environment: + raise exceptions.ObjectNotFound('Environment', environment_uri) + + stack = models.Stack( + targetUri=target_uri, + accountid=environment.AwsAccountId, + region=environment.region, + stack=target_type, + payload=payload, + name=NamingConventionService( + target_label=target_type, + target_uri=target_uri, + pattern=NamingConventionPattern.DEFAULT, + resource_prefix=environment.resourcePrefix, + ).build_compliant_name(), + ) + session.add(stack) + session.commit() + return stack + + @staticmethod + def update_stack( + session, + uri: str, + target_type: str + ) -> [models.Stack]: + + if not uri: + raise exceptions.RequiredParameter('targetUri') + if not target_type: + raise exceptions.RequiredParameter('targetType') + + context = get_context() + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=uri, + permission_name=TargetType.get_resource_update_permission_name( + target_type + ), + ) + stack = Stack.get_stack_by_target_uri(session, target_uri=uri) + return stack diff --git a/backend/dataall/core/stacks/db/target_type_repositories.py b/backend/dataall/core/stacks/db/target_type_repositories.py new file mode 100644 index 000000000..0aa8ce8d7 --- /dev/null +++ b/backend/dataall/core/stacks/db/target_type_repositories.py @@ -0,0 +1,40 @@ +import logging + +from dataall.base.db import exceptions +from dataall.core.permissions import permissions + +logger = logging.getLogger(__name__) + + +class TargetType: + """Resolves the read/write permissions for different type of resources (target types)""" + _TARGET_TYPES = {} + + def __init__(self, name, read_permission, write_permission): + self.name = name + self.read_permission = read_permission + self.write_permission = write_permission + 
+ TargetType._TARGET_TYPES[name] = self + + @staticmethod + def get_resource_update_permission_name(target_type): + TargetType.is_supported_target_type(target_type) + return TargetType._TARGET_TYPES[target_type].write_permission + + @staticmethod + def get_resource_read_permission_name(target_type): + TargetType.is_supported_target_type(target_type) + return TargetType._TARGET_TYPES[target_type].read_permission + + @staticmethod + def is_supported_target_type(target_type): + if target_type not in TargetType._TARGET_TYPES: + raise exceptions.InvalidInput( + 'targetType', + target_type, + ' or '.join(TargetType._TARGET_TYPES.keys()), + ) + + +TargetType("environment", permissions.GET_ENVIRONMENT, permissions.UPDATE_ENVIRONMENT) diff --git a/backend/dataall/core/stacks/handlers/__init__.py b/backend/dataall/core/stacks/handlers/__init__.py new file mode 100644 index 000000000..acc42944f --- /dev/null +++ b/backend/dataall/core/stacks/handlers/__init__.py @@ -0,0 +1,7 @@ +""" +Contains code with the handlers that are need for async +processing in a separate lambda function +""" +from dataall.core.stacks.handlers import stack_handlers + +__all__ = ["stack_handlers"] diff --git a/backend/dataall/core/stacks/handlers/stack_handlers.py b/backend/dataall/core/stacks/handlers/stack_handlers.py new file mode 100644 index 000000000..c2678a319 --- /dev/null +++ b/backend/dataall/core/stacks/handlers/stack_handlers.py @@ -0,0 +1,57 @@ +import logging +import os +import time + +from botocore.exceptions import ClientError + +from dataall.core.tasks.service_handlers import Worker +from dataall.core.stacks.aws.cloudformation import CloudFormation +from dataall.core.stacks.aws.ecs import Ecs +from dataall.core.stacks.db import stack_models as models +from dataall.core.stacks.db.stack_repositories import Stack +from dataall.core.tasks.db.task_models import Task +from dataall.base.utils import Parameter + +log = logging.getLogger(__name__) + + +class StackHandlers: + @staticmethod + @Worker.handler(path='cloudformation.stack.delete') + def delete_stack(engine, task: Task): + try: + data = { + 'accountid': task.payload['accountid'], + 'region': task.payload['region'], + 'stack_name': task.payload['stack_name'], + } + CloudFormation.delete_cloudformation_stack(**data) + except ClientError as e: + log.error(f'Failed to delete CFN stack{task.targetUri}: {e}') + raise e + return {'status': 200, 'stackDeleted': True} + + @staticmethod + @Worker.handler(path='cloudformation.stack.describe_resources') + def describe_stack_resources(engine, task: Task): + CloudFormation.describe_stack_resources(engine, task) + + @staticmethod + @Worker.handler(path='ecs.cdkproxy.deploy') + def deploy_stack(engine, task: Task): + with engine.scoped_session() as session: + stack: models.Stack = Stack.get_stack_by_uri( + session, stack_uri=task.targetUri + ) + envname = os.environ.get('envname', 'local') + cluster_name = Parameter().get_parameter( + env=envname, path='ecs/cluster/name' + ) + + while Ecs.is_task_running(cluster_name=cluster_name, started_by=f'awsworker-{task.targetUri}'): + log.info( + f'ECS task for stack stack-{task.targetUri} is running waiting for 30 seconds before retrying...' 
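
# Illustrative sketch: registering an additional target type with the TargetType registry
# above, so that stack and tag operations on that resource resolve to its own read/write
# permissions. The notebook target type and permission names are hypothetical.
from dataall.core.stacks.db.target_type_repositories import TargetType

GET_NOTEBOOK = 'GET_NOTEBOOK'
UPDATE_NOTEBOOK = 'UPDATE_NOTEBOOK'

TargetType('notebook', GET_NOTEBOOK, UPDATE_NOTEBOOK)

# Stack.update_stack(..., target_type='notebook') now checks UPDATE_NOTEBOOK, and
# KeyValueTag.list_key_value_tags(..., target_type='notebook') checks GET_NOTEBOOK.
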
+ ) + time.sleep(30) + + stack.EcsTaskArn = Ecs.run_cdkproxy_task(stack_uri=task.targetUri) diff --git a/tests/utils/gql/__init__.py b/backend/dataall/core/stacks/services/__init__.py similarity index 100% rename from tests/utils/gql/__init__.py rename to backend/dataall/core/stacks/services/__init__.py diff --git a/backend/dataall/core/stacks/services/runtime_stacks_tagging.py b/backend/dataall/core/stacks/services/runtime_stacks_tagging.py new file mode 100644 index 000000000..af9cca2f9 --- /dev/null +++ b/backend/dataall/core/stacks/services/runtime_stacks_tagging.py @@ -0,0 +1,167 @@ +import os +import typing +from enum import Enum + +from aws_cdk import Stack, Tags + +from dataall.base import db +from dataall.core.environment.db.environment_models import Environment +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.organizations.db.organization_repositories import Organization +from dataall.core.stacks.db.keyvaluetag_repositories import KeyValueTag +from dataall.core.stacks.db.stack_models import KeyValueTag as KeyValueTagModel + + +# Tag keys for Stacks +class StackTagName(Enum): + def __str__(self): + return str(self.value) + + CREATOR = 'Creator' + ORGANISATION = 'Organization' + ENVIRONMENT = 'Environment' + TARGET = 'Target' + TEAM = 'Team' + DATAALL = 'dataall' + + +# Tags adding class +class TagsUtil: + def __init__(self, stack): + self.stack = stack + + @classmethod + def add_tags(cls, stack: Stack, model, target_type) -> [tuple]: + """ + A class method that adds tags to a Stack + """ + + # Get the list of tags to be added from the tag factory + stack_tags_to_add = cls.tag_factory(stack, model, target_type) + + # Add the tags to the Stack + for tag in stack_tags_to_add: + Tags.of(stack).add(str(tag[0]), str(tag[1])) + + return stack_tags_to_add + + @classmethod + def tag_factory(cls, stack: Stack, model_name, target_type) -> typing.List[typing.Tuple]: + """ + A class method that returns tags to be added to a Stack (based on Stack type) + """ + + _stack_tags = [] + + engine = cls.get_engine() + + # Initialize references to stack's environment and organisation + with engine.scoped_session() as session: + target_stack = cls.get_target(session, stack, model_name) + environment = cls.get_environment(session, target_stack) + organisation = cls.get_organization(session, environment) + key_value_tags: [KeyValueTagModel] = cls.get_model_key_value_tags( + session, stack, target_type + ) + cascaded_tags: [KeyValueTagModel] = cls.get_environment_cascade_key_value_tags( + session, environment.environmentUri + ) + + # Build a list of tuples with tag keys and values based on the collected up to this point + # ex. target_stack, organisation etc. + _common_stack_tags = [ + (StackTagName.CREATOR.value, target_stack.owner), + ( + StackTagName.ORGANISATION.value, + organisation.name + '_' + organisation.organizationUri, + ), + ( + StackTagName.ENVIRONMENT.value, + environment.name + '_' + environment.environmentUri, + ), + ( + StackTagName.TEAM.value, + ( + target_stack.SamlGroupName + if hasattr(target_stack, 'SamlGroupName') + else target_stack.SamlAdminGroupName + ), + ), + ( + StackTagName.TARGET.value, + model_name.__name__ + '_' + stack.target_uri, + ), + ( + StackTagName.DATAALL.value, + 'true', + ), + ] + + # Build the final tag list with common tags + _stack_tags.extend(_common_stack_tags) + + # ..and any additional key value tags + _stack_tags.extend(key_value_tags) + + # .. 
and cascade tags inherited form the environment + _stack_tags.extend(cascaded_tags) + + # Duplicate tag keys are not allowed on CloudFormation. Also Tag keys are case insensitive + _stack_tags = list(cls.remove_duplicate_tag_keys(_stack_tags).values()) + + return _stack_tags + + @classmethod + def get_engine(cls): + envname = os.environ.get('envname', 'local') + engine = db.get_engine(envname=envname) + return engine + + @classmethod + def get_target(cls, session, stack, model_name): + return session.query(model_name).get(stack.target_uri) + + @classmethod + def get_organization(cls, session, environment): + organisation = Organization.get_organization_by_uri( + session, environment.organizationUri + ) + return organisation + + @classmethod + def get_environment(cls, session, target_stack): + environment: Environment = EnvironmentService.get_environment_by_uri( + session, target_stack.environmentUri + ) + return environment + + @classmethod + def get_model_key_value_tags(cls, session, stack, target_type): + return [ + (kv.key, kv.value) + for kv in KeyValueTag.find_key_value_tags( + session, + stack.target_uri, + target_type, + ) + ] + + @classmethod + def get_environment_cascade_key_value_tags(cls, session, environmentUri): + return [ + (kv.key, kv.value) + for kv in KeyValueTag.find_environment_cascade_key_value_tags( + session, + environmentUri, + ) + ] + + @classmethod + def remove_duplicate_tag_keys(cls, _stack_tags): + compare_dict = dict() + results_dict = dict() + for key, value in reversed(_stack_tags): + if key.lower() not in compare_dict: # we see this key for the first time + compare_dict[key.lower()] = (key, value) + results_dict[key] = (key, value) + return results_dict diff --git a/backend/dataall/api/Objects/Activity/mutations.py b/backend/dataall/core/stacks/tasks/__init__.py similarity index 100% rename from backend/dataall/api/Objects/Activity/mutations.py rename to backend/dataall/core/stacks/tasks/__init__.py diff --git a/backend/dataall/tasks/cdkproxy.py b/backend/dataall/core/stacks/tasks/cdkproxy.py similarity index 80% rename from backend/dataall/tasks/cdkproxy.py rename to backend/dataall/core/stacks/tasks/cdkproxy.py index f8e4da38a..d9a863e7c 100644 --- a/backend/dataall/tasks/cdkproxy.py +++ b/backend/dataall/core/stacks/tasks/cdkproxy.py @@ -2,8 +2,8 @@ import os import sys -from ..cdkproxy.cdk_cli_wrapper import deploy_cdk_stack -from ..db import get_engine +from dataall.base.cdkproxy.cdk_cli_wrapper import deploy_cdk_stack +from dataall.base.db import get_engine root = logging.getLogger() root.setLevel(logging.INFO) @@ -18,6 +18,6 @@ stack_uri = os.getenv('stackUri') logger.info(f'Starting deployment task for stack : {stack_uri}') - deploy_cdk_stack(engine=engine, stackid=stack_uri, app_path='../cdkproxy/app.py') + deploy_cdk_stack(engine=engine, stackid=stack_uri, app_path='../../base/cdkproxy/app.py') logger.info('Deployment task finished successfully') diff --git a/backend/dataall/api/Objects/Group/mutations.py b/backend/dataall/core/tasks/__init__.py similarity index 100% rename from backend/dataall/api/Objects/Group/mutations.py rename to backend/dataall/core/tasks/__init__.py diff --git a/backend/dataall/api/Objects/Permission/mutations.py b/backend/dataall/core/tasks/db/__init__.py similarity index 100% rename from backend/dataall/api/Objects/Permission/mutations.py rename to backend/dataall/core/tasks/db/__init__.py diff --git a/backend/dataall/core/tasks/db/task_models.py b/backend/dataall/core/tasks/db/task_models.py new file mode 100644 index 
000000000..567273354 --- /dev/null +++ b/backend/dataall/core/tasks/db/task_models.py @@ -0,0 +1,26 @@ +import datetime + +from sqlalchemy import Column, DateTime, String +from sqlalchemy.dialects import postgresql + +from dataall.base.db import Base +from dataall.base.db import utils + + +class Task(Base): + __tablename__ = 'task' + taskUri = Column( + String, nullable=False, default=utils.uuid('Task'), primary_key=True + ) + targetUri = Column(String, nullable=False) + cronexpr = Column(String, nullable=True) + status = Column(String, nullable=False, default='pending') + action = Column(String, nullable=False) + payload = Column(postgresql.JSON, nullable=True) + created = Column(DateTime, default=datetime.datetime.now()) + updated = Column(DateTime, onupdate=datetime.datetime.now()) + response = Column(postgresql.JSON) + error = Column(postgresql.JSON) + lastSeen = Column( + DateTime, default=lambda: datetime.datetime(year=1900, month=1, day=1) + ) diff --git a/backend/dataall/aws/handlers/service_handlers.py b/backend/dataall/core/tasks/service_handlers.py similarity index 98% rename from backend/dataall/aws/handlers/service_handlers.py rename to backend/dataall/core/tasks/service_handlers.py index a4b2eda12..9bf41c938 100644 --- a/backend/dataall/aws/handlers/service_handlers.py +++ b/backend/dataall/core/tasks/service_handlers.py @@ -3,8 +3,8 @@ import time from functools import wraps -from ...db.models import Task -from ...utils.json_utils import to_json +from dataall.core.tasks.db.task_models import Task +from dataall.base.utils.json_utils import to_json log = logging.getLogger(__name__) ENVNAME = os.getenv('envname', 'local') diff --git a/backend/dataall/core/vpc/__init__.py b/backend/dataall/core/vpc/__init__.py new file mode 100644 index 000000000..b281f60b5 --- /dev/null +++ b/backend/dataall/core/vpc/__init__.py @@ -0,0 +1 @@ +from dataall.core.vpc import api diff --git a/backend/dataall/core/vpc/api/__init__.py b/backend/dataall/core/vpc/api/__init__.py new file mode 100644 index 000000000..e6f4fa087 --- /dev/null +++ b/backend/dataall/core/vpc/api/__init__.py @@ -0,0 +1,3 @@ +from . 
import input_types, queries, mutations, resolvers, types + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/core/vpc/api/input_types.py b/backend/dataall/core/vpc/api/input_types.py new file mode 100644 index 000000000..8151ae931 --- /dev/null +++ b/backend/dataall/core/vpc/api/input_types.py @@ -0,0 +1,24 @@ +from dataall.base.api import gql + +VpcFilter = gql.InputType( + name='VpcFilter', + arguments=[ + gql.Argument('term', gql.String), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + ], +) + +NewVpcInput = gql.InputType( + name='NewVpcInput', + arguments=[ + gql.Argument(name='label', type=gql.NonNullableType(gql.String)), + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='vpcId', type=gql.NonNullableType(gql.String)), + gql.Argument(name='publicSubnetIds', type=gql.ArrayType(gql.String)), + gql.Argument(name='privateSubnetIds', type=gql.ArrayType(gql.String)), + gql.Argument(name='description', type=gql.String), + gql.Argument(name='SamlGroupName', type=gql.NonNullableType(gql.String)), + gql.Argument(name='tags', type=gql.ArrayType(gql.String)), + ], +) diff --git a/backend/dataall/core/vpc/api/mutations.py b/backend/dataall/core/vpc/api/mutations.py new file mode 100644 index 000000000..b45d383b5 --- /dev/null +++ b/backend/dataall/core/vpc/api/mutations.py @@ -0,0 +1,16 @@ +from dataall.base.api import gql +from .resolvers import * + +createNetwork = gql.MutationField( + name='createNetwork', + type=gql.Ref('Vpc'), + args=[gql.Argument(name='input', type=gql.NonNullableType(gql.Ref('NewVpcInput')))], + resolver=create_network, +) + +deleteNetwork = gql.MutationField( + name='deleteNetwork', + type=gql.Boolean, + args=[gql.Argument(name='vpcUri', type=gql.NonNullableType(gql.String))], + resolver=delete_network, +) diff --git a/backend/dataall/api/Objects/Principal/queries.py b/backend/dataall/core/vpc/api/queries.py similarity index 100% rename from backend/dataall/api/Objects/Principal/queries.py rename to backend/dataall/core/vpc/api/queries.py diff --git a/backend/dataall/core/vpc/api/resolvers.py b/backend/dataall/core/vpc/api/resolvers.py new file mode 100644 index 000000000..a13750cf6 --- /dev/null +++ b/backend/dataall/core/vpc/api/resolvers.py @@ -0,0 +1,22 @@ +import logging + +from dataall.base.api.context import Context +from dataall.core.vpc.db.vpc_repositories import Vpc + +log = logging.getLogger(__name__) + + +def create_network(context: Context, source, input): + with context.engine.scoped_session() as session: + vpc = Vpc.create_network( + session=session, + uri=input['environmentUri'], + admin_group=input['SamlGroupName'], + data=input, + ) + return vpc + + +def delete_network(context: Context, source, vpcUri=None): + with context.engine.scoped_session() as session: + return Vpc.delete(session=session, uri=vpcUri) diff --git a/backend/dataall/core/vpc/api/types.py b/backend/dataall/core/vpc/api/types.py new file mode 100644 index 000000000..6c1114acd --- /dev/null +++ b/backend/dataall/core/vpc/api/types.py @@ -0,0 +1,35 @@ +from dataall.base.api import gql + +Vpc = gql.ObjectType( + name='Vpc', + fields=[ + gql.Field(name='VpcId', type=gql.NonNullableType(gql.String)), + gql.Field(name='vpcUri', type=gql.NonNullableType(gql.ID)), + gql.Field(name='environment', type=gql.Ref('Environment')), + gql.Field(name='label', type=gql.String), + gql.Field(name='owner', type=gql.String), + gql.Field(name='name', 
type=gql.String), + gql.Field(name='description', type=gql.String), + gql.Field(name='tags', type=gql.ArrayType(gql.String)), + gql.Field(name='AwsAccountId', type=gql.NonNullableType(gql.String)), + gql.Field(name='region', type=gql.NonNullableType(gql.String)), + gql.Field(name='privateSubnetIds', type=gql.ArrayType(gql.String)), + gql.Field(name='publicSubnetIds', type=gql.ArrayType(gql.String)), + gql.Field(name='SamlGroupName', type=gql.String), + gql.Field(name='default', type=gql.Boolean), + ], +) +VpcSearchResult = gql.ObjectType( + name='VpcSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='pageSize', type=gql.Integer), + gql.Field(name='nextPage', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='previousPage', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('Vpc'))), + ], +) diff --git a/backend/dataall/api/Objects/Stack/input_types.py b/backend/dataall/core/vpc/db/__init__.py similarity index 100% rename from backend/dataall/api/Objects/Stack/input_types.py rename to backend/dataall/core/vpc/db/__init__.py diff --git a/backend/dataall/core/vpc/db/vpc_models.py b/backend/dataall/core/vpc/db/vpc_models.py new file mode 100644 index 000000000..8dce4486c --- /dev/null +++ b/backend/dataall/core/vpc/db/vpc_models.py @@ -0,0 +1,19 @@ +from sqlalchemy import Column, String, Boolean +from sqlalchemy.dialects.postgresql import ARRAY + +from dataall.base.db import Base, Resource, utils + + +class Vpc(Resource, Base): + __tablename__ = 'vpc' + environmentUri = Column(String, nullable=False) + vpcUri = Column( + String, nullable=False, primary_key=True, default=utils.uuid('vpcUri') + ) + region = Column(String, default='eu-west-1') + AwsAccountId = Column(String, nullable=False) + SamlGroupName = Column(String) + VpcId = Column(String, nullable=False) + privateSubnetIds = Column(ARRAY(String)) + publicSubnetIds = Column(ARRAY(String)) + default = Column(Boolean, default=False) diff --git a/backend/dataall/core/vpc/db/vpc_repositories.py b/backend/dataall/core/vpc/db/vpc_repositories.py new file mode 100644 index 000000000..cf041604c --- /dev/null +++ b/backend/dataall/core/vpc/db/vpc_repositories.py @@ -0,0 +1,141 @@ +import logging + +from sqlalchemy import and_ + +from dataall.core.environment.db.environment_repositories import EnvironmentRepository +from dataall.core.environment.env_permission_checker import has_group_permission +from dataall.base.db import exceptions +from dataall.core.permissions import permissions +from dataall.core.vpc.db import vpc_models as models +from dataall.core.permissions.permission_checker import has_resource_permission, has_tenant_permission +from dataall.base.context import get_context +from dataall.core.activity.db.activity_models import Activity +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy + +log = logging.getLogger(__name__) + + +class Vpc: + def __init__(self): + pass + + @staticmethod + @has_tenant_permission(permissions.MANAGE_ENVIRONMENTS) + @has_resource_permission(permissions.CREATE_NETWORK) + @has_group_permission(permissions.CREATE_NETWORK) + def create_network(session, uri: str, admin_group: str, data: dict = None) -> models.Vpc: + Vpc._validate_input(data) + username = get_context().username + + vpc = ( + session.query(models.Vpc) + .filter( + and_( + models.Vpc.VpcId == 
data['vpcId'], models.Vpc.environmentUri == uri + ) + ) + .first() + ) + + if vpc: + raise exceptions.ResourceAlreadyExists( + action=permissions.CREATE_NETWORK, + message=f'Vpc {data["vpcId"]} is already associated to environment {uri}', + ) + + environment = EnvironmentRepository.get_environment_by_uri(session, uri) + vpc = models.Vpc( + environmentUri=environment.environmentUri, + region=environment.region, + AwsAccountId=environment.AwsAccountId, + VpcId=data['vpcId'], + privateSubnetIds=data.get('privateSubnetIds', []), + publicSubnetIds=data.get('publicSubnetIds', []), + SamlGroupName=data['SamlGroupName'], + owner=username, + label=data['label'], + name=data['label'], + default=data.get('default', False), + ) + session.add(vpc) + session.commit() + + activity = Activity( + action='NETWORK:CREATE', + label='NETWORK:CREATE', + owner=username, + summary=f'{username} created network {vpc.label} in {environment.label}', + targetUri=vpc.vpcUri, + targetType='Vpc', + ) + session.add(activity) + + ResourcePolicy.attach_resource_policy( + session=session, + group=vpc.SamlGroupName, + permissions=permissions.NETWORK_ALL, + resource_uri=vpc.vpcUri, + resource_type=models.Vpc.__name__, + ) + + if environment.SamlGroupName != vpc.SamlGroupName: + ResourcePolicy.attach_resource_policy( + session=session, + group=environment.SamlGroupName, + permissions=permissions.NETWORK_ALL, + resource_uri=vpc.vpcUri, + resource_type=models.Vpc.__name__, + ) + + return vpc + + @staticmethod + def _validate_input(data): + if not data: + raise exceptions.RequiredParameter(data) + if not data.get('environmentUri'): + raise exceptions.RequiredParameter('environmentUri') + if not data.get('SamlGroupName'): + raise exceptions.RequiredParameter('group') + if not data.get('label'): + raise exceptions.RequiredParameter('label') + + @staticmethod + @has_tenant_permission(permissions.MANAGE_ENVIRONMENTS) + @has_resource_permission(permissions.DELETE_NETWORK) + def delete(session, uri) -> bool: + vpc = Vpc.get_vpc_by_uri(session, uri) + session.delete(vpc) + ResourcePolicy.delete_resource_policy( + session=session, resource_uri=uri, group=vpc.SamlGroupName + ) + session.commit() + return True + + @staticmethod + def get_vpc_by_uri(session, vpc_uri) -> models.Vpc: + vpc = session.query(models.Vpc).get(vpc_uri) + if not vpc: + raise exceptions.ObjectNotFound('VPC', vpc_uri) + return vpc + + @staticmethod + def get_environment_vpc_list(session, environment_uri): + return ( + session.query(models.Vpc) + .filter(models.Vpc.environmentUri == environment_uri) + .all() + ) + + @staticmethod + def get_environment_default_vpc(session, environment_uri): + return ( + session.query(models.Vpc) + .filter( + and_( + models.Vpc.environmentUri == environment_uri, + models.Vpc.default == True, + ) + ) + .first() + ) diff --git a/backend/dataall/db/__init__.py b/backend/dataall/db/__init__.py deleted file mode 100644 index 8e0a0fd83..000000000 --- a/backend/dataall/db/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -from .base import Base, Resource -from . import models -from . import exceptions -from . import permissions -from .connection import ( - Engine, - get_engine, - create_schema_if_not_exists, - create_schema_and_tables, - has_table, - has_column, - drop_schema_if_exists, - init_permissions, -) -from .dbconfig import DbConfig -from .paginator import paginate -from . 
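
Reviewer note: a minimal usage sketch of the new VPC repository added above, for readers unfamiliar with the v2 module layout. This is illustrative only and not part of the diff — the environment URI, group name, and prior permission setup (MANAGE_ENVIRONMENTS tenant policy and CREATE_NETWORK on the environment) are placeholder assumptions, and the exact keyword names inspected by the permission decorators are not shown in this change.

    # Hypothetical example; values below are placeholders, not taken from this PR.
    from dataall.base.context import set_context, RequestContext
    from dataall.base.db import get_engine
    from dataall.core.vpc.db.vpc_repositories import Vpc

    engine = get_engine(envname='local')
    # Request context as set up in api_handler.py (engine, username, groups).
    set_context(RequestContext(engine, 'alice', ['team-a']))

    with engine.scoped_session() as session:
        vpc = Vpc.create_network(
            session,
            uri='env-uri-123',       # environment URI (placeholder)
            admin_group='team-a',
            data={
                'environmentUri': 'env-uri-123',
                'vpcId': 'vpc-0abc',
                'label': 'team-a-network',
                'SamlGroupName': 'team-a',
                'privateSubnetIds': ['subnet-1'],
                'publicSubnetIds': [],
                'default': True,
            },
        )
        # The newly created network becomes the environment default VPC.
        assert Vpc.get_environment_default_vpc(session, 'env-uri-123').vpcUri == vpc.vpcUri

Note that `create_network` attaches NETWORK_ALL to the requesting group and, when it differs, to the environment admin group as well, so both teams can manage the network.
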
import api diff --git a/backend/dataall/db/api/__init__.py b/backend/dataall/db/api/__init__.py deleted file mode 100644 index 369e4faa9..000000000 --- a/backend/dataall/db/api/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -from .permission import Permission -from .tenant import Tenant -from .tenant_policy import TenantPolicy -from .resource_policy import ResourcePolicy -from .permission_checker import has_tenant_perm, has_resource_perm -from .target_type import TargetType -from .keyvaluetag import KeyValueTag -from .stack import Stack -from .organization import Organization -from .environment import Environment -from .glossary import Glossary -from .vote import Vote -from .dataset import Dataset -from .dataset_location import DatasetStorageLocation -from .dataset_profiling_run import DatasetProfilingRun -from .dataset_table import DatasetTable -from .notification import Notification -from .redshift_cluster import RedshiftCluster -from .vpc import Vpc -from .share_object import ShareObject, ShareObjectSM, ShareItemSM -from .notebook import Notebook -from .sgm_studio_notebook import SgmStudioNotebook -from .dashboard import Dashboard -from .pipeline import Pipeline -from .worksheet import Worksheet diff --git a/backend/dataall/db/api/dashboard.py b/backend/dataall/db/api/dashboard.py deleted file mode 100644 index bf6950002..000000000 --- a/backend/dataall/db/api/dashboard.py +++ /dev/null @@ -1,433 +0,0 @@ -import logging - -from sqlalchemy import or_, and_ -from sqlalchemy.orm import Query - -from .. import models, exceptions, permissions, paginate -from . import ( - Environment, - has_tenant_perm, - has_resource_perm, - ResourcePolicy, - Glossary, - Vote, -) - -logger = logging.getLogger(__name__) - - -class Dashboard: - @staticmethod - @has_tenant_perm(permissions.MANAGE_DASHBOARDS) - @has_resource_perm(permissions.CREATE_DASHBOARD) - def import_dashboard( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.Dashboard: - if not data: - raise exceptions.RequiredParameter(data) - if not data.get('environmentUri'): - raise exceptions.RequiredParameter('environmentUri') - if not data.get('SamlGroupName'): - raise exceptions.RequiredParameter('group') - if not data.get('dashboardId'): - raise exceptions.RequiredParameter('dashboardId') - if not data.get('label'): - raise exceptions.RequiredParameter('label') - - Environment.check_group_environment_permission( - session=session, - username=username, - groups=groups, - uri=uri, - group=data['SamlGroupName'], - permission_name=permissions.CREATE_DASHBOARD, - ) - - env: models.Environment = data.get( - 'environment', Environment.get_environment_by_uri(session, uri) - ) - dashboard: models.Dashboard = models.Dashboard( - label=data.get('label', 'untitled'), - environmentUri=data.get('environmentUri'), - organizationUri=env.organizationUri, - region=env.region, - DashboardId=data.get('dashboardId'), - AwsAccountId=env.AwsAccountId, - owner=username, - namespace='test', - tags=data.get('tags', []), - SamlGroupName=data['SamlGroupName'], - ) - session.add(dashboard) - session.commit() - - activity = models.Activity( - action='DASHBOARD:CREATE', - label='DASHBOARD:CREATE', - owner=username, - summary=f'{username} created dashboard {dashboard.label} in {env.label}', - targetUri=dashboard.dashboardUri, - targetType='dashboard', - ) - session.add(activity) - - Dashboard.set_dashboard_resource_policy( - session, env, dashboard, data['SamlGroupName'] - ) - - if 'terms' in data.keys(): - 
Glossary.set_glossary_terms_links( - session, - username, - dashboard.dashboardUri, - 'Dashboard', - data.get('terms', []), - ) - return dashboard - - @staticmethod - def set_dashboard_resource_policy(session, environment, dashboard, group): - ResourcePolicy.attach_resource_policy( - session=session, - group=group, - permissions=permissions.DASHBOARD_ALL, - resource_uri=dashboard.dashboardUri, - resource_type=models.Dashboard.__name__, - ) - if environment.SamlGroupName != dashboard.SamlGroupName: - ResourcePolicy.attach_resource_policy( - session=session, - group=environment.SamlGroupName, - permissions=permissions.DASHBOARD_ALL, - resource_uri=dashboard.dashboardUri, - resource_type=models.Dashboard.__name__, - ) - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DASHBOARDS) - @has_resource_perm(permissions.GET_DASHBOARD) - def get_dashboard( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.Dashboard: - return Dashboard.get_dashboard_by_uri(session, uri) - - @staticmethod - def get_dashboard_by_uri(session, uri) -> models.Dashboard: - dashboard: models.Dashboard = session.query(models.Dashboard).get(uri) - if not dashboard: - raise exceptions.ObjectNotFound('Dashboard', uri) - return dashboard - - @staticmethod - def query_user_dashboards(session, username, groups, filter) -> Query: - query = ( - session.query(models.Dashboard) - .outerjoin( - models.DashboardShare, - models.Dashboard.dashboardUri == models.DashboardShare.dashboardUri, - ) - .filter( - or_( - models.Dashboard.owner == username, - models.Dashboard.SamlGroupName.in_(groups), - and_( - models.DashboardShare.SamlGroupName.in_(groups), - models.DashboardShare.status - == models.DashboardShareStatus.APPROVED.value, - ), - ) - ) - ) - if filter and filter.get('term'): - query = query.filter( - or_( - models.Dashboard.description.ilike(filter.get('term') + '%%'), - models.Dashboard.label.ilike(filter.get('term') + '%%'), - ) - ) - return query - - @staticmethod - def paginated_user_dashboards( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Dashboard.query_user_dashboards(session, username, groups, data), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - def query_dashboard_shares(session, username, groups, uri, filter) -> Query: - query = ( - session.query(models.DashboardShare) - .join( - models.Dashboard, - models.Dashboard.dashboardUri == models.DashboardShare.dashboardUri, - ) - .filter( - and_( - models.DashboardShare.dashboardUri == uri, - or_( - models.Dashboard.owner == username, - models.Dashboard.SamlGroupName.in_(groups), - ), - ) - ) - ) - if filter and filter.get('term'): - query = query.filter( - or_( - models.DashboardShare.SamlGroupName.ilike( - filter.get('term') + '%%' - ), - models.Dashboard.label.ilike(filter.get('term') + '%%'), - ) - ) - return query - - @staticmethod - def query_all_user_groups_shareddashboard(session, username, groups, uri) -> Query: - query = ( - session.query(models.DashboardShare) - .filter( - and_( - models.DashboardShare.dashboardUri == uri, - models.DashboardShare.SamlGroupName.in_(groups), - ) - ) - ) - - return [ - share.SamlGroupName - for share in query.all() - ] - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DASHBOARDS) - @has_resource_perm(permissions.SHARE_DASHBOARD) - def paginated_dashboard_shares( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return 
paginate( - query=Dashboard.query_dashboard_shares( - session, username, groups, uri, data - ), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DASHBOARDS) - @has_resource_perm(permissions.UPDATE_DASHBOARD) - def update_dashboard( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.Dashboard: - - dashboard = data.get( - 'dashboard', - Dashboard.get_dashboard_by_uri(session, data['dashboardUri']), - ) - - for k in data.keys(): - setattr(dashboard, k, data.get(k)) - - if 'terms' in data.keys(): - Glossary.set_glossary_terms_links( - session, - username, - dashboard.dashboardUri, - 'Dashboard', - data.get('terms', []), - ) - environment: models.Environment = Environment.get_environment_by_uri( - session, dashboard.environmentUri - ) - Dashboard.set_dashboard_resource_policy( - session, environment, dashboard, dashboard.SamlGroupName - ) - return dashboard - - @staticmethod - def delete_dashboard( - session, username, groups, uri, data=None, check_perm=None - ) -> bool: - dashboard = Dashboard.get_dashboard_by_uri(session, uri) - session.delete(dashboard) - ResourcePolicy.delete_resource_policy( - session=session, resource_uri=uri, group=dashboard.SamlGroupName - ) - Glossary.delete_glossary_terms_links( - session, target_uri=dashboard.dashboardUri, target_type='Dashboard' - ) - Vote.delete_votes(session, dashboard.dashboardUri, 'dashboard') - session.commit() - return True - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DASHBOARDS) - def request_dashboard_share( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.DashboardShare: - dashboard = Dashboard.get_dashboard_by_uri(session, uri) - if dashboard.SamlGroupName == data['principalId']: - raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_DASHBOARD, - message=f'Team {dashboard.SamlGroupName} is the owner of the dashboard {dashboard.label}', - ) - share: models.DashboardShare = ( - session.query(models.DashboardShare) - .filter( - models.DashboardShare.dashboardUri == uri, - models.DashboardShare.SamlGroupName == data['principalId'], - ) - .first() - ) - if not share: - share = models.DashboardShare( - owner=username, - dashboardUri=dashboard.dashboardUri, - SamlGroupName=data['principalId'], - status=models.DashboardShareStatus.REQUESTED.value, - ) - session.add(share) - else: - if share.status not in models.DashboardShareStatus.__members__: - raise exceptions.InvalidInput( - 'Share status', - share.status, - str(models.DashboardShareStatus.__members__), - ) - if share.status == 'REJECTED': - share.status = 'REQUESTED' - - return share - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DASHBOARDS) - @has_resource_perm(permissions.SHARE_DASHBOARD) - def approve_dashboard_share( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.DashboardShare: - - share: models.DashboardShare = data.get( - 'share', session.query(models.DashboardShare).get(data['shareUri']) - ) - - if share.status not in models.DashboardShareStatus.__members__: - raise exceptions.InvalidInput( - 'Share status', - share.status, - str(models.DashboardShareStatus.__members__), - ) - if share.status == models.DashboardShareStatus.APPROVED.value: - return share - - share.status = models.DashboardShareStatus.APPROVED.value - - ResourcePolicy.attach_resource_policy( 
- session=session, - group=share.SamlGroupName, - permissions=[permissions.GET_DASHBOARD], - resource_uri=share.dashboardUri, - resource_type=models.Dashboard.__name__, - ) - - return share - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DASHBOARDS) - @has_resource_perm(permissions.SHARE_DASHBOARD) - def reject_dashboard_share( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.DashboardShare: - - share: models.DashboardShare = data.get( - 'share', session.query(models.DashboardShare).get(data['shareUri']) - ) - - if share.status not in models.DashboardShareStatus.__members__: - raise exceptions.InvalidInput( - 'Share status', - share.status, - str(models.DashboardShareStatus.__members__), - ) - if share.status == models.DashboardShareStatus.REJECTED.value: - return share - - share.status = models.DashboardShareStatus.REJECTED.value - - ResourcePolicy.delete_resource_policy( - session=session, - group=share.SamlGroupName, - resource_uri=share.dashboardUri, - resource_type=models.Dashboard.__name__, - ) - - return share - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DASHBOARDS) - @has_resource_perm(permissions.SHARE_DASHBOARD) - def share_dashboard( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.DashboardShare: - - dashboard = Dashboard.get_dashboard_by_uri(session, uri) - share = models.DashboardShare( - owner=username, - dashboardUri=dashboard.dashboardUri, - SamlGroupName=data['principalId'], - status=models.DashboardShareStatus.APPROVED.value, - ) - session.add(share) - ResourcePolicy.attach_resource_policy( - session=session, - group=data['principalId'], - permissions=[permissions.GET_DASHBOARD], - resource_uri=dashboard.dashboardUri, - resource_type=models.Dashboard.__name__, - ) - return share - - @staticmethod - def get_dashboard_share_by_uri(session, uri) -> models.DashboardShare: - share: models.DashboardShare = session.query(models.DashboardShare).get(uri) - if not share: - raise exceptions.ObjectNotFound('DashboardShare', uri) - return share diff --git a/backend/dataall/db/api/dataset.py b/backend/dataall/db/api/dataset.py deleted file mode 100644 index f913f7e3e..000000000 --- a/backend/dataall/db/api/dataset.py +++ /dev/null @@ -1,730 +0,0 @@ -import logging -from datetime import datetime - -from sqlalchemy import and_, or_ -from sqlalchemy.orm import Query - -from . import ( - Environment, - has_tenant_perm, - has_resource_perm, - ResourcePolicy, - KeyValueTag, - Vote, - Stack -) -from . import Organization -from .. 
import models, api, exceptions, permissions, paginate -from ..models.Enums import Language, ConfidentialityClassification -from ...utils.naming_convention import ( - NamingConventionService, - NamingConventionPattern, -) - -logger = logging.getLogger(__name__) - - -class Dataset: - @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) - @has_resource_perm(permissions.CREATE_DATASET) - def create_dataset( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.Dataset: - if not uri: - raise exceptions.RequiredParameter('environmentUri') - if not data: - raise exceptions.RequiredParameter('data') - if not data.get('SamlAdminGroupName'): - raise exceptions.RequiredParameter('group') - if not data.get('label'): - raise exceptions.RequiredParameter('label') - if len(data['label']) > 52: - raise exceptions.InvalidInput( - 'Dataset name', data['label'], 'less than 52 characters' - ) - - Environment.check_group_environment_permission( - session=session, - username=username, - groups=groups, - uri=uri, - group=data['SamlAdminGroupName'], - permission_name=permissions.CREATE_DATASET, - ) - - environment = Environment.get_environment_by_uri(session, uri) - - organization = Organization.get_organization_by_uri( - session, environment.organizationUri - ) - - dataset = models.Dataset( - label=data.get('label'), - owner=username, - description=data.get('description', 'No description provided'), - tags=data.get('tags', []), - AwsAccountId=environment.AwsAccountId, - SamlAdminGroupName=data['SamlAdminGroupName'], - region=environment.region, - S3BucketName='undefined', - GlueDatabaseName='undefined', - IAMDatasetAdminRoleArn='undefined', - IAMDatasetAdminUserArn='undefined', - KmsAlias='undefined', - environmentUri=environment.environmentUri, - organizationUri=environment.organizationUri, - language=data.get('language', Language.English.value), - confidentiality=data.get( - 'confidentiality', ConfidentialityClassification.Unclassified.value - ), - topics=data.get('topics', []), - businessOwnerEmail=data.get('businessOwnerEmail'), - businessOwnerDelegationEmails=data.get('businessOwnerDelegationEmails', []), - stewards=data.get('stewards') - if data.get('stewards') - else data['SamlAdminGroupName'], - ) - session.add(dataset) - session.commit() - - Dataset._set_dataset_aws_resources(dataset, data, environment) - - activity = models.Activity( - action='dataset:create', - label='dataset:create', - owner=username, - summary=f'{username} created dataset {dataset.name} in {environment.name} on organization {organization.name}', - targetUri=dataset.datasetUri, - targetType='dataset', - ) - session.add(activity) - - ResourcePolicy.attach_resource_policy( - session=session, - group=data['SamlAdminGroupName'], - permissions=permissions.DATASET_ALL, - resource_uri=dataset.datasetUri, - resource_type=models.Dataset.__name__, - ) - if dataset.stewards and dataset.stewards != dataset.SamlAdminGroupName: - ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.stewards, - permissions=permissions.DATASET_READ, - resource_uri=dataset.datasetUri, - resource_type=models.Dataset.__name__, - ) - if environment.SamlGroupName != dataset.SamlAdminGroupName: - ResourcePolicy.attach_resource_policy( - session=session, - group=environment.SamlGroupName, - permissions=permissions.DATASET_ALL, - resource_uri=dataset.datasetUri, - resource_type=models.Dataset.__name__, - ) - return dataset - - @staticmethod - def 
_set_dataset_aws_resources(dataset: models.Dataset, data, environment): - - bucket_name = NamingConventionService( - target_uri=dataset.datasetUri, - target_label=dataset.label, - pattern=NamingConventionPattern.S3, - resource_prefix=environment.resourcePrefix, - ).build_compliant_name() - dataset.S3BucketName = data.get('bucketName') or bucket_name - - glue_db_name = NamingConventionService( - target_uri=dataset.datasetUri, - target_label=dataset.label, - pattern=NamingConventionPattern.GLUE, - resource_prefix=environment.resourcePrefix, - ).build_compliant_name() - dataset.GlueDatabaseName = data.get('glueDatabaseName') or glue_db_name - - dataset.KmsAlias = bucket_name - - iam_role_name = NamingConventionService( - target_uri=dataset.datasetUri, - target_label=dataset.label, - pattern=NamingConventionPattern.IAM, - resource_prefix=environment.resourcePrefix, - ).build_compliant_name() - iam_role_arn = f'arn:aws:iam::{dataset.AwsAccountId}:role/{iam_role_name}' - if data.get('adminRoleName'): - dataset.IAMDatasetAdminRoleArn = ( - f"arn:aws:iam::{dataset.AwsAccountId}:role/{data['adminRoleName']}" - ) - dataset.IAMDatasetAdminUserArn = ( - f"arn:aws:iam::{dataset.AwsAccountId}:role/{data['adminRoleName']}" - ) - else: - dataset.IAMDatasetAdminRoleArn = iam_role_arn - dataset.IAMDatasetAdminUserArn = iam_role_arn - - glue_etl_basename = NamingConventionService( - target_uri=dataset.datasetUri, - target_label=dataset.label, - pattern=NamingConventionPattern.GLUE_ETL, - resource_prefix=environment.resourcePrefix, - ).build_compliant_name() - - dataset.GlueCrawlerName = f"{glue_etl_basename}-crawler" - dataset.GlueProfilingJobName = f"{glue_etl_basename}-profiler" - dataset.GlueProfilingTriggerSchedule = None - dataset.GlueProfilingTriggerName = f"{glue_etl_basename}-trigger" - dataset.GlueDataQualityJobName = f"{glue_etl_basename}-dataquality" - dataset.GlueDataQualitySchedule = None - dataset.GlueDataQualityTriggerName = f"{glue_etl_basename}-dqtrigger" - return dataset - - @staticmethod - def create_dataset_stack(session, dataset: models.Dataset) -> models.Stack: - return Stack.create_stack( - session=session, - environment_uri=dataset.environmentUri, - target_uri=dataset.datasetUri, - target_label=dataset.label, - target_type='dataset', - payload={ - 'bucket_name': dataset.S3BucketName, - 'database_name': dataset.GlueDatabaseName, - 'role_name': dataset.S3BucketName, - 'user_name': dataset.S3BucketName, - }, - ) - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) - def get_dataset( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.Dataset: - return Dataset.get_dataset_by_uri(session, uri) - - @staticmethod - def get_dataset_by_uri(session, dataset_uri) -> models.Dataset: - dataset: Dataset = session.query(models.Dataset).get(dataset_uri) - if not dataset: - raise exceptions.ObjectNotFound('Dataset', dataset_uri) - return dataset - - @staticmethod - def query_user_datasets(session, username, groups, filter) -> Query: - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() - query = ( - session.query(models.Dataset) - .outerjoin( - models.ShareObject, - models.ShareObject.datasetUri == models.Dataset.datasetUri, - ) - .outerjoin( - models.ShareObjectItem, - models.ShareObjectItem.shareUri == models.ShareObject.shareUri - ) - .filter( - or_( - models.Dataset.owner == username, - models.Dataset.SamlAdminGroupName.in_(groups), - models.Dataset.stewards.in_(groups), - and_( - 
models.ShareObject.principalId.in_(groups), - models.ShareObjectItem.status.in_(share_item_shared_states), - ), - and_( - models.ShareObject.owner == username, - models.ShareObjectItem.status.in_(share_item_shared_states), - ), - ) - ) - ) - if filter and filter.get('term'): - query = query.filter( - or_( - models.Dataset.description.ilike(filter.get('term') + '%%'), - models.Dataset.label.ilike(filter.get('term') + '%%'), - ) - ) - return query - - @staticmethod - def paginated_user_datasets( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Dataset.query_user_datasets(session, username, groups, data), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - def paginated_dataset_locations( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - query = session.query(models.DatasetStorageLocation).filter( - models.DatasetStorageLocation.datasetUri == uri - ) - if data and data.get('term'): - query = query.filter( - or_( - *[ - models.DatasetStorageLocation.name.ilike( - '%' + data.get('term') + '%' - ), - models.DatasetStorageLocation.S3Prefix.ilike( - '%' + data.get('term') + '%' - ), - ] - ) - ) - return paginate( - query=query, page_size=data.get('pageSize', 10), page=data.get('page', 1) - ).to_dict() - - @staticmethod - def paginated_dataset_tables( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - query = ( - session.query(models.DatasetTable) - .filter( - and_( - models.DatasetTable.datasetUri == uri, - models.DatasetTable.LastGlueTableStatus != 'Deleted', - ) - ) - .order_by(models.DatasetTable.created.desc()) - ) - if data and data.get('term'): - query = query.filter( - or_( - *[ - models.DatasetTable.name.ilike('%' + data.get('term') + '%'), - models.DatasetTable.GlueTableName.ilike( - '%' + data.get('term') + '%' - ), - ] - ) - ) - return paginate( - query=query, page_size=data.get('pageSize', 10), page=data.get('page', 1) - ).to_dict() - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) - @has_resource_perm(permissions.UPDATE_DATASET) - def update_dataset( - session, username, groups, uri, data=None, check_perm=None - ) -> models.Dataset: - dataset: models.Dataset = Dataset.get_dataset_by_uri(session, uri) - if data and isinstance(data, dict): - for k in data.keys(): - if k != 'stewards': - setattr(dataset, k, data.get(k)) - if data.get('KmsAlias') not in ["Undefined"]: - dataset.KmsAlias = "SSE-S3" if data.get('KmsAlias') == "" else data.get('KmsAlias') - dataset.importedKmsKey = False if data.get('KmsAlias') == "" else True - if data.get('stewards') and data.get('stewards') != dataset.stewards: - if data.get('stewards') != dataset.SamlAdminGroupName: - Dataset.transfer_stewardship_to_new_stewards( - session, dataset, data['stewards'] - ) - dataset.stewards = data['stewards'] - else: - Dataset.transfer_stewardship_to_owners(session, dataset) - dataset.stewards = dataset.SamlAdminGroupName - - ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.SamlAdminGroupName, - permissions=permissions.DATASET_ALL, - resource_uri=dataset.datasetUri, - resource_type=models.Dataset.__name__, - ) - Dataset.update_dataset_glossary_terms(session, username, uri, data) - activity = models.Activity( - action='dataset:update', - label='dataset:update', - owner=username, - summary=f'{username} updated dataset {dataset.name}', - targetUri=dataset.datasetUri, - targetType='dataset', - ) - session.add(activity) - session.commit() 
- return dataset - - @staticmethod - def transfer_stewardship_to_owners(session, dataset): - # Remove Steward Resource Policy on Dataset - env = Environment.get_environment_by_uri(session, dataset.environmentUri) - if dataset.stewards != env.SamlGroupName: - ResourcePolicy.delete_resource_policy( - session=session, - group=dataset.stewards, - resource_uri=dataset.datasetUri, - ) - - # Remove Steward Resource Policy on Dataset Tables - dataset_tables = [t.tableUri for t in Dataset.get_dataset_tables(session, dataset.datasetUri)] - for tableUri in dataset_tables: - if dataset.stewards != env.SamlGroupName: - ResourcePolicy.delete_resource_policy( - session=session, - group=dataset.stewards, - resource_uri=tableUri, - ) - - # Remove Steward Resource Policy on Dataset Share Objects - dataset_shares = ( - session.query(models.ShareObject) - .filter(models.ShareObject.datasetUri == dataset.datasetUri) - .all() - ) - if dataset_shares: - for share in dataset_shares: - ResourcePolicy.delete_resource_policy( - session=session, - group=dataset.stewards, - resource_uri=share.shareUri, - ) - return dataset - - @staticmethod - def transfer_stewardship_to_new_stewards(session, dataset, new_stewards): - env = Environment.get_environment_by_uri(session, dataset.environmentUri) - if dataset.stewards != dataset.SamlAdminGroupName: - ResourcePolicy.delete_resource_policy( - session=session, - group=dataset.stewards, - resource_uri=dataset.datasetUri, - ) - ResourcePolicy.attach_resource_policy( - session=session, - group=new_stewards, - permissions=permissions.DATASET_READ, - resource_uri=dataset.datasetUri, - resource_type=models.Dataset.__name__, - ) - - dataset_tables = [t.tableUri for t in Dataset.get_dataset_tables(session, dataset.datasetUri)] - for tableUri in dataset_tables: - if dataset.stewards != dataset.SamlAdminGroupName: - ResourcePolicy.delete_resource_policy( - session=session, - group=dataset.stewards, - resource_uri=tableUri, - ) - ResourcePolicy.attach_resource_policy( - session=session, - group=new_stewards, - permissions=permissions.DATASET_TABLE_READ, - resource_uri=tableUri, - resource_type=models.DatasetTable.__name__, - ) - - dataset_shares = ( - session.query(models.ShareObject) - .filter(models.ShareObject.datasetUri == dataset.datasetUri) - .all() - ) - if dataset_shares: - for share in dataset_shares: - ResourcePolicy.attach_resource_policy( - session=session, - group=new_stewards, - permissions=permissions.SHARE_OBJECT_APPROVER, - resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, - ) - if dataset.stewards != dataset.SamlAdminGroupName: - ResourcePolicy.delete_resource_policy( - session=session, - group=dataset.stewards, - resource_uri=share.shareUri, - ) - return dataset - - @staticmethod - def update_dataset_glossary_terms(session, username, uri, data): - if data.get('terms'): - input_terms = data.get('terms', []) - current_links = session.query(models.TermLink).filter( - models.TermLink.targetUri == uri - ) - for current_link in current_links: - if current_link not in input_terms: - session.delete(current_link) - for nodeUri in input_terms: - term = session.query(models.GlossaryNode).get(nodeUri) - if term: - link = ( - session.query(models.TermLink) - .filter( - models.TermLink.targetUri == uri, - models.TermLink.nodeUri == nodeUri, - ) - .first() - ) - if not link: - new_link = models.TermLink( - targetUri=uri, - nodeUri=nodeUri, - targetType='Dataset', - owner=username, - approvedByOwner=True, - ) - session.add(new_link) - - @staticmethod - def 
update_bucket_status(session, dataset_uri): - """ - helper method to update the dataset bucket status - """ - dataset = Dataset.get_dataset_by_uri(session, dataset_uri) - dataset.bucketCreated = True - return dataset - - @staticmethod - def update_glue_database_status(session, dataset_uri): - """ - helper method to update the dataset db status - """ - dataset = Dataset.get_dataset_by_uri(session, dataset_uri) - dataset.glueDatabaseCreated = True - - @staticmethod - def get_dataset_tables(session, dataset_uri): - """return the dataset tables""" - return ( - session.query(models.DatasetTable) - .filter(models.DatasetTable.datasetUri == dataset_uri) - .all() - ) - - @staticmethod - def get_dataset_folders(session, dataset_uri): - """return the dataset folders""" - return ( - session.query(models.DatasetStorageLocation) - .filter(models.DatasetStorageLocation.datasetUri == dataset_uri) - .all() - ) - - @staticmethod - def query_dataset_shares(session, dataset_uri) -> Query: - return session.query(models.ShareObject).filter( - and_( - models.ShareObject.datasetUri == dataset_uri, - models.ShareObject.deleted.is_(None), - ) - ) - - @staticmethod - def paginated_dataset_shares( - session, username, groups, uri, data=None, check_perm=None - ) -> [models.ShareObject]: - query = Dataset.query_dataset_shares(session, uri) - return paginate( - query=query, page=data.get('page', 1), page_size=data.get('pageSize', 5) - ).to_dict() - - @staticmethod - def list_dataset_shares(session, dataset_uri) -> [models.ShareObject]: - """return the dataset shares""" - query = Dataset.query_dataset_shares(session, dataset_uri) - return query.all() - - @staticmethod - def list_dataset_shares_with_existing_shared_items(session, dataset_uri) -> [models.ShareObject]: - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() - query = ( - session.query(models.ShareObject) - .outerjoin( - models.ShareObjectItem, - models.ShareObjectItem.shareUri == models.ShareObject.shareUri - ) - .filter( - and_( - models.ShareObject.datasetUri == dataset_uri, - models.ShareObject.deleted.is_(None), - models.ShareObjectItem.status.in_(share_item_shared_states), - ) - ) - ) - return query.all() - - @staticmethod - def list_dataset_redshift_clusters( - session, dataset_uri - ) -> [models.RedshiftClusterDataset]: - """return the dataset clusters""" - return ( - session.query(models.RedshiftClusterDataset) - .filter(models.RedshiftClusterDataset.datasetUri == dataset_uri) - .all() - ) - - @staticmethod - def delete_dataset( - session, username, groups, uri, data=None, check_perm=None - ) -> bool: - dataset = Dataset.get_dataset_by_uri(session, uri) - Dataset._delete_dataset_shares_with_no_shared_items(session, uri) - Dataset._delete_dataset_term_links(session, uri) - Dataset._delete_dataset_tables(session, dataset.datasetUri) - Dataset._delete_dataset_locations(session, dataset.datasetUri) - KeyValueTag.delete_key_value_tags(session, dataset.datasetUri, 'dataset') - Vote.delete_votes(session, dataset.datasetUri, 'dataset') - session.delete(dataset) - ResourcePolicy.delete_resource_policy( - session=session, resource_uri=uri, group=dataset.SamlAdminGroupName - ) - env = Environment.get_environment_by_uri(session, dataset.environmentUri) - if dataset.SamlAdminGroupName != env.SamlGroupName: - ResourcePolicy.delete_resource_policy( - session=session, resource_uri=uri, group=env.SamlGroupName - ) - if dataset.stewards: - ResourcePolicy.delete_resource_policy( - session=session, resource_uri=uri, group=dataset.stewards - ) - 
return True - - @staticmethod - def _delete_dataset_shares_with_no_shared_items(session, dataset_uri): - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() - shares = ( - session.query(models.ShareObject) - .outerjoin( - models.ShareObjectItem, - models.ShareObjectItem.shareUri == models.ShareObject.shareUri - ) - .filter( - and_( - models.ShareObject.datasetUri == dataset_uri, - models.ShareObjectItem.status.notin_(share_item_shared_states), - ) - ) - .all() - ) - for share in shares: - share_items = ( - session.query(models.ShareObjectItem) - .filter(models.ShareObjectItem.shareUri == share.shareUri) - .all() - ) - for item in share_items: - session.delete(item) - - share_obj = ( - session.query(models.ShareObject) - .filter(models.ShareObject.shareUri == share.shareUri) - .first() - ) - session.delete(share_obj) - - @staticmethod - def _delete_dataset_term_links(session, uri): - tables = [t.tableUri for t in Dataset.get_dataset_tables(session, uri)] - for tableUri in tables: - term_links = ( - session.query(models.TermLink) - .filter( - and_( - models.TermLink.targetUri == tableUri, - models.TermLink.targetType == 'DatasetTable', - ) - ) - .all() - ) - for link in term_links: - session.delete(link) - session.commit() - term_links = ( - session.query(models.TermLink) - .filter( - and_( - models.TermLink.targetUri == uri, - models.TermLink.targetType == 'Dataset', - ) - ) - .all() - ) - for link in term_links: - session.delete(link) - - @staticmethod - def _delete_dataset_tables(session, dataset_uri) -> bool: - tables = ( - session.query(models.DatasetTable) - .filter( - and_( - models.DatasetTable.datasetUri == dataset_uri, - ) - ) - .all() - ) - for table in tables: - table.deleted = datetime.now() - return tables - - @staticmethod - def _delete_dataset_locations(session, dataset_uri) -> bool: - locations = ( - session.query(models.DatasetStorageLocation) - .filter( - and_( - models.DatasetStorageLocation.datasetUri == dataset_uri, - ) - ) - .all() - ) - for location in locations: - session.delete(location) - return True - - @staticmethod - def list_all_datasets(session) -> [models.Dataset]: - return session.query(models.Dataset).all() - - @staticmethod - def list_all_active_datasets(session) -> [models.Dataset]: - return ( - session.query(models.Dataset).filter(models.Dataset.deleted.is_(None)).all() - ) - - @staticmethod - def get_dataset_by_bucket_name(session, bucket) -> [models.Dataset]: - return ( - session.query(models.Dataset) - .filter(models.Dataset.S3BucketName == bucket) - .first() - ) - - @staticmethod - def count_dataset_tables(session, dataset_uri): - return ( - session.query(models.DatasetTable) - .filter(models.DatasetTable.datasetUri == dataset_uri) - .count() - ) - - @staticmethod - def count_dataset_locations(session, dataset_uri): - return ( - session.query(models.DatasetStorageLocation) - .filter(models.DatasetStorageLocation.datasetUri == dataset_uri) - .count() - ) diff --git a/backend/dataall/db/api/dataset_location.py b/backend/dataall/db/api/dataset_location.py deleted file mode 100644 index ef9f085f3..000000000 --- a/backend/dataall/db/api/dataset_location.py +++ /dev/null @@ -1,204 +0,0 @@ -import logging -from typing import List - -from sqlalchemy import and_, or_ - -from . import has_tenant_perm, has_resource_perm, Glossary -from .. 
import models, api, paginate, permissions, exceptions -from .dataset import Dataset - -logger = logging.getLogger(__name__) - - -class DatasetStorageLocation: - @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) - @has_resource_perm(permissions.CREATE_DATASET_FOLDER) - def create_dataset_location( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.DatasetStorageLocation: - dataset = Dataset.get_dataset_by_uri(session, uri) - exists = ( - session.query(models.DatasetStorageLocation) - .filter( - and_( - models.DatasetStorageLocation.datasetUri == dataset.datasetUri, - models.DatasetStorageLocation.S3Prefix == data['prefix'], - ) - ) - .count() - ) - - if exists: - raise exceptions.ResourceAlreadyExists( - action='Create Folder', - message=f'Folder: {data["prefix"]} already exist on dataset {uri}', - ) - - location = models.DatasetStorageLocation( - datasetUri=dataset.datasetUri, - label=data.get('label'), - description=data.get('description', 'No description provided'), - tags=data.get('tags', []), - S3Prefix=data.get('prefix'), - S3BucketName=dataset.S3BucketName, - AWSAccountId=dataset.AwsAccountId, - owner=dataset.owner, - region=dataset.region, - ) - session.add(location) - session.commit() - - if 'terms' in data.keys(): - Glossary.set_glossary_terms_links( - session, - username, - location.locationUri, - 'DatasetStorageLocation', - data.get('terms', []), - ) - - return location - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) - @has_resource_perm(permissions.LIST_DATASET_FOLDERS) - def list_dataset_locations( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> dict: - query = ( - session.query(models.DatasetStorageLocation) - .filter(models.DatasetStorageLocation.datasetUri == uri) - .order_by(models.DatasetStorageLocation.created.desc()) - ) - if data.get('term'): - term = data.get('term') - query = query.filter( - models.DatasetStorageLocation.label.ilike('%' + term + '%') - ) - return paginate( - query, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) - @has_resource_perm(permissions.LIST_DATASET_FOLDERS) - def get_dataset_location( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.DatasetStorageLocation: - return DatasetStorageLocation.get_location_by_uri(session, data['locationUri']) - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) - @has_resource_perm(permissions.UPDATE_DATASET_FOLDER) - def update_dataset_location( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.DatasetStorageLocation: - - location = data.get( - 'location', - DatasetStorageLocation.get_location_by_uri(session, data['locationUri']), - ) - - for k in data.keys(): - setattr(location, k, data.get(k)) - - if 'terms' in data.keys(): - Glossary.set_glossary_terms_links( - session, - username, - location.locationUri, - 'DatasetStorageLocation', - data.get('terms', []), - ) - return location - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) - @has_resource_perm(permissions.DELETE_DATASET_FOLDER) - def delete_dataset_location( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ): - location = DatasetStorageLocation.get_location_by_uri( - session, 
data['locationUri'] - ) - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() - share_item = ( - session.query(models.ShareObjectItem) - .filter( - and_( - models.ShareObjectItem.itemUri == location.locationUri, - models.ShareObjectItem.status.in_(share_item_shared_states) - ) - ) - .first() - ) - if share_item: - raise exceptions.ResourceShared( - action=permissions.DELETE_DATASET_FOLDER, - message='Revoke all folder shares before deletion', - ) - session.query(models.ShareObjectItem).filter( - models.ShareObjectItem.itemUri == location.locationUri, - ).delete() - - session.delete(location) - Glossary.delete_glossary_terms_links( - session, - target_uri=location.locationUri, - target_type='DatasetStorageLocation', - ) - return True - - @staticmethod - def get_location_by_uri(session, location_uri) -> models.DatasetStorageLocation: - location: DatasetStorageLocation = session.query( - models.DatasetStorageLocation - ).get(location_uri) - if not location: - raise exceptions.ObjectNotFound('Folder', location_uri) - return location - - @staticmethod - def get_location_by_s3_prefix(session, s3_prefix, accountid, region): - location: models.DatasetStorageLocation = ( - session.query(models.DatasetStorageLocation) - .filter( - and_( - models.DatasetStorageLocation.S3Prefix.startswith(s3_prefix), - models.DatasetStorageLocation.AWSAccountId == accountid, - models.DatasetStorageLocation.region == region, - ) - ) - .first() - ) - if not location: - logging.info(f'No location found for {s3_prefix}|{accountid}|{region}') - else: - logging.info(f'Found location {location.locationUri}|{location.S3Prefix}') - return location diff --git a/backend/dataall/db/api/dataset_profiling_run.py b/backend/dataall/db/api/dataset_profiling_run.py deleted file mode 100644 index f1552bc81..000000000 --- a/backend/dataall/db/api/dataset_profiling_run.py +++ /dev/null @@ -1,156 +0,0 @@ -from sqlalchemy import and_ - -from .. 
import paginate, models -from ..exceptions import ObjectNotFound - - -class DatasetProfilingRun: - def __init__(self): - pass - - @staticmethod - def start_profiling( - session, datasetUri, tableUri=None, GlueTableName=None, GlueJobRunId=None - ): - dataset: models.Dataset = session.query(models.Dataset).get(datasetUri) - if not dataset: - raise ObjectNotFound('Dataset', datasetUri) - - if tableUri and not GlueTableName: - table: models.DatasetTable = session.query(models.DatasetTable).get( - tableUri - ) - if not table: - raise ObjectNotFound('DatasetTable', tableUri) - GlueTableName = table.GlueTableName - - environment: models.Environment = session.query(models.Environment).get( - dataset.environmentUri - ) - if not environment: - raise ObjectNotFound('Environment', dataset.environmentUri) - - run = models.DatasetProfilingRun( - datasetUri=dataset.datasetUri, - status='RUNNING', - AwsAccountId=environment.AwsAccountId, - GlueJobName=dataset.GlueProfilingJobName or 'Unknown', - GlueTriggerSchedule=dataset.GlueProfilingTriggerSchedule, - GlueTriggerName=dataset.GlueProfilingTriggerName, - GlueTableName=GlueTableName, - GlueJobRunId=GlueJobRunId, - owner=dataset.owner, - label=dataset.GlueProfilingJobName or 'Unknown', - ) - - session.add(run) - session.commit() - return run - - @staticmethod - def update_run( - session, - profilingRunUri=None, - GlueJobRunId=None, - GlueJobRunState=None, - results=None, - ): - run = DatasetProfilingRun.get_profiling_run( - session, profilingRunUri=profilingRunUri, GlueJobRunId=GlueJobRunId - ) - if GlueJobRunId: - run.GlueJobRunId = GlueJobRunId - if GlueJobRunState: - run.status = GlueJobRunState - if results: - run.results = results - session.commit() - return run - - @staticmethod - def get_profiling_run( - session, profilingRunUri=None, GlueJobRunId=None, GlueTableName=None - ): - if profilingRunUri: - run: models.DatasetProfilingRun = session.query( - models.DatasetProfilingRun - ).get(profilingRunUri) - else: - run: models.DatasetProfilingRun = ( - session.query(models.DatasetProfilingRun) - .filter(models.DatasetProfilingRun.GlueJobRunId == GlueJobRunId) - .filter(models.DatasetProfilingRun.GlueTableName == GlueTableName) - .first() - ) - return run - - @staticmethod - def list_profiling_runs(session, datasetUri, filter: dict = None): - if not filter: - filter = {} - q = ( - session.query(models.DatasetProfilingRun) - .filter(models.DatasetProfilingRun.datasetUri == datasetUri) - .order_by(models.DatasetProfilingRun.created.desc()) - ) - return paginate( - q, page=filter.get('page', 1), page_size=filter.get('pageSize', 20) - ).to_dict() - - @staticmethod - def list_table_profiling_runs(session, tableUri, filter): - if not filter: - filter = {} - q = ( - session.query(models.DatasetProfilingRun) - .join( - models.DatasetTable, - models.DatasetTable.datasetUri == models.DatasetProfilingRun.datasetUri, - ) - .filter( - and_( - models.DatasetTable.tableUri == tableUri, - models.DatasetTable.GlueTableName - == models.DatasetProfilingRun.GlueTableName, - ) - ) - .order_by(models.DatasetProfilingRun.created.desc()) - ) - return paginate( - q, page=filter.get('page', 1), page_size=filter.get('pageSize', 20) - ).to_dict() - - @staticmethod - def get_table_last_profiling_run(session, tableUri): - return ( - session.query(models.DatasetProfilingRun) - .join( - models.DatasetTable, - models.DatasetTable.datasetUri == models.DatasetProfilingRun.datasetUri, - ) - .filter(models.DatasetTable.tableUri == tableUri) - .filter( - models.DatasetTable.GlueTableName - 
== models.DatasetProfilingRun.GlueTableName - ) - .order_by(models.DatasetProfilingRun.created.desc()) - .first() - ) - - @staticmethod - def get_table_last_profiling_run_with_results(session, tableUri): - return ( - session.query(models.DatasetProfilingRun) - .join( - models.DatasetTable, - models.DatasetTable.datasetUri == models.DatasetProfilingRun.datasetUri, - ) - .filter(models.DatasetTable.tableUri == tableUri) - .filter( - models.DatasetTable.GlueTableName - == models.DatasetProfilingRun.GlueTableName - ) - .filter(models.DatasetProfilingRun.results.isnot(None)) - .order_by(models.DatasetProfilingRun.created.desc()) - .first() - ) diff --git a/backend/dataall/db/api/dataset_table.py b/backend/dataall/db/api/dataset_table.py deleted file mode 100644 index 77ee515e3..000000000 --- a/backend/dataall/db/api/dataset_table.py +++ /dev/null @@ -1,365 +0,0 @@ -import logging -from typing import List - -from sqlalchemy.sql import and_ - -from .. import models, api, permissions, exceptions, paginate -from . import has_tenant_perm, has_resource_perm, Glossary, ResourcePolicy, Environment -from ..models import Dataset -from ...utils import json_utils - -logger = logging.getLogger(__name__) - - -class DatasetTable: - @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) - @has_resource_perm(permissions.CREATE_DATASET_TABLE) - def create_dataset_table( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.DatasetTable: - dataset = api.Dataset.get_dataset_by_uri(session, uri) - exists = ( - session.query(models.DatasetTable) - .filter( - and_( - models.DatasetTable.datasetUri == uri, - models.DatasetTable.GlueTableName == data['name'], - ) - ) - .count() - ) - - if exists: - raise exceptions.ResourceAlreadyExists( - action='Create Table', - message=f'table: {data["name"]} already exist on dataset {uri}', - ) - - table = models.DatasetTable( - datasetUri=uri, - label=data['name'], - name=data['name'], - description=data.get('description', 'No description provided'), - tags=data.get('tags', []), - S3BucketName=dataset.S3BucketName, - S3Prefix=data.get('S3Prefix', 'unknown'), - AWSAccountId=dataset.AwsAccountId, - GlueDatabaseName=dataset.GlueDatabaseName, - GlueTableConfig=data.get('config'), - GlueTableName=data['name'], - owner=dataset.owner, - region=dataset.region, - ) - session.add(table) - if data.get('terms') is not None: - Glossary.set_glossary_terms_links( - session, username, table.tableUri, 'DatasetTable', data.get('terms', []) - ) - session.commit() - - # ADD DATASET TABLE PERMISSIONS - environment = Environment.get_environment_by_uri(session, dataset.environmentUri) - permission_group = set([dataset.SamlAdminGroupName, environment.SamlGroupName, dataset.stewards if dataset.stewards is not None else dataset.SamlAdminGroupName]) - for group in permission_group: - ResourcePolicy.attach_resource_policy( - session=session, - group=group, - permissions=permissions.DATASET_TABLE_READ, - resource_uri=table.tableUri, - resource_type=models.DatasetTable.__name__, - ) - return table - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) - def list_dataset_tables( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> dict: - query = ( - session.query(models.DatasetTable) - .filter(models.DatasetTable.datasetUri == uri) - .order_by(models.DatasetTable.created.desc()) - ) - if data.get('term'): - term = data.get('term') - query = 
query.filter(models.DatasetTable.label.ilike('%' + term + '%')) - return paginate( - query, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) - def get_dataset_table( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.DatasetTable: - return DatasetTable.get_dataset_table_by_uri(session, data['tableUri']) - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) - @has_resource_perm(permissions.UPDATE_DATASET_TABLE) - def update_dataset_table( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ): - table = data.get( - 'table', - DatasetTable.get_dataset_table_by_uri(session, data['tableUri']), - ) - - for k in [attr for attr in data.keys() if attr != 'term']: - setattr(table, k, data.get(k)) - - if data.get('terms') is not None: - Glossary.set_glossary_terms_links( - session, username, table.tableUri, 'DatasetTable', data.get('terms', []) - ) - - return table - - @staticmethod - @has_tenant_perm(permissions.MANAGE_DATASETS) - @has_resource_perm(permissions.DELETE_DATASET_TABLE) - def delete_dataset_table( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ): - table = DatasetTable.get_dataset_table_by_uri(session, data['tableUri']) - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() - share_item = ( - session.query(models.ShareObjectItem) - .filter( - and_( - models.ShareObjectItem.itemUri == table.tableUri, - models.ShareObjectItem.status.in_(share_item_shared_states) - ) - ) - .first() - ) - if share_item: - raise exceptions.ResourceShared( - action=permissions.DELETE_DATASET_TABLE, - message='Revoke all table shares before deletion', - ) - session.query(models.ShareObjectItem).filter( - models.ShareObjectItem.itemUri == table.tableUri, - ).delete() - session.delete(table) - Glossary.delete_glossary_terms_links( - session, target_uri=table.tableUri, target_type='DatasetTable' - ) - return True - - @staticmethod - def query_dataset_tables_shared_with_env( - session, environment_uri: str, dataset_uri: str - ): - """For a given dataset, returns the list of Tables shared with the environment - This means looking at approved ShareObject items - for the share object associating the dataset and environment - """ - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() - env_tables_shared = ( - session.query(models.DatasetTable) # all tables - .join( - models.ShareObjectItem, # found in ShareObjectItem - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, - ) - .join( - models.ShareObject, # jump to share object - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, - ) - .filter( - and_( - models.ShareObject.datasetUri == dataset_uri, # for this dataset - models.ShareObject.environmentUri - == environment_uri, # for this environment - models.ShareObjectItem.status.in_(share_item_shared_states), - ) - ) - .all() - ) - - return env_tables_shared - - @staticmethod - def get_dataset_tables_shared_with_env( - session, environment_uri: str, dataset_uri: str - ): - return [ - {"tableUri": t.tableUri, "GlueTableName": t.GlueTableName} - for t in DatasetTable.query_dataset_tables_shared_with_env( - session, environment_uri, dataset_uri - ) - ] - - @staticmethod - def get_dataset_table_by_uri(session, table_uri): - table: models.DatasetTable = 
session.query(models.DatasetTable).get(table_uri) - if not table: - raise exceptions.ObjectNotFound('DatasetTable', table_uri) - return table - - @staticmethod - def sync(session, datasetUri, glue_tables=None): - - dataset: Dataset = session.query(Dataset).get(datasetUri) - if dataset: - existing_tables = ( - session.query(models.DatasetTable) - .filter(models.DatasetTable.datasetUri == datasetUri) - .all() - ) - existing_table_names = [e.GlueTableName for e in existing_tables] - existing_dataset_tables_map = {t.GlueTableName: t for t in existing_tables} - - DatasetTable.update_existing_tables_status(existing_tables, glue_tables) - logger.info( - f'existing_tables={glue_tables}' - ) - for table in glue_tables: - if table['Name'] not in existing_table_names: - logger.info( - f'Storing new table: {table} for dataset db {dataset.GlueDatabaseName}' - ) - updated_table = models.DatasetTable( - datasetUri=dataset.datasetUri, - label=table['Name'], - name=table['Name'], - region=dataset.region, - owner=dataset.owner, - GlueDatabaseName=dataset.GlueDatabaseName, - AWSAccountId=dataset.AwsAccountId, - S3BucketName=dataset.S3BucketName, - S3Prefix=table.get('StorageDescriptor', {}).get('Location'), - GlueTableName=table['Name'], - LastGlueTableStatus='InSync', - GlueTableProperties=json_utils.to_json( - table.get('Parameters', {}) - ), - ) - session.add(updated_table) - session.commit() - # ADD DATASET TABLE PERMISSIONS - env = Environment.get_environment_by_uri(session, dataset.environmentUri) - permission_group = set([dataset.SamlAdminGroupName, env.SamlGroupName, dataset.stewards if dataset.stewards is not None else dataset.SamlAdminGroupName]) - for group in permission_group: - ResourcePolicy.attach_resource_policy( - session=session, - group=group, - permissions=permissions.DATASET_TABLE_READ, - resource_uri=updated_table.tableUri, - resource_type=models.DatasetTable.__name__, - ) - else: - logger.info( - f'Updating table: {table} for dataset db {dataset.GlueDatabaseName}' - ) - updated_table: models.DatasetTable = ( - existing_dataset_tables_map.get(table['Name']) - ) - updated_table.GlueTableProperties = json_utils.to_json( - table.get('Parameters', {}) - ) - - DatasetTable.sync_table_columns(session, updated_table, table) - - return True - - @staticmethod - def update_existing_tables_status(existing_tables, glue_tables): - for existing_table in existing_tables: - if existing_table.GlueTableName not in [t['Name'] for t in glue_tables]: - existing_table.LastGlueTableStatus = 'Deleted' - logger.info( - f'Table {existing_table.GlueTableName} status set to Deleted from Glue.' 
- ) - - @staticmethod - def sync_table_columns(session, dataset_table, glue_table): - - DatasetTable.delete_all_table_columns(session, dataset_table) - - columns = [ - {**item, **{'columnType': 'column'}} - for item in glue_table.get('StorageDescriptor', {}).get('Columns', []) - ] - partitions = [ - {**item, **{'columnType': f'partition_{index}'}} - for index, item in enumerate(glue_table.get('PartitionKeys', [])) - ] - - logger.debug(f'Found columns {columns} for table {dataset_table}') - logger.debug(f'Found partitions {partitions} for table {dataset_table}') - - for col in columns + partitions: - table_col = models.DatasetTableColumn( - name=col['Name'], - description=col.get('Comment', 'No description provided'), - label=col['Name'], - owner=dataset_table.owner, - datasetUri=dataset_table.datasetUri, - tableUri=dataset_table.tableUri, - AWSAccountId=dataset_table.AWSAccountId, - GlueDatabaseName=dataset_table.GlueDatabaseName, - GlueTableName=dataset_table.GlueTableName, - region=dataset_table.region, - typeName=col['Type'], - columnType=col['columnType'], - ) - session.add(table_col) - - @staticmethod - def delete_all_table_columns(session, dataset_table): - session.query(models.DatasetTableColumn).filter( - and_( - models.DatasetTableColumn.GlueDatabaseName - == dataset_table.GlueDatabaseName, - models.DatasetTableColumn.GlueTableName == dataset_table.GlueTableName, - ) - ).delete() - session.commit() - - @staticmethod - def get_table_by_s3_prefix(session, s3_prefix, accountid, region): - table: models.DatasetTable = ( - session.query(models.DatasetTable) - .filter( - and_( - models.DatasetTable.S3Prefix.startswith(s3_prefix), - models.DatasetTable.AWSAccountId == accountid, - models.DatasetTable.region == region, - ) - ) - .first() - ) - if not table: - logging.info(f'No table found for {s3_prefix}|{accountid}|{region}') - else: - logging.info( - f'Found table {table.tableUri}|{table.GlueTableName}|{table.S3Prefix}' - ) - return table diff --git a/backend/dataall/db/api/environment.py b/backend/dataall/db/api/environment.py deleted file mode 100644 index cac9f2bed..000000000 --- a/backend/dataall/db/api/environment.py +++ /dev/null @@ -1,1505 +0,0 @@ -import logging -import re - -from sqlalchemy import or_, case, func -from sqlalchemy.orm import Query -from sqlalchemy.sql import and_ - -from .. import exceptions, permissions, models, api -from . 
import ( - has_resource_perm, - has_tenant_perm, - ResourcePolicy, - Permission, - KeyValueTag -) -from ..api.organization import Organization -from ..models import EnvironmentGroup -from ..models.Enums import ( - ShareableType, - EnvironmentType, - EnvironmentPermission, - PrincipalType - -) -from ..models.Permission import PermissionType -from ..paginator import Page, paginate -from ...utils.naming_convention import ( - NamingConventionService, - NamingConventionPattern, -) - -log = logging.getLogger(__name__) - - -class Environment: - @staticmethod - @has_tenant_perm(permissions.MANAGE_ENVIRONMENTS) - @has_resource_perm(permissions.LINK_ENVIRONMENT) - def create_environment(session, username, groups, uri, data=None, check_perm=None): - Environment._validate_creation_params(data, uri) - organization = Organization.get_organization_by_uri(session, uri) - env = models.Environment( - organizationUri=data.get('organizationUri'), - label=data.get('label', 'Unnamed'), - tags=data.get('tags', []), - owner=username, - description=data.get('description', ''), - environmentType=data.get('type', EnvironmentType.Data.value), - AwsAccountId=data.get('AwsAccountId'), - region=data.get('region'), - SamlGroupName=data['SamlGroupName'], - validated=False, - isOrganizationDefaultEnvironment=False, - userRoleInEnvironment=EnvironmentPermission.Owner.value, - EnvironmentDefaultIAMRoleName=data.get( - 'EnvironmentDefaultIAMRoleName', 'unknown' - ), - EnvironmentDefaultIAMRoleArn=f'arn:aws:iam::{data.get("AwsAccountId")}:role/{data.get("EnvironmentDefaultIAMRoleName")}', - CDKRoleArn=f"arn:aws:iam::{data.get('AwsAccountId')}:role/{data['cdk_role_name']}", - dashboardsEnabled=data.get('dashboardsEnabled', False), - notebooksEnabled=data.get('notebooksEnabled', True), - mlStudiosEnabled=data.get('mlStudiosEnabled', True), - pipelinesEnabled=data.get('pipelinesEnabled', True), - warehousesEnabled=data.get('warehousesEnabled', True), - resourcePrefix=data.get('resourcePrefix'), - ) - session.add(env) - session.commit() - - env.EnvironmentDefaultBucketName = NamingConventionService( - target_uri=env.environmentUri, - target_label=env.label, - pattern=NamingConventionPattern.S3, - resource_prefix=env.resourcePrefix, - ).build_compliant_name() - - env.EnvironmentDefaultAthenaWorkGroup = NamingConventionService( - target_uri=env.environmentUri, - target_label=env.label, - pattern=NamingConventionPattern.DEFAULT, - resource_prefix=env.resourcePrefix, - ).build_compliant_name() - - if not data.get('EnvironmentDefaultIAMRoleName'): - env_role_name = NamingConventionService( - target_uri=env.environmentUri, - target_label=env.label, - pattern=NamingConventionPattern.IAM, - resource_prefix=env.resourcePrefix, - ).build_compliant_name() - env.EnvironmentDefaultIAMRoleName = env_role_name - env.EnvironmentDefaultIAMRoleArn = ( - f'arn:aws:iam::{env.AwsAccountId}:role/{env_role_name}' - ) - env.EnvironmentDefaultIAMRoleImported = False - else: - env.EnvironmentDefaultIAMRoleName = data['EnvironmentDefaultIAMRoleName'] - env.EnvironmentDefaultIAMRoleArn = f'arn:aws:iam::{env.AwsAccountId}:role/{env.EnvironmentDefaultIAMRoleName}' - env.EnvironmentDefaultIAMRoleImported = True - - if data.get('vpcId'): - vpc = models.Vpc( - environmentUri=env.environmentUri, - region=env.region, - AwsAccountId=env.AwsAccountId, - VpcId=data.get('vpcId'), - privateSubnetIds=data.get('privateSubnetIds', []), - publicSubnetIds=data.get('publicSubnetIds', []), - SamlGroupName=data['SamlGroupName'], - owner=username, - 
label=f"{env.name}-{data.get('vpcId')}", - name=f"{env.name}-{data.get('vpcId')}", - default=True, - ) - session.add(vpc) - session.commit() - ResourcePolicy.attach_resource_policy( - session=session, - group=data['SamlGroupName'], - permissions=permissions.NETWORK_ALL, - resource_uri=vpc.vpcUri, - resource_type=models.Vpc.__name__, - ) - env_group = models.EnvironmentGroup( - environmentUri=env.environmentUri, - groupUri=data['SamlGroupName'], - groupRoleInEnvironment=EnvironmentPermission.Owner.value, - environmentIAMRoleArn=env.EnvironmentDefaultIAMRoleArn, - environmentIAMRoleName=env.EnvironmentDefaultIAMRoleName, - environmentAthenaWorkGroup=env.EnvironmentDefaultAthenaWorkGroup, - ) - session.add(env_group) - ResourcePolicy.attach_resource_policy( - session=session, - resource_uri=env.environmentUri, - group=data['SamlGroupName'], - permissions=permissions.ENVIRONMENT_ALL, - resource_type=models.Environment.__name__, - ) - session.commit() - - activity = models.Activity( - action='ENVIRONMENT:CREATE', - label='ENVIRONMENT:CREATE', - owner=username, - summary=f'{username} linked environment {env.AwsAccountId} to organization {organization.name}', - targetUri=env.environmentUri, - targetType='env', - ) - session.add(activity) - return env - - @staticmethod - def _validate_creation_params(data, uri): - if not uri: - raise exceptions.RequiredParameter('organizationUri') - if not data: - raise exceptions.RequiredParameter('data') - if not data.get('label'): - raise exceptions.RequiredParameter('label') - if not data.get('SamlGroupName'): - raise exceptions.RequiredParameter('group') - Environment._validate_resource_prefix(data) - - @staticmethod - def _validate_resource_prefix(data): - if data.get('resourcePrefix') and not bool( - re.match(r'^[a-z-]+$', data.get('resourcePrefix')) - ): - raise exceptions.InvalidInput( - 'resourcePrefix', - data.get('resourcePrefix'), - 'must match the pattern ^[a-z-]+$', - ) - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ENVIRONMENTS) - @has_resource_perm(permissions.UPDATE_ENVIRONMENT) - def update_environment(session, username, groups, uri, data=None, check_perm=None): - Environment._validate_resource_prefix(data) - environment = Environment.get_environment_by_uri(session, uri) - if data.get('label'): - environment.label = data.get('label') - if data.get('description'): - environment.description = data.get('description', 'No description provided') - if data.get('tags'): - environment.tags = data.get('tags') - if 'dashboardsEnabled' in data.keys(): - environment.dashboardsEnabled = data.get('dashboardsEnabled') - if 'notebooksEnabled' in data.keys(): - environment.notebooksEnabled = data.get('notebooksEnabled') - if 'mlStudiosEnabled' in data.keys(): - environment.mlStudiosEnabled = data.get('mlStudiosEnabled') - if 'pipelinesEnabled' in data.keys(): - environment.pipelinesEnabled = data.get('pipelinesEnabled') - if 'warehousesEnabled' in data.keys(): - environment.warehousesEnabled = data.get('warehousesEnabled') - if data.get('resourcePrefix'): - environment.resourcePrefix = data.get('resourcePrefix') - - ResourcePolicy.attach_resource_policy( - session=session, - resource_uri=environment.environmentUri, - group=environment.SamlGroupName, - permissions=permissions.ENVIRONMENT_ALL, - resource_type=models.Environment.__name__, - ) - return environment - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ENVIRONMENTS) - @has_resource_perm(permissions.INVITE_ENVIRONMENT_GROUP) - def invite_group( - session, username, groups, uri, data=None, 
check_perm=None - ) -> (models.Environment, models.EnvironmentGroup): - Environment.validate_invite_params(data) - - group: str = data['groupUri'] - - Environment.validate_permissions(session, uri, data['permissions'], group) - - environment = Environment.get_environment_by_uri(session, uri) - - group_membership = Environment.find_environment_group( - session, group, environment.environmentUri - ) - if group_membership: - raise exceptions.UnauthorizedOperation( - action='INVITE_TEAM', - message=f'Team {group} is already a member of the environment {environment.name}', - ) - - if data.get('environmentIAMRoleName'): - env_group_iam_role_name = data['environmentIAMRoleName'] - env_role_imported = True - else: - env_group_iam_role_name = NamingConventionService( - target_uri=environment.environmentUri, - target_label=group, - pattern=NamingConventionPattern.IAM, - resource_prefix=environment.resourcePrefix, - ).build_compliant_name() - env_role_imported = False - - athena_workgroup = NamingConventionService( - target_uri=environment.environmentUri, - target_label=group, - pattern=NamingConventionPattern.DEFAULT, - resource_prefix=environment.resourcePrefix, - ).build_compliant_name() - - environment_group = EnvironmentGroup( - environmentUri=environment.environmentUri, - groupUri=group, - invitedBy=username, - environmentIAMRoleName=env_group_iam_role_name, - environmentIAMRoleArn=f'arn:aws:iam::{environment.AwsAccountId}:role/{env_group_iam_role_name}', - environmentIAMRoleImported=env_role_imported, - environmentAthenaWorkGroup=athena_workgroup, - ) - session.add(environment_group) - session.commit() - ResourcePolicy.attach_resource_policy( - session=session, - group=group, - resource_uri=environment.environmentUri, - permissions=data['permissions'], - resource_type=models.Environment.__name__, - ) - return environment, environment_group - - @staticmethod - def validate_permissions(session, uri, g_permissions, group): - - if permissions.CREATE_DATASET in g_permissions: - g_permissions.append(permissions.LIST_ENVIRONMENT_DATASETS) - - if permissions.CREATE_REDSHIFT_CLUSTER in g_permissions: - g_permissions.append(permissions.LIST_ENVIRONMENT_REDSHIFT_CLUSTERS) - - if permissions.CREATE_NOTEBOOK in g_permissions: - g_permissions.append(permissions.LIST_ENVIRONMENT_NOTEBOOKS) - - if permissions.CREATE_SGMSTUDIO_NOTEBOOK in g_permissions: - g_permissions.append(permissions.LIST_ENVIRONMENT_SGMSTUDIO_NOTEBOOKS) - - if permissions.INVITE_ENVIRONMENT_GROUP in g_permissions: - g_permissions.append(permissions.LIST_ENVIRONMENT_GROUPS) - g_permissions.append(permissions.REMOVE_ENVIRONMENT_GROUP) - - if permissions.ADD_ENVIRONMENT_CONSUMPTION_ROLES in g_permissions: - g_permissions.append(permissions.LIST_ENVIRONMENT_CONSUMPTION_ROLES) - - if permissions.CREATE_SHARE_OBJECT in g_permissions: - g_permissions.append(permissions.LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) - - if permissions.CREATE_NETWORK in g_permissions: - g_permissions.append(permissions.LIST_ENVIRONMENT_NETWORKS) - - g_permissions.append(permissions.RUN_ATHENA_QUERY) - g_permissions.append(permissions.GET_ENVIRONMENT) - g_permissions.append(permissions.LIST_ENVIRONMENT_GROUPS) - g_permissions.append(permissions.LIST_ENVIRONMENT_GROUP_PERMISSIONS) - g_permissions.append(permissions.LIST_ENVIRONMENT_REDSHIFT_CLUSTERS) - g_permissions.append(permissions.LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) - g_permissions.append(permissions.LIST_ENVIRONMENT_NETWORKS) - g_permissions.append(permissions.CREDENTIALS_ENVIRONMENT) - - g_permissions = 
list(set(g_permissions)) - - if g_permissions not in permissions.ENVIRONMENT_INVITED: - exceptions.PermissionUnauthorized( - action='INVITE_TEAM', group_name=group, resource_uri=uri - ) - - env_group_permissions = [] - for p in g_permissions: - env_group_permissions.append( - Permission.find_permission_by_name( - session=session, - permission_name=p, - permission_type=PermissionType.RESOURCE.name, - ) - ) - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ENVIRONMENTS) - @has_resource_perm(permissions.REMOVE_ENVIRONMENT_GROUP) - def remove_group(session, username, groups, uri, data=None, check_perm=None): - if not data: - raise exceptions.RequiredParameter('data') - if not data.get('groupUri'): - raise exceptions.RequiredParameter('groupUri') - - group: str = data['groupUri'] - - environment = Environment.get_environment_by_uri(session, uri) - - if group == environment.SamlGroupName: - raise exceptions.UnauthorizedOperation( - action='REMOVE_TEAM', - message=f'Team: {group} is the owner of the environment {environment.name}', - ) - - group_env_objects_count = ( - session.query(models.Environment) - .outerjoin( - models.Dataset, - models.Dataset.environmentUri == models.Environment.environmentUri, - ) - .outerjoin( - models.SagemakerStudioUserProfile, - models.SagemakerStudioUserProfile.environmentUri - == models.Environment.environmentUri, - ) - .outerjoin( - models.RedshiftCluster, - models.RedshiftCluster.environmentUri - == models.Environment.environmentUri, - ) - .outerjoin( - models.DataPipeline, - models.DataPipeline.environmentUri == models.Environment.environmentUri, - ) - .outerjoin( - models.Dashboard, - models.Dashboard.environmentUri == models.Environment.environmentUri, - ) - .outerjoin( - models.WorksheetQueryResult, - models.WorksheetQueryResult.AwsAccountId - == models.Environment.AwsAccountId, - ) - .filter( - and_( - models.Environment.environmentUri == environment.environmentUri, - or_( - models.RedshiftCluster.SamlGroupName == group, - models.Dataset.SamlAdminGroupName == group, - models.SagemakerStudioUserProfile.SamlAdminGroupName == group, - models.DataPipeline.SamlGroupName == group, - models.Dashboard.SamlGroupName == group, - ), - ) - ) - .count() - ) - - if group_env_objects_count > 0: - raise exceptions.EnvironmentResourcesFound( - action='Remove Team', - message=f'Team: {group} has created {group_env_objects_count} resources on this environment.', - ) - - shares_count = ( - session.query(models.ShareObject) - .filter( - and_( - models.ShareObject.principalId == group, - models.ShareObject.principalType == PrincipalType.Group.value - ) - ) - .count() - ) - - if shares_count > 0: - raise exceptions.EnvironmentResourcesFound( - action='Remove Team', - message=f'Team: {group} has created {shares_count} share requests on this environment.', - ) - - group_membership = Environment.find_environment_group( - session, group, environment.environmentUri - ) - if group_membership: - session.delete(group_membership) - session.commit() - - ResourcePolicy.delete_resource_policy( - session=session, - group=group, - resource_uri=environment.environmentUri, - resource_type=models.Environment.__name__, - ) - return environment - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ENVIRONMENTS) - @has_resource_perm(permissions.UPDATE_ENVIRONMENT_GROUP) - def update_group_permissions( - session, username, groups, uri, data=None, check_perm=None - ): - Environment.validate_invite_params(data) - - group = data['groupUri'] - - Environment.validate_permissions(session, uri, 
data['permissions'], group) - - environment = Environment.get_environment_by_uri(session, uri) - - group_membership = Environment.find_environment_group( - session, group, environment.environmentUri - ) - if not group_membership: - raise exceptions.UnauthorizedOperation( - action='UPDATE_TEAM_ENVIRONMENT_PERMISSIONS', - message=f'Team {group.name} is not a member of the environment {environment.name}', - ) - - ResourcePolicy.delete_resource_policy( - session=session, - group=group, - resource_uri=environment.environmentUri, - resource_type=models.Environment.__name__, - ) - ResourcePolicy.attach_resource_policy( - session=session, - group=group, - resource_uri=environment.environmentUri, - permissions=data['permissions'], - resource_type=models.Environment.__name__, - ) - return environment - - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_GROUP_PERMISSIONS) - def list_group_permissions( - session, username, groups, uri, data=None, check_perm=None - ): - if not data: - raise exceptions.RequiredParameter('data') - if not data.get('groupUri'): - raise exceptions.RequiredParameter('groupUri') - - environment = Environment.get_environment_by_uri(session, uri) - - return ResourcePolicy.get_resource_policy_permissions( - session=session, - group_uri=data['groupUri'], - resource_uri=environment.environmentUri, - ) - - @staticmethod - def list_group_invitation_permissions( - session, username, groups, uri, data=None, check_perm=None - ): - group_invitation_permissions = [] - for p in permissions.ENVIRONMENT_INVITATION_REQUEST: - group_invitation_permissions.append( - Permission.find_permission_by_name( - session=session, - permission_name=p, - permission_type=PermissionType.RESOURCE.name, - ) - ) - return group_invitation_permissions - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ENVIRONMENTS) - @has_resource_perm(permissions.ADD_ENVIRONMENT_CONSUMPTION_ROLES) - def add_consumption_role( - session, username, groups, uri, data=None, check_perm=None - ) -> (models.Environment, models.EnvironmentGroup): - - group: str = data['groupUri'] - IAMRoleArn: str = data['IAMRoleArn'] - environment = Environment.get_environment_by_uri(session, uri) - - alreadyAdded = Environment.find_consumption_roles_by_IAMArn( - session, environment.environmentUri, IAMRoleArn - ) - if alreadyAdded: - raise exceptions.UnauthorizedOperation( - action='ADD_CONSUMPTION_ROLE', - message=f'IAM role {IAMRoleArn} is already added to the environment {environment.name}', - ) - - consumption_role = models.ConsumptionRole( - consumptionRoleName=data['consumptionRoleName'], - environmentUri=environment.environmentUri, - groupUri=group, - IAMRoleArn=IAMRoleArn, - IAMRoleName=IAMRoleArn.split("/")[-1], - ) - - session.add(consumption_role) - session.commit() - - ResourcePolicy.attach_resource_policy( - session=session, - group=group, - resource_uri=consumption_role.consumptionRoleUri, - permissions=permissions.CONSUMPTION_ROLE_ALL, - resource_type=models.ConsumptionRole.__name__, - ) - return consumption_role - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ENVIRONMENTS) - @has_resource_perm(permissions.REMOVE_ENVIRONMENT_CONSUMPTION_ROLE) - def remove_consumption_role(session, username, groups, uri, data=None, check_perm=None): - if not data: - raise exceptions.RequiredParameter('data') - if not uri: - raise exceptions.RequiredParameter('consumptionRoleUri') - - consumption_role = Environment.get_environment_consumption_role(session, uri, data.get('environmentUri')) - - shares_count = ( - 
session.query(models.ShareObject) - .filter( - and_( - models.ShareObject.principalId == uri, - models.ShareObject.principalType == PrincipalType.ConsumptionRole.value - ) - ) - .count() - ) - - if shares_count > 0: - raise exceptions.EnvironmentResourcesFound( - action='Remove Consumption Role', - message=f'Consumption role: {consumption_role.consumptionRoleName} has created {shares_count} share requests on this environment.', - ) - - if consumption_role: - session.delete(consumption_role) - session.commit() - - ResourcePolicy.delete_resource_policy( - session=session, - group=consumption_role.groupUri, - resource_uri=consumption_role.consumptionRoleUri, - resource_type=models.ConsumptionRole.__name__, - ) - return True - - @staticmethod - def query_user_environments(session, username, groups, filter) -> Query: - query = ( - session.query(models.Environment) - .outerjoin( - models.EnvironmentGroup, - models.Environment.environmentUri - == models.EnvironmentGroup.environmentUri, - ) - .filter( - or_( - models.Environment.owner == username, - models.EnvironmentGroup.groupUri.in_(groups), - ) - ) - ) - if filter and filter.get('term'): - term = filter['term'] - query = query.filter( - or_( - models.Environment.label.ilike('%' + term + '%'), - models.Environment.description.ilike('%' + term + '%'), - models.Environment.tags.contains(f'{{{term}}}'), - models.Environment.region.ilike('%' + term + '%'), - ) - ) - return query - - @staticmethod - def paginated_user_environments( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Environment.query_user_environments(session, username, groups, data), - page=data.get('page', 1), - page_size=data.get('pageSize', 5), - ).to_dict() - - @staticmethod - def query_user_environment_groups(session, username, groups, uri, filter) -> Query: - query = ( - session.query(models.EnvironmentGroup) - .filter(models.EnvironmentGroup.environmentUri == uri) - .filter(models.EnvironmentGroup.groupUri.in_(groups)) - ) - if filter and filter.get('term'): - term = filter['term'] - query = query.filter( - or_( - models.EnvironmentGroup.groupUri.ilike('%' + term + '%'), - ) - ) - return query - - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_GROUPS) - def paginated_user_environment_groups( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Environment.query_user_environment_groups( - session, username, groups, uri, data - ), - page=data.get('page', 1), - page_size=data.get('pageSize', 1000), - ).to_dict() - - @staticmethod - def query_all_environment_groups(session, uri, filter) -> Query: - query = session.query(models.EnvironmentGroup).filter( - models.EnvironmentGroup.environmentUri == uri - ) - if filter and filter.get('term'): - term = filter['term'] - query = query.filter( - or_( - models.EnvironmentGroup.groupUri.ilike('%' + term + '%'), - ) - ) - return query - - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_GROUPS) - def paginated_all_environment_groups( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Environment.query_all_environment_groups( - session, uri, data - ), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_GROUPS) - def list_environment_groups( - session, username, groups, uri, data=None, check_perm=None - ) -> [str]: - return [ - g.groupUri - for g in 
Environment.query_user_environment_groups( - session, username, groups, uri, data - ).all() - ] - - @staticmethod - def query_environment_invited_groups( - session, username, groups, uri, filter - ) -> Query: - query = ( - session.query(models.EnvironmentGroup) - .join( - models.Environment, - models.EnvironmentGroup.environmentUri - == models.Environment.environmentUri, - ) - .filter( - and_( - models.Environment.environmentUri == uri, - models.EnvironmentGroup.groupUri - != models.Environment.SamlGroupName, - ) - ) - ) - if filter and filter.get('term'): - term = filter['term'] - query = query.filter( - or_( - models.EnvironmentGroup.groupUri.ilike('%' + term + '%'), - ) - ) - return query - - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_GROUPS) - def paginated_environment_invited_groups( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Environment.query_environment_invited_groups( - session, username, groups, uri, data - ), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_GROUPS) - def list_environment_invited_groups( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return Environment.query_environment_invited_groups( - session, username, groups, uri, data - ).all() - - @staticmethod - def query_user_environment_consumption_roles(session, username, groups, uri, filter) -> Query: - query = ( - session.query(models.ConsumptionRole) - .filter(models.ConsumptionRole.environmentUri == uri) - .filter(models.ConsumptionRole.groupUri.in_(groups)) - ) - if filter and filter.get('term'): - term = filter['term'] - query = query.filter( - or_( - models.ConsumptionRole.consumptionRoleName.ilike('%' + term + '%'), - ) - ) - if filter and filter.get('groupUri'): - print("filter group") - group = filter['groupUri'] - query = query.filter( - or_( - models.ConsumptionRole.groupUri == group, - ) - ) - return query - - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_CONSUMPTION_ROLES) - def paginated_user_environment_consumption_roles( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Environment.query_user_environment_consumption_roles( - session, username, groups, uri, data - ), - page=data.get('page', 1), - page_size=data.get('pageSize', 1000), - ).to_dict() - - @staticmethod - def query_all_environment_consumption_roles(session, username, groups, uri, filter) -> Query: - query = session.query(models.ConsumptionRole).filter( - models.ConsumptionRole.environmentUri == uri - ) - if filter and filter.get('term'): - term = filter['term'] - query = query.filter( - or_( - models.ConsumptionRole.consumptionRoleName.ilike('%' + term + '%'), - ) - ) - if filter and filter.get('groupUri'): - group = filter['groupUri'] - query = query.filter( - or_( - models.ConsumptionRole.groupUri == group, - ) - ) - return query - - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_CONSUMPTION_ROLES) - def paginated_all_environment_consumption_roles( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Environment.query_all_environment_consumption_roles( - session, username, groups, uri, data - ), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_CONSUMPTION_ROLES) - def list_environment_consumption_roles( - 
session, username, groups, uri, data=None, check_perm=None - ) -> [str]: - return [ - {"value": g.IAMRoleArn, "label": g.consumptionRoleName} - for g in Environment.query_user_environment_consumption_roles( - session, username, groups, uri, data - ).all() - ] - - @staticmethod - def find_consumption_roles_by_IAMArn( - session, uri, arn - ) -> Query: - return session.query(models.ConsumptionRole).filter( - and_( - models.ConsumptionRole.environmentUri == uri, - models.ConsumptionRole.IAMRoleArn == arn - ) - ).first() - - @staticmethod - def query_environment_datasets(session, username, groups, uri, filter) -> Query: - query = session.query(models.Dataset).filter( - and_( - models.Dataset.environmentUri == uri, - models.Dataset.deleted.is_(None), - ) - ) - if filter and filter.get('term'): - term = filter['term'] - query = query.filter( - or_( - models.Dataset.label.ilike('%' + term + '%'), - models.Dataset.description.ilike('%' + term + '%'), - models.Dataset.tags.contains(f'{{{term}}}'), - models.Dataset.region.ilike('%' + term + '%'), - ) - ) - return query - - @staticmethod - def query_environment_group_datasets(session, username, groups, envUri, groupUri, filter) -> Query: - query = session.query(models.Dataset).filter( - and_( - models.Dataset.environmentUri == envUri, - models.Dataset.SamlAdminGroupName == groupUri, - models.Dataset.deleted.is_(None), - ) - ) - if filter and filter.get('term'): - term = filter['term'] - query = query.filter( - or_( - models.Dataset.label.ilike('%' + term + '%'), - models.Dataset.description.ilike('%' + term + '%'), - models.Dataset.tags.contains(f'{{{term}}}'), - models.Dataset.region.ilike('%' + term + '%'), - ) - ) - return query - - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_DATASETS) - def paginated_environment_datasets( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Environment.query_environment_datasets( - session, username, groups, uri, data - ), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - def paginated_environment_group_datasets( - session, username, groups, envUri, groupUri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Environment.query_environment_group_datasets( - session, username, groups, envUri, groupUri, data - ), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) - def paginated_shared_with_environment_datasets( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() - q = ( - session.query( - models.ShareObjectItem.shareUri.label('shareUri'), - models.Dataset.datasetUri.label('datasetUri'), - models.Dataset.name.label('datasetName'), - models.Dataset.description.label('datasetDescription'), - models.Environment.environmentUri.label('environmentUri'), - models.Environment.name.label('environmentName'), - models.ShareObject.created.label('created'), - models.ShareObject.principalId.label('principalId'), - models.ShareObject.principalType.label('principalType'), - models.ShareObjectItem.itemType.label('itemType'), - models.ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), - models.ShareObjectItem.GlueTableName.label('GlueTableName'), - models.ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), - models.Organization.organizationUri.label('organizationUri'), - 
models.Organization.name.label('organizationName'), - case( - [ - ( - models.ShareObjectItem.itemType - == ShareableType.Table.value, - func.concat( - models.DatasetTable.GlueDatabaseName, - '.', - models.DatasetTable.GlueTableName, - ), - ), - ( - models.ShareObjectItem.itemType - == ShareableType.StorageLocation.value, - func.concat(models.DatasetStorageLocation.name), - ), - ], - else_='XXX XXXX', - ).label('itemAccess'), - ) - .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, - ) - .join( - models.Dataset, - models.ShareObject.datasetUri == models.Dataset.datasetUri, - ) - .join( - models.Environment, - models.Environment.environmentUri == models.Dataset.environmentUri, - ) - .join( - models.Organization, - models.Organization.organizationUri - == models.Environment.organizationUri, - ) - .outerjoin( - models.DatasetTable, - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, - ) - .outerjoin( - models.DatasetStorageLocation, - models.ShareObjectItem.itemUri - == models.DatasetStorageLocation.locationUri, - ) - .filter( - and_( - models.ShareObjectItem.status.in_(share_item_shared_states), - models.ShareObject.environmentUri == uri, - ) - ) - ) - - if data.get('datasetUri'): - datasetUri = data.get('datasetUri') - q = q.filter(models.ShareObject.datasetUri == datasetUri) - - if data.get('itemTypes', None): - itemTypes = data.get('itemTypes') - q = q.filter( - or_(*[models.ShareObjectItem.itemType == t for t in itemTypes]) - ) - - if data.get("uniqueShares", False): - q = q.filter(models.ShareObject.principalType != PrincipalType.ConsumptionRole.value) - q = q.distinct(models.ShareObject.shareUri) - - if data.get('term'): - term = data.get('term') - q = q.filter(models.ShareObjectItem.itemName.ilike('%' + term + '%')) - - return paginate( - query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() - - @staticmethod - def paginated_shared_with_environment_group_datasets( - session, username, groups, envUri, groupUri, data=None, check_perm=None - ) -> dict: - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() - q = ( - session.query( - models.ShareObjectItem.shareUri.label('shareUri'), - models.Dataset.datasetUri.label('datasetUri'), - models.Dataset.name.label('datasetName'), - models.Dataset.description.label('datasetDescription'), - models.Environment.environmentUri.label('environmentUri'), - models.Environment.name.label('environmentName'), - models.ShareObject.created.label('created'), - models.ShareObject.principalId.label('principalId'), - models.ShareObjectItem.itemType.label('itemType'), - models.ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), - models.ShareObjectItem.GlueTableName.label('GlueTableName'), - models.ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), - models.Organization.organizationUri.label('organizationUri'), - models.Organization.name.label('organizationName'), - case( - [ - ( - models.ShareObjectItem.itemType - == ShareableType.Table.value, - func.concat( - models.DatasetTable.GlueDatabaseName, - '.', - models.DatasetTable.GlueTableName, - ), - ), - ( - models.ShareObjectItem.itemType - == ShareableType.StorageLocation.value, - func.concat(models.DatasetStorageLocation.name), - ), - ], - else_='XXX XXXX', - ).label('itemAccess'), - ) - .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, - ) - .join( - models.Dataset, - models.ShareObject.datasetUri == models.Dataset.datasetUri, - ) - .join( 
- models.Environment, - models.Environment.environmentUri == models.Dataset.environmentUri, - ) - .join( - models.Organization, - models.Organization.organizationUri - == models.Environment.organizationUri, - ) - .outerjoin( - models.DatasetTable, - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, - ) - .outerjoin( - models.DatasetStorageLocation, - models.ShareObjectItem.itemUri - == models.DatasetStorageLocation.locationUri, - ) - .filter( - and_( - models.ShareObjectItem.status.in_(share_item_shared_states), - models.ShareObject.environmentUri == envUri, - models.ShareObject.principalId == groupUri, - ) - ) - ) - - if data.get('datasetUri'): - datasetUri = data.get('datasetUri') - q = q.filter(models.ShareObject.datasetUri == datasetUri) - - if data.get('itemTypes', None): - itemTypes = data.get('itemTypes') - q = q.filter( - or_(*[models.ShareObjectItem.itemType == t for t in itemTypes]) - ) - if data.get('term'): - term = data.get('term') - q = q.filter(models.ShareObjectItem.itemName.ilike('%' + term + '%')) - - return paginate( - query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() - - @staticmethod - def query_environment_networks(session, username, groups, uri, filter) -> Query: - query = session.query(models.Vpc).filter( - models.Vpc.environmentUri == uri, - ) - if filter.get('term'): - term = filter.get('term') - query = query.filter( - or_( - models.Vpc.label.ilike('%' + term + '%'), - models.Vpc.VpcId.ilike('%' + term + '%'), - ) - ) - return query - - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_NETWORKS) - def paginated_environment_networks( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Environment.query_environment_networks( - session, username, groups, uri, data - ), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_DATASETS) - def paginated_environment_data_items( - session, username, groups, uri, data=None, check_perm=None - ): - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() - q = ( - session.query( - models.ShareObjectItem.shareUri.label('shareUri'), - models.Dataset.datasetUri.label('datasetUri'), - models.Dataset.name.label('datasetName'), - models.Dataset.description.label('datasetDescription'), - models.Environment.environmentUri.label('environmentUri'), - models.Environment.name.label('environmentName'), - models.ShareObject.created.label('created'), - models.ShareObjectItem.itemType.label('itemType'), - models.ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), - models.ShareObjectItem.GlueTableName.label('GlueTableName'), - models.ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), - models.Organization.organizationUri.label('organizationUri'), - models.Organization.name.label('organizationName'), - case( - [ - ( - models.ShareObjectItem.itemType - == ShareableType.Table.value, - func.concat( - models.DatasetTable.GlueDatabaseName, - '.', - models.DatasetTable.GlueTableName, - ), - ), - ( - models.ShareObjectItem.itemType - == ShareableType.StorageLocation.value, - func.concat(models.DatasetStorageLocation.name), - ), - ], - else_='XXX XXXX', - ).label('itemAccess'), - ) - .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, - ) - .join( - models.Dataset, - models.ShareObject.datasetUri == models.Dataset.datasetUri, - ) - .join( - models.Environment, - 
models.Environment.environmentUri == models.Dataset.environmentUri, - ) - .join( - models.Organization, - models.Organization.organizationUri - == models.Environment.organizationUri, - ) - .outerjoin( - models.DatasetTable, - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, - ) - .outerjoin( - models.DatasetStorageLocation, - models.ShareObjectItem.itemUri - == models.DatasetStorageLocation.locationUri, - ) - .filter( - and_( - models.ShareObjectItem.status.in_(share_item_shared_states), - models.ShareObject.environmentUri == uri, - ) - ) - ) - - if data.get('datasetUri'): - datasetUri = data.get('datasetUri') - q = q.filter(models.ShareObject.datasetUri == datasetUri) - - if data.get('itemTypes', None): - itemTypes = data.get('itemTypes') - q = q.filter( - or_(*[models.ShareObjectItem.itemType == t for t in itemTypes]) - ) - if data.get('term'): - term = data.get('term') - q = q.filter(models.ShareObjectItem.itemName.ilike('%' + term + '%')) - - return paginate( - query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() - - @staticmethod - def validate_invite_params(data): - if not data: - raise exceptions.RequiredParameter('data') - if not data.get('groupUri'): - raise exceptions.RequiredParameter('groupUri') - if not data.get('permissions'): - raise exceptions.RequiredParameter('permissions') - - @staticmethod - def find_environment_group(session, group_uri, environment_uri): - try: - env_group = Environment.get_environment_group(session, group_uri, environment_uri) - return env_group - except Exception: - return None - - @staticmethod - def get_environment_group(session, group_uri, environment_uri): - env_group = ( - session.query(models.EnvironmentGroup) - .filter( - ( - and_( - models.EnvironmentGroup.groupUri == group_uri, - models.EnvironmentGroup.environmentUri == environment_uri, - ) - ) - ) - .first() - ) - if not env_group: - raise exceptions.ObjectNotFound( - 'EnvironmentGroup', f'({group_uri},{environment_uri})' - ) - return env_group - - @staticmethod - def get_environment_consumption_role(session, role_uri, environment_uri): - role = ( - session.query(models.ConsumptionRole) - .filter( - ( - and_( - models.ConsumptionRole.consumptionRoleUri == role_uri, - models.ConsumptionRole.environmentUri == environment_uri, - ) - ) - ) - .first() - ) - if not role: - raise exceptions.ObjectNotFound( - 'ConsumptionRoleUri', f'({role_uri},{environment_uri})' - ) - return role - - @staticmethod - def get_environment_by_uri(session, uri) -> models.Environment: - if not uri: - raise exceptions.RequiredParameter('environmentUri') - environment: models.Environment = Environment.find_environment_by_uri( - session, uri - ) - if not environment: - raise exceptions.ObjectNotFound(models.Environment.__name__, uri) - return environment - - @staticmethod - def find_environment_by_uri(session, uri) -> models.Environment: - if not uri: - raise exceptions.RequiredParameter('environmentUri') - environment: models.Environment = session.query(models.Environment).get(uri) - return environment - - @staticmethod - def list_all_active_environments(session) -> [models.Environment]: - """ - Lists all active dataall environments - :param session: - :return: [models.Environment] - """ - environments: [models.Environment] = ( - session.query(models.Environment) - .filter(models.Environment.deleted.is_(None)) - .all() - ) - log.info( - f'Retrieved all active dataall environments {[e.AwsAccountId for e in environments]}' - ) - return environments - - @staticmethod - def 
list_environment_redshift_clusters_query(session, environment_uri, filter): - q = session.query(models.RedshiftCluster).filter( - models.RedshiftCluster.environmentUri == environment_uri - ) - term = filter.get('term', None) - if term: - q = q.filter( - or_( - models.RedshiftCluster.label.ilike('%' + term + '%'), - models.RedshiftCluster.description.ilike('%' + term + '%'), - ) - ) - return q - - @staticmethod - @has_resource_perm(permissions.LIST_ENVIRONMENT_REDSHIFT_CLUSTERS) - def paginated_environment_redshift_clusters( - session, username, groups, uri, data=None, check_perm=None - ): - query = Environment.list_environment_redshift_clusters_query(session, uri, data) - return paginate( - query=query, - page_size=data.get('pageSize', 10), - page=data.get('page', 1), - ).to_dict() - - @staticmethod - def list_environment_objects(session, environment_uri): - environment_objects = [] - datasets = ( - session.query(models.Dataset.label, models.Dataset.datasetUri) - .filter(models.Dataset.environmentUri == environment_uri) - .all() - ) - notebooks = ( - session.query( - models.SagemakerNotebook.label, - models.SagemakerNotebook.notebookUri, - ) - .filter(models.SagemakerNotebook.environmentUri == environment_uri) - .all() - ) - ml_studios = ( - session.query( - models.SagemakerStudioUserProfile.label, - models.SagemakerStudioUserProfile.sagemakerStudioUserProfileUri, - ) - .filter(models.SagemakerStudioUserProfile.environmentUri == environment_uri) - .all() - ) - redshift_clusters = ( - session.query( - models.RedshiftCluster.label, models.RedshiftCluster.clusterUri - ) - .filter(models.RedshiftCluster.environmentUri == environment_uri) - .all() - ) - pipelines = ( - session.query(models.DataPipeline.label, models.DataPipeline.DataPipelineUri) - .filter(models.DataPipeline.environmentUri == environment_uri) - .all() - ) - dashboards = ( - session.query(models.Dashboard.label, models.Dashboard.dashboardUri) - .filter(models.Dashboard.environmentUri == environment_uri) - .all() - ) - if datasets: - environment_objects.append({'type': 'Datasets', 'data': datasets}) - if notebooks: - environment_objects.append({'type': 'Notebooks', 'data': notebooks}) - if ml_studios: - environment_objects.append({'type': 'MLStudios', 'data': ml_studios}) - if redshift_clusters: - environment_objects.append( - {'type': 'RedshiftClusters', 'data': redshift_clusters} - ) - if pipelines: - environment_objects.append({'type': 'Pipelines', 'data': pipelines}) - if dashboards: - environment_objects.append({'type': 'Dashboards', 'data': dashboards}) - return environment_objects - - @staticmethod - def list_group_datasets(session, username, groups, uri, data=None, check_perm=None): - if not data: - raise exceptions.RequiredParameter('data') - if not data.get('groupUri'): - raise exceptions.RequiredParameter('groupUri') - - return ( - session.query(models.Dataset) - .filter( - and_( - models.Dataset.environmentUri == uri, - models.Dataset.SamlAdminGroupName == data['groupUri'], - ) - ) - .all() - ) - - @staticmethod - @has_resource_perm(permissions.GET_ENVIRONMENT) - def get_stack( - session, username, groups, uri, data=None, check_perm=None - ) -> models.Stack: - return session.query(models.Stack).get(data['stackUri']) - - @staticmethod - def delete_environment(session, username, groups, uri, data=None, check_perm=None): - environment = data.get( - 'environment', Environment.get_environment_by_uri(session, uri) - ) - - environment_objects = Environment.list_environment_objects(session, uri) - - if environment_objects: - 
raise exceptions.EnvironmentResourcesFound( - action='Delete Environment', - message='Delete all environment related objects before proceeding', - ) - - env_groups = ( - session.query(models.EnvironmentGroup) - .filter(models.EnvironmentGroup.environmentUri == uri) - .all() - ) - for group in env_groups: - - session.delete(group) - - ResourcePolicy.delete_resource_policy( - session=session, - resource_uri=uri, - group=group.groupUri, - ) - - env_roles = ( - session.query(models.ConsumptionRole) - .filter(models.ConsumptionRole.environmentUri == uri) - .all() - ) - for role in env_roles: - session.delete(role) - - KeyValueTag.delete_key_value_tags( - session, environment.environmentUri, 'environment' - ) - - env_shared_with_objects = ( - session.query(models.ShareObject) - .filter(models.ShareObject.environmentUri == environment.environmentUri) - .all() - ) - for share in env_shared_with_objects: - ( - session.query(models.ShareObjectItem) - .filter(models.ShareObjectItem.shareUri == share.shareUri) - .delete() - ) - session.delete(share) - - return session.delete(environment) - - @staticmethod - def check_group_environment_membership( - session, environment_uri, group, username, user_groups, permission_name - ): - if group and group not in user_groups: - raise exceptions.UnauthorizedOperation( - action=permission_name, - message=f'User: {username} is not a member of the team {group}', - ) - if group not in Environment.list_environment_groups( - session=session, - username=username, - groups=user_groups, - uri=environment_uri, - data={}, - check_perm=True, - ): - raise exceptions.UnauthorizedOperation( - action=permission_name, - message=f'Team: {group} is not a member of the environment {environment_uri}', - ) - - @staticmethod - def check_group_environment_permission( - session, username, groups, uri, group, permission_name - ): - - Environment.check_group_environment_membership( - session=session, - username=username, - user_groups=groups, - group=group, - environment_uri=uri, - permission_name=permission_name, - ) - - ResourcePolicy.check_user_resource_permission( - session=session, - username=username, - groups=[group], - resource_uri=uri, - permission_name=permission_name, - ) diff --git a/backend/dataall/db/api/glossary.py b/backend/dataall/db/api/glossary.py deleted file mode 100644 index 1616141c8..000000000 --- a/backend/dataall/db/api/glossary.py +++ /dev/null @@ -1,487 +0,0 @@ -import logging -from datetime import datetime - -from sqlalchemy import asc, or_, and_, literal, case -from sqlalchemy.orm import with_expression, aliased - -from .. 
import models, exceptions, permissions, paginate -from .permission_checker import ( - has_tenant_perm, -) -from ..models.Glossary import GlossaryNodeStatus - -logger = logging.getLogger(__name__) - - -class Glossary: - @staticmethod - @has_tenant_perm(permissions.MANAGE_GLOSSARIES) - def create_glossary(session, username, groups, uri, data=None, check_perm=None): - Glossary.validate_params(data) - g: models.GlossaryNode = models.GlossaryNode( - label=data.get('label'), - nodeType='G', - parentUri='', - path='/', - readme=data.get('readme', 'no description available'), - owner=username, - admin=data.get('admin'), - status=GlossaryNodeStatus.approved.value, - ) - session.add(g) - session.commit() - g.path = f'/{g.nodeUri}' - return g - - @staticmethod - @has_tenant_perm(permissions.MANAGE_GLOSSARIES) - def create_category(session, username, groups, uri, data=None, check_perm=None): - Glossary.validate_params(data) - parent: models.GlossaryNode = session.query(models.GlossaryNode).get(uri) - if not parent: - raise exceptions.ObjectNotFound('Glossary', uri) - - cat = models.GlossaryNode( - path=parent.path, - parentUri=parent.nodeUri, - nodeType='C', - label=data.get('label'), - owner=username, - readme=data.get('readme'), - ) - session.add(cat) - session.commit() - cat.path = parent.path + '/' + cat.nodeUri - return cat - - @staticmethod - @has_tenant_perm(permissions.MANAGE_GLOSSARIES) - def create_term(session, username, groups, uri, data=None, check_perm=None): - Glossary.validate_params(data) - parent: models.GlossaryNode = session.query(models.GlossaryNode).get(uri) - if not parent: - raise exceptions.ObjectNotFound('Glossary or Category', uri) - if parent.nodeType not in ['G', 'C']: - raise exceptions.InvalidInput( - 'Term', uri, 'Category or Glossary are required to create a term' - ) - - term = models.GlossaryNode( - path=parent.path, - parentUri=parent.nodeUri, - nodeType='T', - label=data.get('label'), - readme=data.get('readme'), - owner=username, - ) - session.add(term) - session.commit() - term.path = parent.path + '/' + term.nodeUri - return term - - @staticmethod - @has_tenant_perm(permissions.MANAGE_GLOSSARIES) - def delete_node(session, username, groups, uri, data=None, check_perm=None): - count = 0 - node: models.GlossaryNode = session.query(models.GlossaryNode).get(uri) - if not node: - raise exceptions.ObjectNotFound('Node', uri) - node.deleted = datetime.now() - if node.nodeType in ['G', 'C']: - children = session.query(models.GlossaryNode).filter( - and_( - models.GlossaryNode.path.startswith(node.path), - models.GlossaryNode.deleted.is_(None), - ) - ) - count = children.count() + 1 - children.update({'deleted': datetime.now()}, synchronize_session=False) - else: - count = 1 - return count - - @staticmethod - @has_tenant_perm(permissions.MANAGE_GLOSSARIES) - def update_node(session, username, groups, uri, data=None, check_perm=None): - node: models.GlossaryNode = session.query(models.GlossaryNode).get(uri) - if not node: - raise exceptions.ObjectNotFound('Node', uri) - for k in data.keys(): - setattr(node, k, data.get(k)) - return node - - @staticmethod - @has_tenant_perm(permissions.MANAGE_GLOSSARIES) - def link_term(session, username, groups, uri, data=None, check_perm=None): - term: models.GlossaryNode = session.query(models.GlossaryNode).get(uri) - if not term: - raise exceptions.ObjectNotFound('Node', uri) - if term.nodeType != 'T': - raise exceptions.InvalidInput( - 'NodeType', - 'term.nodeType', - 'associations are allowed for Glossary terms only', - ) - - 
targetUri: str = data['targetUri'] - targetType: str = data['targetType'] - - if targetType == 'Dataset': - target = session.query(models.Dataset).get(targetUri) - elif targetType == 'DatasetTable': - target = session.query(models.DatasetTable).get(targetUri) - elif targetType == 'Folder': - target = session.query(models.DatasetStorageLocation).get(targetUri) - elif targetType == 'Column': - target = session.query(models.DatasetTableColumn).get(targetUri) - elif targetType == 'Dashboard': - target = session.query(models.Dashboard).get(targetUri) - else: - raise exceptions.InvalidInput( - 'NodeType', 'term.nodeType', 'association target type is invalid' - ) - - if not target: - raise exceptions.ObjectNotFound('Association target', uri) - - link = models.TermLink( - owner=username, - approvedByOwner=data.get('approvedByOwner', True), - approvedBySteward=data.get('approvedBySteward', True), - nodeUri=uri, - targetUri=targetUri, - targetType=targetType, - ) - session.add(link) - return link - - @staticmethod - def list_glossaries(session, username, groups, uri, data=None, check_perm=None): - q = session.query(models.GlossaryNode).filter( - models.GlossaryNode.nodeType == 'G', models.GlossaryNode.deleted.is_(None) - ) - term = data.get('term') - if term: - q = q.filter( - or_( - models.GlossaryNode.label.ilike('%' + term + '%'), - models.GlossaryNode.readme.ilike('%' + term + '%'), - ) - ) - return paginate( - q, page_size=data.get('pageSize', 10), page=data.get('page', 1) - ).to_dict() - - @staticmethod - def list_categories(session, username, groups, uri, data=None, check_perm=None): - q = session.query(models.GlossaryNode).filter( - and_( - models.GlossaryNode.parentUri == uri, - models.GlossaryNode.nodeType == 'C', - models.GlossaryNode.deleted.is_(None), - ) - ) - - term = data.get('term') - if term: - q = q.filter( - or_( - models.GlossaryNode.label.ilike(term), - models.GlossaryNode.readme.ilike(term), - ) - ) - return paginate( - q, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() - - @staticmethod - def list_terms(session, username, groups, uri, data=None, check_perm=None): - q = session.query(models.GlossaryNode).filter( - and_( - models.GlossaryNode.parentUri == uri, - models.GlossaryNode.nodeType == 'T', - models.GlossaryNode.deleted.is_(None), - ) - ) - term = data.get('term') - if term: - q = q.filter( - or_( - models.GlossaryNode.label.ilike(term), - models.GlossaryNode.readme.ilike(term), - ) - ) - return paginate( - q, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() - - @staticmethod - def hierarchical_search(session, username, groups, uri, data=None, check_perm=None): - q = session.query(models.GlossaryNode).options( - with_expression(models.GlossaryNode.isMatch, literal(True)) - ) - q = q.filter(models.GlossaryNode.deleted.is_(None)) - term = data.get('term', None) - if term: - q = q.filter( - or_( - models.GlossaryNode.label.ilike('%' + term.upper() + '%'), - models.GlossaryNode.readme.ilike('%' + term.upper() + '%'), - ) - ) - matches = q.subquery('matches') - parents = aliased(models.GlossaryNode, name='parents') - children = aliased(models.GlossaryNode, name='children') - - if term: - parent_expr = case( - [ - ( - or_( - parents.label.ilike(f'%{term}%'), - parents.readme.ilike(f'%{term}%'), - ) - ) - ], - else_=literal(False), - ) - else: - parent_expr = literal(False) - - ascendants = ( - session.query(parents) - .options(with_expression(parents.isMatch, parent_expr)) - .join( - and_( - matches, - 
matches.c.path.startswith(parents.path), - matches, - matches.c.deleted.is_(None), - ) - ) - ) - - if term: - child_expr = case( - [ - ( - or_( - children.label.ilike(f'%{term}%'), - children.readme.ilike(f'%{term}%'), - ), - and_(children.deleted.is_(None)), - ) - ], - else_=literal(False), - ) - else: - child_expr = literal(False) - - descendants = ( - session.query(children) - .options(with_expression(children.isMatch, child_expr)) - .join( - matches, - children.path.startswith(matches.c.path), - ) - ) - - all = ascendants.union(descendants) - q = all.order_by(models.GlossaryNode.path) - - return paginate( - q, page=data.get('page', 1), page_size=data.get('pageSize', 100) - ).to_dict() - - @staticmethod - def search_terms(session, username, groups, uri, data=None, check_perm=None): - q = session.query(models.GlossaryNode).filter( - models.GlossaryNode.deleted.is_(None) - ) - term = data.get('term') - if term: - q = q.filter( - or_( - models.GlossaryNode.label.ilike(term), - models.GlossaryNode.readme.ilike(term), - ) - ) - q = q.order_by(asc(models.GlossaryNode.path)) - return paginate( - q, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() - - @staticmethod - def validate_params(data): - if not data: - exceptions.RequiredParameter('data') - if not data.get('label'): - exceptions.RequiredParameter('name') - - @staticmethod - def list_node_children(session, source, filter): - q = ( - session.query(models.GlossaryNode) - .filter(models.GlossaryNode.path.startswith(source.path + '/')) - .order_by(asc(models.GlossaryNode.path)) - ) - term = filter.get('term') - nodeType = filter.get('nodeType') - if term: - q = q.filter( - or_( - models.GlossaryNode.label.ilike(term), - models.GlossaryNode.readme.ilike(term), - ) - ) - if nodeType: - q = q.filter(models.GlossaryNode.nodeType == nodeType) - return paginate( - q, page_size=filter.get('pageSize', 10), page=filter.get('page', 1) - ).to_dict() - - @staticmethod - def list_term_associations( - session, username, groups, uri, data=None, check_perm=None - ): - source = data['source'] - filter = data['filter'] - datasets = session.query( - models.Dataset.datasetUri.label('targetUri'), - literal('dataset').label('targetType'), - models.Dataset.label.label('label'), - models.Dataset.name.label('name'), - models.Dataset.description.label('description'), - ) - tables = session.query( - models.DatasetTable.tableUri.label('targetUri'), - literal('table').label('targetType'), - models.DatasetTable.label.label('label'), - models.DatasetTable.name.label('name'), - models.DatasetTable.description.label('description'), - ) - columns = session.query( - models.DatasetTableColumn.columnUri.label('targetUri'), - literal('column').label('targetType'), - models.DatasetTableColumn.label.label('label'), - models.DatasetTableColumn.name.label('name'), - models.DatasetTableColumn.description.label('description'), - ) - folders = session.query( - models.DatasetStorageLocation.locationUri.label('targetUri'), - literal('folder').label('targetType'), - models.DatasetStorageLocation.label.label('label'), - models.DatasetStorageLocation.name.label('name'), - models.DatasetStorageLocation.description.label('description'), - ) - - dashboards = session.query( - models.Dashboard.dashboardUri.label('targetUri'), - literal('dashboard').label('targetType'), - models.Dashboard.label.label('label'), - models.Dashboard.name.label('name'), - models.Dashboard.description.label('description'), - ) - - linked_objects = datasets.union(tables, columns, folders, 
dashboards).subquery( - 'linked_objects' - ) - - path = models.GlossaryNode.path - q = ( - session.query(models.TermLink) - .options(with_expression(models.TermLink.path, path)) - .join( - models.GlossaryNode, - models.GlossaryNode.nodeUri == models.TermLink.nodeUri, - ) - .join( - linked_objects, models.TermLink.targetUri == linked_objects.c.targetUri - ) - ) - if source.nodeType == 'T': - q = q.filter(models.TermLink.nodeUri == source.nodeUri) - elif source.nodeType in ['C', 'G']: - q = q.filter(models.GlossaryNode.path.startswith(source.path)) - else: - raise Exception(f'InvalidNodeType ({source.nodeUri}/{source.nodeType})') - - term = filter.get('term') - if term: - q = q.filter( - or_( - linked_objects.c.label.ilike('%' + term + '%'), - linked_objects.c.description.ilike(f'%{term}'), - linked_objects.c.targetType.ilike(f'%{term}'), - ) - ) - q = q.order_by(asc(path)) - - return paginate( - q, page=filter.get('page', 1), page_size=filter.get('pageSize', 25) - ).to_dict() - - @staticmethod - def set_glossary_terms_links( - session, username, target_uri, target_type, glossary_terms - ): - current_links = session.query(models.TermLink).filter( - models.TermLink.targetUri == target_uri - ) - for current_link in current_links: - if current_link not in glossary_terms: - session.delete(current_link) - for nodeUri in glossary_terms: - - term = session.query(models.GlossaryNode).get(nodeUri) - if term: - link = ( - session.query(models.TermLink) - .filter( - models.TermLink.targetUri == target_uri, - models.TermLink.nodeUri == nodeUri, - ) - .first() - ) - if not link: - new_link = models.TermLink( - targetUri=target_uri, - nodeUri=nodeUri, - targetType=target_type, - owner=username, - approvedByOwner=True, - ) - session.add(new_link) - session.commit() - - @staticmethod - def get_glossary_terms_links(session, target_uri, target_type): - terms = ( - session.query(models.GlossaryNode) - .join( - models.TermLink, models.TermLink.nodeUri == models.GlossaryNode.nodeUri - ) - .filter( - and_( - models.TermLink.targetUri == target_uri, - models.TermLink.targetType == target_type, - ) - ) - ) - - return paginate(terms, page_size=10000, page=1).to_dict() - - @staticmethod - def delete_glossary_terms_links(session, target_uri, target_type): - term_links = ( - session.query(models.TermLink) - .filter( - and_( - models.TermLink.targetUri == target_uri, - models.TermLink.targetType == target_type, - ) - ) - .all() - ) - for link in term_links: - session.delete(link) diff --git a/backend/dataall/db/api/keyvaluetag.py b/backend/dataall/db/api/keyvaluetag.py deleted file mode 100644 index 90d3d4f97..000000000 --- a/backend/dataall/db/api/keyvaluetag.py +++ /dev/null @@ -1,111 +0,0 @@ -import logging - -from . import TargetType -from .resource_policy import ResourcePolicy -from .. import exceptions -from .. 
import models - -logger = logging.getLogger(__name__) - - -class KeyValueTag: - @staticmethod - def update_key_value_tags( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> [models.KeyValueTag]: - - if not uri: - raise exceptions.RequiredParameter('targetUri') - if not data: - raise exceptions.RequiredParameter('data') - if not data.get('targetType'): - raise exceptions.RequiredParameter('targetType') - - ResourcePolicy.check_user_resource_permission( - session=session, - username=username, - groups=groups, - resource_uri=uri, - permission_name=TargetType.get_resource_update_permission_name( - data['targetType'] - ), - ) - - tag_keys = [tag['key'].lower() for tag in data.get('tags', [])] - if tag_keys and len(tag_keys) != len(set(tag_keys)): - raise exceptions.UnauthorizedOperation( - action='SAVE_KEY_VALUE_TAGS', - message='Duplicate tag keys found. Please note that Tag keys are case insensitive', - ) - - tags = [] - session.query(models.KeyValueTag).filter( - models.KeyValueTag.targetUri == uri, - models.KeyValueTag.targetType == data['targetType'], - ).delete() - for tag in data.get('tags'): - kv_tag: models.KeyValueTag = models.KeyValueTag( - targetUri=uri, - targetType=data['targetType'], - key=tag['key'], - value=tag['value'], - cascade=tag['cascade'] - ) - tags.append(kv_tag) - session.add(kv_tag) - - return tags - - @staticmethod - def list_key_value_tags( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - ResourcePolicy.check_user_resource_permission( - session=session, - username=username, - groups=groups, - resource_uri=uri, - permission_name=TargetType.get_resource_read_permission_name( - data['targetType'] - ), - ) - return KeyValueTag.find_key_value_tags(session, uri, data['targetType']) - - @staticmethod - def find_key_value_tags(session, target_uri, target_type) -> [models.KeyValueTag]: - return ( - session.query(models.KeyValueTag) - .filter( - models.KeyValueTag.targetUri == target_uri, - models.KeyValueTag.targetType == target_type, - ) - .all() - ) - - @staticmethod - def find_environment_cascade_key_value_tags(session, target_uri) -> [models.KeyValueTag]: - return ( - session.query(models.KeyValueTag) - .filter( - models.KeyValueTag.targetUri == target_uri, - models.KeyValueTag.targetType == 'environment', - models.KeyValueTag.cascade.is_(True), - ) - .all() - ) - - @staticmethod - def delete_key_value_tags(session, target_uri, target_type): - return ( - session.query(models.KeyValueTag) - .filter( - models.KeyValueTag.targetUri == target_uri, - models.KeyValueTag.targetType == target_type, - ) - .delete() - ) diff --git a/backend/dataall/db/api/notebook.py b/backend/dataall/db/api/notebook.py deleted file mode 100644 index de3d712d6..000000000 --- a/backend/dataall/db/api/notebook.py +++ /dev/null @@ -1,154 +0,0 @@ -import logging - -from sqlalchemy import or_ -from sqlalchemy.orm import Query - -from . import ( - has_tenant_perm, - has_resource_perm, - ResourcePolicy, - Environment, -) -from .. 
import models, exceptions, permissions, paginate -from ...utils.naming_convention import ( - NamingConventionService, - NamingConventionPattern, -) -from ...utils.slugify import slugify - -logger = logging.getLogger(__name__) - - -class Notebook: - @staticmethod - @has_tenant_perm(permissions.MANAGE_NOTEBOOKS) - @has_resource_perm(permissions.CREATE_NOTEBOOK) - def create_notebook( - session, username, groups, uri, data=None, check_perm=None - ) -> models.SagemakerNotebook: - - Notebook.validate_params(data) - - Environment.check_group_environment_permission( - session=session, - username=username, - groups=groups, - uri=uri, - group=data['SamlAdminGroupName'], - permission_name=permissions.CREATE_NOTEBOOK, - ) - - env = Environment.get_environment_by_uri(session, uri) - - if not env.notebooksEnabled: - raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_NOTEBOOK, - message=f'Notebooks feature is disabled for the environment {env.label}', - ) - - env_group: models.EnvironmentGroup = data.get( - 'environment', - Environment.get_environment_group( - session, - group_uri=data['SamlAdminGroupName'], - environment_uri=env.environmentUri, - ), - ) - - notebook = models.SagemakerNotebook( - label=data.get('label', 'Untitled'), - environmentUri=env.environmentUri, - description=data.get('description', 'No description provided'), - NotebookInstanceName=slugify(data.get('label'), separator=''), - NotebookInstanceStatus='NotStarted', - AWSAccountId=env.AwsAccountId, - region=env.region, - RoleArn=env_group.environmentIAMRoleArn, - owner=username, - SamlAdminGroupName=data.get('SamlAdminGroupName', env.SamlGroupName), - tags=data.get('tags', []), - VpcId=data.get('VpcId'), - SubnetId=data.get('SubnetId'), - VolumeSizeInGB=data.get('VolumeSizeInGB', 32), - InstanceType=data.get('InstanceType', 'ml.t3.medium'), - ) - session.add(notebook) - session.commit() - - notebook.NotebookInstanceName = NamingConventionService( - target_uri=notebook.notebookUri, - target_label=notebook.label, - pattern=NamingConventionPattern.NOTEBOOK, - resource_prefix=env.resourcePrefix, - ).build_compliant_name() - - ResourcePolicy.attach_resource_policy( - session=session, - group=data['SamlAdminGroupName'], - permissions=permissions.NOTEBOOK_ALL, - resource_uri=notebook.notebookUri, - resource_type=models.SagemakerNotebook.__name__, - ) - - if env.SamlGroupName != notebook.SamlAdminGroupName: - ResourcePolicy.attach_resource_policy( - session=session, - group=env.SamlGroupName, - permissions=permissions.NOTEBOOK_ALL, - resource_uri=notebook.notebookUri, - resource_type=models.SagemakerNotebook.__name__, - ) - - return notebook - - @staticmethod - def validate_params(data): - if not data: - raise exceptions.RequiredParameter('data') - if not data.get('environmentUri'): - raise exceptions.RequiredParameter('environmentUri') - if not data.get('label'): - raise exceptions.RequiredParameter('name') - - @staticmethod - def query_user_notebooks(session, username, groups, filter) -> Query: - query = session.query(models.SagemakerNotebook).filter( - or_( - models.SagemakerNotebook.owner == username, - models.SagemakerNotebook.SamlAdminGroupName.in_(groups), - ) - ) - if filter and filter.get('term'): - query = query.filter( - or_( - models.SagemakerNotebook.description.ilike( - filter.get('term') + '%%' - ), - models.SagemakerNotebook.label.ilike(filter.get('term') + '%%'), - ) - ) - return query - - @staticmethod - def paginated_user_notebooks( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: 
- return paginate( - query=Notebook.query_user_notebooks(session, username, groups, data), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - @has_resource_perm(permissions.GET_NOTEBOOK) - def get_notebook(session, username, groups, uri, data=None, check_perm=True): - return Notebook.get_notebook_by_uri(session, uri) - - @staticmethod - def get_notebook_by_uri(session, uri) -> models.SagemakerNotebook: - if not uri: - raise exceptions.RequiredParameter('URI') - notebook = session.query(models.SagemakerNotebook).get(uri) - if not notebook: - raise exceptions.ObjectNotFound('SagemakerNotebook', uri) - return notebook diff --git a/backend/dataall/db/api/notification.py b/backend/dataall/db/api/notification.py deleted file mode 100644 index 9f6a72158..000000000 --- a/backend/dataall/db/api/notification.py +++ /dev/null @@ -1,210 +0,0 @@ -from datetime import datetime - -from sqlalchemy import func, and_ - -from .. import models -from ...db import paginate - - -class Notification: - def __init__(self): - pass - - @staticmethod - def notify_share_object_submission( - session, username: str, dataset: models.Dataset, share: models.ShareObject - ): - notifications = [] - # stewards = Notification.get_dataset_stewards(session, dataset) - # for steward in stewards: - notifications.append( - Notification.create( - session=session, - username=dataset.owner, - notification_type=models.NotificationType.SHARE_OBJECT_SUBMITTED, - target_uri=f'{share.shareUri}|{dataset.datasetUri}', - message=f'User {username} submitted share request for dataset {dataset.label}', - ) - ) - session.add_all(notifications) - return notifications - - @staticmethod - def get_dataset_stewards(session, dataset): - stewards = list() - stewards.append(dataset.SamlAdminGroupName) - stewards.append(dataset.stewards) - return stewards - - @staticmethod - def notify_share_object_approval( - session, username: str, dataset: models.Dataset, share: models.ShareObject - ): - notifications = [] - targeted_users = Notification.get_share_object_targeted_users( - session, dataset, share - ) - for user in targeted_users: - notifications.append( - Notification.create( - session=session, - username=user, - notification_type=models.NotificationType.SHARE_OBJECT_APPROVED, - target_uri=f'{share.shareUri}|{dataset.datasetUri}', - message=f'User {username} approved share request for dataset {dataset.label}', - ) - ) - session.add_all(notifications) - return notifications - - @staticmethod - def notify_share_object_rejection( - session, username: str, dataset: models.Dataset, share: models.ShareObject - ): - notifications = [] - targeted_users = Notification.get_share_object_targeted_users( - session, dataset, share - ) - for user in targeted_users: - notifications.append( - Notification.create( - session=session, - username=user, - notification_type=models.NotificationType.SHARE_OBJECT_REJECTED, - target_uri=f'{share.shareUri}|{dataset.datasetUri}', - message=f'User {username} approved share request for dataset {dataset.label}', - ) - ) - session.add_all(notifications) - return notifications - - @staticmethod - def notify_new_data_available_from_owners( - session, dataset: models.Dataset, share: models.ShareObject, s3_prefix - ): - notifications = [] - targeted_users = Notification.get_share_object_targeted_users( - session, dataset, share - ) - for user in targeted_users: - notifications.append( - Notification.create( - session=session, - username=user, - 
notification_type=models.NotificationType.DATASET_VERSION, - target_uri=f'{share.shareUri}|{dataset.datasetUri}', - message=f'New data (at {s3_prefix}) is available from dataset {dataset.datasetUri} shared by owner {dataset.owner}', - ) - ) - session.add_all(notifications) - return notifications - - @staticmethod - def get_share_object_targeted_users(session, dataset, share): - targeted_users = Notification.get_dataset_stewards( - session=session, dataset=dataset - ) - targeted_users.append(dataset.owner) - targeted_users.append(share.owner) - return targeted_users - - @staticmethod - def create( - session, - username, - notification_type: models.NotificationType, - target_uri, - message, - ) -> models.Notification: - notification = models.Notification( - type=notification_type, - message=message, - username=username, - target_uri=target_uri, - ) - session.add(notification) - session.commit() - return notification - - @staticmethod - def list_my_notifications(session, username): - return ( - session.query(models.Notification) - .filter(models.Notification.username == username) - .order_by(models.Notification.created.desc()) - .all() - ) - - @staticmethod - def paginated_notifications(session, username, filter=None): - if not filter: - filter = {} - q = session.query(models.Notification).filter( - models.Notification.username == username - ) - if filter.get('read'): - q = q.filter( - and_( - models.Notification.is_read == True, - models.Notification.deleted.is_(None), - ) - ) - if filter.get('unread'): - q = q.filter( - and_( - models.Notification.is_read == False, - models.Notification.deleted.is_(None), - ) - ) - if filter.get('archived'): - q = q.filter(models.Notification.deleted.isnot(None)) - return paginate( - q, page=filter.get('page', 1), page_size=filter.get('pageSize', 20) - ).to_dict() - - @staticmethod - def count_unread_notifications(session, username): - count = ( - session.query(func.count(models.Notification.notificationUri)) - .filter(models.Notification.username == username) - .filter(models.Notification.is_read == False) - .filter(models.Notification.deleted.is_(None)) - .scalar() - ) - return int(count) - - @staticmethod - def count_read_notifications(session, username): - count = ( - session.query(func.count(models.Notification.notificationUri)) - .filter(models.Notification.username == username) - .filter(models.Notification.is_read == True) - .filter(models.Notification.deleted.is_(None)) - .scalar() - ) - return int(count) - - @staticmethod - def count_deleted_notifications(session, username): - count = ( - session.query(func.count(models.Notification.notificationUri)) - .filter(models.Notification.username == username) - .filter(models.Notification.deleted.isnot(None)) - .scalar() - ) - return int(count) - - @staticmethod - def read_notification(session, notificationUri): - notification = session.query(models.Notification).get(notificationUri) - notification.is_read = True - session.commit() - return True - - @staticmethod - def delete_notification(session, notificationUri): - notification = session.query(models.Notification).get(notificationUri) - if notification: - notification.deleted = datetime.now() - session.commit() - return True diff --git a/backend/dataall/db/api/organization.py b/backend/dataall/db/api/organization.py deleted file mode 100644 index 979dd1095..000000000 --- a/backend/dataall/db/api/organization.py +++ /dev/null @@ -1,379 +0,0 @@ -import logging - -from sqlalchemy import or_, and_ -from sqlalchemy.orm import Query - -from .. 
import exceptions, permissions, paginate -from .. import models -from . import has_tenant_perm, ResourcePolicy, has_resource_perm -from ..models import OrganizationGroup -from ..models.Enums import OrganisationUserRole -from ..paginator import Page - -logger = logging.getLogger(__name__) - - -class Organization: - @staticmethod - def get_organization_by_uri(session, uri: str) -> models.Organization: - if not uri: - raise exceptions.RequiredParameter(param_name='organizationUri') - org = Organization.find_organization_by_uri(session, uri) - if not org: - raise exceptions.ObjectNotFound('Organization', uri) - return org - - @staticmethod - def find_organization_by_uri(session, uri) -> models.Organization: - return session.query(models.Organization).get(uri) - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ORGANIZATIONS) - def create_organization(session, username, groups, uri, data=None, check_perm=None) -> models.Organization: - if not data: - raise exceptions.RequiredParameter(data) - if not data.get('SamlGroupName'): - raise exceptions.RequiredParameter('groupUri') - if not data.get('label'): - raise exceptions.RequiredParameter('label') - - org = models.Organization( - label=data.get('label'), - owner=username, - tags=data.get('tags', []), - description=data.get('description', 'No description provided'), - SamlGroupName=data.get('SamlGroupName'), - userRoleInOrganization=OrganisationUserRole.Owner.value, - ) - session.add(org) - session.commit() - member = models.OrganizationGroup( - organizationUri=org.organizationUri, - groupUri=data['SamlGroupName'], - ) - session.add(member) - - activity = models.Activity( - action='org:create', - label='org:create', - owner=username, - summary=f'{username} create organization {org.name} ', - targetUri=org.organizationUri, - targetType='org', - ) - session.add(activity) - - ResourcePolicy.attach_resource_policy( - session=session, - group=data['SamlGroupName'], - permissions=permissions.ORGANIZATION_ALL, - resource_uri=org.organizationUri, - resource_type=models.Organization.__name__, - ) - - return org - - @staticmethod - @has_resource_perm(permissions.UPDATE_ORGANIZATION) - def update_organization(session, username, groups, uri, data=None, check_perm=None): - organization = Organization.get_organization_by_uri(session, uri) - for field in data.keys(): - setattr(organization, field, data.get(field)) - session.commit() - - activity = models.Activity( - action='org:update', - label='org:create', - owner=username, - summary=f'{username} updated organization {organization.name} ', - targetUri=organization.organizationUri, - targetType='org', - ) - session.add(activity) - ResourcePolicy.attach_resource_policy( - session=session, - group=organization.SamlGroupName, - permissions=permissions.ORGANIZATION_ALL, - resource_uri=organization.organizationUri, - resource_type=models.Organization.__name__, - ) - return organization - - @staticmethod - def query_user_organizations(session, username, groups, filter) -> Query: - query = ( - session.query(models.Organization) - .outerjoin( - models.OrganizationGroup, - models.Organization.organizationUri == models.OrganizationGroup.organizationUri, - ) - .filter( - or_( - models.Organization.owner == username, - models.OrganizationGroup.groupUri.in_(groups), - ) - ) - ) - if filter and filter.get('term'): - query = query.filter( - or_( - models.Organization.label.ilike('%' + filter.get('term') + '%'), - models.Organization.description.ilike('%' + filter.get('term') + '%'), - 
models.Organization.tags.contains(f"{{{filter.get('term')}}}"), - ) - ) - return query - - @staticmethod - def paginated_user_organizations(session, username, groups, uri, data=None, check_perm=None) -> dict: - return paginate( - query=Organization.query_user_organizations(session, username, groups, data), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - def query_organization_environments(session, uri, filter) -> Query: - query = session.query(models.Environment).filter(models.Environment.organizationUri == uri) - if filter and filter.get('term'): - query = query.filter( - or_( - models.Environment.label.ilike('%' + filter.get('term') + '%'), - models.Environment.description.ilike('%' + filter.get('term') + '%'), - ) - ) - return query - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ORGANIZATIONS) - @has_resource_perm(permissions.GET_ORGANIZATION) - def paginated_organization_environments(session, username, groups, uri, data=None, check_perm=None) -> dict: - return paginate( - query=Organization.query_organization_environments(session, uri, data), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ORGANIZATIONS) - @has_resource_perm(permissions.DELETE_ORGANIZATION) - def archive_organization(session, username, groups, uri, data=None, check_perm=None) -> bool: - - org = Organization.get_organization_by_uri(session, uri) - environments = session.query(models.Environment).filter(models.Environment.organizationUri == uri).count() - if environments: - raise exceptions.UnauthorizedOperation( - action='ARCHIVE_ORGANIZATION', - message='The organization you tried to delete has linked environments', - ) - session.delete(org) - ResourcePolicy.delete_resource_policy( - session=session, - group=org.SamlGroupName, - resource_uri=org.organizationUri, - resource_type=models.Organization.__name__, - ) - - return True - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ORGANIZATIONS) - @has_resource_perm(permissions.INVITE_ORGANIZATION_GROUP) - def invite_group( - session, username, groups, uri, data=None, check_perm=None - ) -> (models.Organization, models.OrganizationGroup): - - Organization.validate_invite_params(data) - - group: str = data['groupUri'] - - organization = Organization.get_organization_by_uri(session, uri) - - group_membership = Organization.find_group_membership(session, group, organization) - if group_membership: - raise exceptions.UnauthorizedOperation( - action='INVITE_TEAM', - message=f'Team {group} is already admin of the organization {organization.name}', - ) - org_group = OrganizationGroup( - organizationUri=organization.organizationUri, - groupUri=group, - invitedBy=username, - ) - session.add(org_group) - ResourcePolicy.attach_resource_policy( - session=session, - group=group, - resource_uri=organization.organizationUri, - permissions=permissions.ORGANIZATION_INVITED, - resource_type=models.Organization.__name__, - ) - return organization, org_group - - @staticmethod - def find_group_membership(session, group, organization): - membership = ( - session.query(models.OrganizationGroup) - .filter( - ( - and_( - models.OrganizationGroup.groupUri == group, - models.OrganizationGroup.organizationUri == organization.organizationUri, - ) - ) - ) - .first() - ) - return membership - - @staticmethod - def validate_invite_params(data): - if not data: - raise exceptions.RequiredParameter(data) - if not data.get('groupUri'): - raise 
exceptions.RequiredParameter('groupUri') - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ORGANIZATIONS) - @has_resource_perm(permissions.REMOVE_ORGANIZATION_GROUP) - def remove_group(session, username, groups, uri, data=None, check_perm=None): - if not data: - raise exceptions.RequiredParameter(data) - if not data.get('groupUri'): - raise exceptions.RequiredParameter('groupUri') - - group: str = data['groupUri'] - - organization = Organization.get_organization_by_uri(session, uri) - - if group == organization.SamlGroupName: - raise exceptions.UnauthorizedOperation( - action='REMOVE_TEAM', - message=f'Team: {group} is the owner of the organization {organization.name}', - ) - - group_env_objects_count = ( - session.query(models.Environment) - .filter( - and_( - models.Environment.organizationUri == organization.organizationUri, - models.Environment.SamlGroupName == group, - ) - ) - .count() - ) - if group_env_objects_count > 0: - raise exceptions.OrganizationResourcesFound( - action='Remove Team', - message=f'Team: {group} has {group_env_objects_count} linked environments on this environment.', - ) - - group_membership = Organization.find_group_membership(session, group, organization) - if group_membership: - session.delete(group_membership) - session.commit() - - ResourcePolicy.delete_resource_policy( - session=session, - group=group, - resource_uri=organization.organizationUri, - resource_type=models.Organization.__name__, - ) - return organization - - @staticmethod - def query_organization_groups(session, uri, filter) -> Query: - query = session.query(models.OrganizationGroup).filter(models.OrganizationGroup.organizationUri == uri) - if filter and filter.get('term'): - query = query.filter( - or_( - models.OrganizationGroup.groupUri.ilike('%' + filter.get('term') + '%'), - ) - ) - return query - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ORGANIZATIONS) - @has_resource_perm(permissions.GET_ORGANIZATION) - def paginated_organization_groups(session, username, groups, uri, data=None, check_perm=None) -> dict: - return paginate( - query=Organization.query_organization_groups(session, uri, data), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - def query_organization_invited_groups(session, organization, filter) -> Query: - query = ( - session.query(models.OrganizationGroup) - .join( - models.Organization, - models.OrganizationGroup.organizationUri == models.Organization.organizationUri, - ) - .filter( - and_( - models.Organization.organizationUri == organization.organizationUri, - models.OrganizationGroup.groupUri != models.Organization.SamlGroupName, - ) - ) - ) - if filter and filter.get('term'): - query = query.filter( - or_( - models.OrganizationGroup.groupUri.ilike('%' + filter.get('term') + '%'), - ) - ) - return query - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ORGANIZATIONS) - @has_resource_perm(permissions.GET_ORGANIZATION) - def paginated_organization_invited_groups(session, username, groups, uri, data=None, check_perm=False) -> dict: - organization = Organization.get_organization_by_uri(session, uri) - return paginate( - query=Organization.query_organization_invited_groups(session, organization, data), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - def count_organization_invited_groups(session, uri, group) -> int: - groups = ( - session.query(models.OrganizationGroup) - .filter( - and_( - models.OrganizationGroup.organizationUri == uri, - 
models.OrganizationGroup.groupUri != group, - ) - ) - .count() - ) - return groups - - @staticmethod - def count_organization_environments(session, uri) -> int: - envs = ( - session.query(models.Environment) - .filter( - models.Environment.organizationUri == uri, - ) - .count() - ) - return envs - - @staticmethod - def find_organization_membership(session, uri, groups) -> int: - groups = ( - session.query(models.OrganizationGroup) - .filter( - and_( - models.OrganizationGroup.organizationUri == uri, - models.OrganizationGroup.groupUri.in_(groups), - ) - ) - .count() - ) - if groups >= 1: - return True - else: - return False diff --git a/backend/dataall/db/api/permission.py b/backend/dataall/db/api/permission.py deleted file mode 100644 index 3be745d52..000000000 --- a/backend/dataall/db/api/permission.py +++ /dev/null @@ -1,168 +0,0 @@ -import logging - -from sqlalchemy import or_ - -from ..paginator import paginate -from .. import models, exceptions, permissions -from ..models.Permission import PermissionType - -logger = logging.getLogger(__name__) - - -class Permission: - @staticmethod - def find_permission_by_name( - session, permission_name: str, permission_type: str - ) -> models.Permission: - if permission_name: - permission = ( - session.query(models.Permission) - .filter( - models.Permission.name == permission_name, - models.Permission.type == permission_type, - ) - .first() - ) - return permission - - @staticmethod - def get_permission_by_name( - session, permission_name: str, permission_type: str - ) -> models.Permission: - if not permission_name: - raise exceptions.RequiredParameter(param_name='permission_name') - permission = Permission.find_permission_by_name( - session, permission_name, permission_type - ) - if not permission: - raise exceptions.ObjectNotFound('Permission', permission_name) - return permission - - @staticmethod - def find_permission_by_uri( - session, permission_uri: str, permission_type: str - ) -> models.Permission: - if permission_uri: - permission = ( - session.query(models.Permission) - .filter( - models.Permission.permissionUri == permission_uri, - models.Permission.type == permission_type, - ) - .first() - ) - return permission - - @staticmethod - def get_permission_by_uri( - session, permission_uri: str, permission_type: str - ) -> models.Permission: - if not permission_uri: - raise exceptions.RequiredParameter(param_name='permission_uri') - permission = Permission.find_permission_by_uri( - session, permission_uri, permission_type - ) - if not permission: - raise exceptions.ObjectNotFound('Permission', permission_uri) - return permission - - @staticmethod - def save_permission( - session, name: str, description: str, permission_type: str - ) -> models.Permission: - if not name: - raise exceptions.RequiredParameter('name') - if not type: - raise exceptions.RequiredParameter('permission_type') - permission = Permission.find_permission_by_name(session, name, permission_type) - if permission: - logger.info(f'Permission {permission.name} already exists') - else: - permission = models.Permission( - name=name, - description=description if description else f'Allows {name}', - type=permission_type, - ) - session.add(permission) - return permission - - @staticmethod - def paginated_tenant_permissions(session, data) -> dict: - if not data: - data = dict() - data['type'] = PermissionType.TENANT - return Permission.paginated_permissions(session, data) - - @staticmethod - def paginated_resource_permissions(session, data) -> dict: - if not data: - data = dict() - 
data['type'] = PermissionType.RESOURCE - return Permission.paginated_permissions(session, data) - - @staticmethod - def paginated_permissions(session, data) -> dict: - query = session.query(models.Permission) - if data: - if data.get('type'): - query = query.filter(models.Permission.type == data['type']) - if data.get('term'): - term = data['term'] - query = query.filter( - or_( - models.Permission.name.ilike('%' + term + '%'), - models.Permission.description.ilike('%' + term + '%'), - ) - ) - return paginate( - query=query, - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - def init_permissions(session): - perms = [] - count_resource_permissions = ( - session.query(models.Permission) - .filter(models.Permission.type == PermissionType.RESOURCE.name) - .count() - ) - - logger.debug(f'count_resource_permissions: {count_resource_permissions}, RESOURCES_ALL: {len(permissions.RESOURCES_ALL_WITH_DESC)}') - - if count_resource_permissions < len(permissions.RESOURCES_ALL_WITH_DESC): - for name, desc in permissions.RESOURCES_ALL_WITH_DESC.items(): - perms.append( - Permission.save_permission( - session, - name=name, - description=desc, - permission_type=PermissionType.RESOURCE.name, - ) - ) - logger.info(f'Saved permission {name} successfully') - logger.info(f'Saved {len(perms)} resource permissions successfully') - - count_tenant_permissions = ( - session.query(models.Permission) - .filter(models.Permission.type == PermissionType.TENANT.name) - .count() - ) - - logger.debug(f'count_tenant_permissions: {count_tenant_permissions}, TENANT_ALL: {len(permissions.TENANT_ALL_WITH_DESC)}') - - if count_tenant_permissions < len(permissions.TENANT_ALL_WITH_DESC): - for name, desc in permissions.TENANT_ALL_WITH_DESC.items(): - perms.append( - Permission.save_permission( - session, - name=name, - description=desc, - permission_type=PermissionType.TENANT.name, - ) - ) - logger.info(f'Saved permission {name} successfully') - logger.info(f'Saved {len(perms)} permissions successfully') - session.commit() - return perms diff --git a/backend/dataall/db/api/permission_checker.py b/backend/dataall/db/api/permission_checker.py deleted file mode 100644 index 6fb69d6dd..000000000 --- a/backend/dataall/db/api/permission_checker.py +++ /dev/null @@ -1,76 +0,0 @@ -from ..api.resource_policy import ResourcePolicy -from ..api.tenant_policy import TenantPolicy - - -def has_resource_perm(permission): - def decorator(f): - static_func = False - try: - f.__func__ - static_func = True - fn = f.__func__ - except AttributeError: - fn = f - - def decorated( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = True, - ): - if check_perm: - ResourcePolicy.check_user_resource_permission( - session=session, - username=username, - groups=groups, - resource_uri=uri, - permission_name=permission, - ) - return fn(session, username, groups, uri, data=data, check_perm=check_perm) - - if static_func: - return staticmethod(decorated) - else: - return decorated - - return decorator - - -def has_tenant_perm(permission): - def decorator(f): - static_func = False - try: - f.__func__ - static_func = True - fn = f.__func__ - except AttributeError: - fn = f - - def decorated( - session, - username: str, - groups: [str], - uri: str = None, - data: dict = None, - check_perm: bool = True, - ): - if check_perm: - TenantPolicy.check_user_tenant_permission( - session=session, - username=username, - groups=groups, - tenant_name='dataall', - 
permission_name=permission, - ) - return fn( - session, username, groups, uri=uri, data=data, check_perm=check_perm - ) - - if static_func: - return staticmethod(decorated) - else: - return decorated - - return decorator diff --git a/backend/dataall/db/api/pipeline.py b/backend/dataall/db/api/pipeline.py deleted file mode 100644 index 75cfb47d1..000000000 --- a/backend/dataall/db/api/pipeline.py +++ /dev/null @@ -1,354 +0,0 @@ -import logging - -from sqlalchemy import or_, and_ -from sqlalchemy.orm import Query - -from . import ( - Environment, - has_tenant_perm, - has_resource_perm, - ResourcePolicy, -) -from .. import models, exceptions, permissions -from .. import paginate -from ...utils.naming_convention import ( - NamingConventionService, - NamingConventionPattern, -) -from ...utils.slugify import slugify - -logger = logging.getLogger(__name__) - - -class Pipeline: - @staticmethod - @has_tenant_perm(permissions.MANAGE_PIPELINES) - @has_resource_perm(permissions.CREATE_PIPELINE) - def create_pipeline( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.DataPipeline: - - Pipeline._validate_input(data) - - Environment.check_group_environment_permission( - session=session, - username=username, - groups=groups, - uri=uri, - group=data['SamlGroupName'], - permission_name=permissions.CREATE_PIPELINE, - ) - - environment = Environment.get_environment_by_uri(session, uri) - - if not environment.pipelinesEnabled: - raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_PIPELINE, - message=f'Pipelines feature is disabled for the environment {environment.label}', - ) - - pipeline: models.DataPipeline = models.DataPipeline( - owner=username, - environmentUri=environment.environmentUri, - SamlGroupName=data['SamlGroupName'], - label=data['label'], - description=data.get('description', 'No description provided'), - tags=data.get('tags', []), - AwsAccountId=environment.AwsAccountId, - region=environment.region, - repo=slugify(data['label']), - devStrategy=data['devStrategy'], - template="", - ) - - session.add(pipeline) - session.commit() - - aws_compliant_name = NamingConventionService( - target_uri=pipeline.DataPipelineUri, - target_label=pipeline.label, - pattern=NamingConventionPattern.DEFAULT, - resource_prefix=environment.resourcePrefix, - ).build_compliant_name() - - pipeline.repo = aws_compliant_name - pipeline.name = aws_compliant_name - - activity = models.Activity( - action='PIPELINE:CREATE', - label='PIPELINE:CREATE', - owner=username, - summary=f'{username} created dashboard {pipeline.label} in {environment.label}', - targetUri=pipeline.DataPipelineUri, - targetType='pipeline', - ) - session.add(activity) - - ResourcePolicy.attach_resource_policy( - session=session, - group=data['SamlGroupName'], - permissions=permissions.PIPELINE_ALL, - resource_uri=pipeline.DataPipelineUri, - resource_type=models.DataPipeline.__name__, - ) - - if environment.SamlGroupName != pipeline.SamlGroupName: - ResourcePolicy.attach_resource_policy( - session=session, - group=environment.SamlGroupName, - permissions=permissions.PIPELINE_ALL, - resource_uri=pipeline.DataPipelineUri, - resource_type=models.DataPipeline.__name__, - ) - - return pipeline - - @staticmethod - def create_pipeline_environment( - session, - username: str, - groups: [str], - data: dict = None, - check_perm: bool = False, - ) -> models.DataPipelineEnvironment: - - Environment.check_group_environment_permission( - session=session, - username=username, - 
groups=groups, - uri=data['environmentUri'], - group=data['samlGroupName'], - permission_name=permissions.CREATE_PIPELINE, - ) - - environment = Environment.get_environment_by_uri(session, data['environmentUri']) - - if not environment.pipelinesEnabled: - raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_PIPELINE, - message=f'Pipelines feature is disabled for the environment {environment.label}', - ) - - pipeline = Pipeline.get_pipeline_by_uri(session, data['pipelineUri']) - - pipeline_env: models.DataPipelineEnvironment = models.DataPipelineEnvironment( - owner=username, - label=f"{pipeline.label}-{environment.label}", - environmentUri=environment.environmentUri, - environmentLabel=environment.label, - pipelineUri=pipeline.DataPipelineUri, - pipelineLabel=pipeline.label, - envPipelineUri=f"{pipeline.DataPipelineUri}{environment.environmentUri}{data['stage']}", - AwsAccountId=environment.AwsAccountId, - region=environment.region, - stage=data['stage'], - order=data['order'], - samlGroupName=data['samlGroupName'] - ) - - session.add(pipeline_env) - session.commit() - - return pipeline_env - - @staticmethod - def _validate_input(data): - if not data: - raise exceptions.RequiredParameter(data) - if not data.get('environmentUri'): - raise exceptions.RequiredParameter('environmentUri') - if not data.get('SamlGroupName'): - raise exceptions.RequiredParameter('group') - if not data.get('label'): - raise exceptions.RequiredParameter('label') - - @staticmethod - def validate_group_membership( - session, environment_uri, pipeline_group, username, groups - ): - if pipeline_group and pipeline_group not in groups: - raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_PIPELINE, - message=f'User: {username} is not a member of the team {pipeline_group}', - ) - if pipeline_group not in Environment.list_environment_groups( - session=session, - username=username, - groups=groups, - uri=environment_uri, - data=None, - check_perm=True, - ): - raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_PIPELINE, - message=f'Team: {pipeline_group} is not a member of the environment {environment_uri}', - ) - - @staticmethod - @has_tenant_perm(permissions.MANAGE_PIPELINES) - @has_resource_perm(permissions.GET_PIPELINE) - def get_pipeline( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.DataPipeline: - return Pipeline.get_pipeline_by_uri(session, uri) - - @staticmethod - @has_tenant_perm(permissions.MANAGE_PIPELINES) - @has_resource_perm(permissions.UPDATE_PIPELINE) - def update_pipeline( - session, username, groups, uri, data=None, check_perm=None - ) -> models.DataPipeline: - pipeline: models.DataPipeline = Pipeline.get_pipeline_by_uri(session, uri) - if data: - if isinstance(data, dict): - for k in data.keys(): - setattr(pipeline, k, data.get(k)) - return pipeline - - @staticmethod - def get_pipeline_by_uri(session, uri): - pipeline: models.DataPipeline = session.query(models.DataPipeline).get(uri) - if not pipeline: - raise exceptions.ObjectNotFound('Pipeline', uri) - return pipeline - - @staticmethod - def query_user_pipelines(session, username, groups, filter) -> Query: - query = session.query(models.DataPipeline).filter( - or_( - models.DataPipeline.owner == username, - models.DataPipeline.SamlGroupName.in_(groups), - ) - ) - if filter and filter.get('term'): - query = query.filter( - or_( - models.DataPipeline.description.ilike(filter.get('term') + '%%'), - 
models.DataPipeline.label.ilike(filter.get('term') + '%%'), - ) - ) - if filter and filter.get('region'): - if len(filter.get('region')) > 0: - query = query.filter( - models.DataPipeline.region.in_(filter.get('region')) - ) - if filter and filter.get('tags'): - if len(filter.get('tags')) > 0: - query = query.filter( - or_( - *[models.DataPipeline.tags.any(tag) for tag in filter.get('tags')] - ) - ) - if filter and filter.get('type'): - if len(filter.get('type')) > 0: - query = query.filter( - models.DataPipeline.devStrategy.in_(filter.get('type')) - ) - return query - - @staticmethod - def paginated_user_pipelines( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Pipeline.query_user_pipelines(session, username, groups, data), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - def delete(session, username, groups, uri, data=None, check_perm=None) -> bool: - pipeline = Pipeline.get_pipeline_by_uri(session, uri) - ResourcePolicy.delete_resource_policy( - session=session, resource_uri=uri, group=pipeline.SamlGroupName - ) - session.delete(pipeline) - session.commit() - return True - - @staticmethod - @has_tenant_perm(permissions.MANAGE_PIPELINES) - @has_resource_perm(permissions.GET_PIPELINE) - def get_pipeline_environment( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.DataPipeline: - return Pipeline.get_pipeline_environment_by_uri(session, uri) - - @staticmethod - def get_pipeline_environment_by_uri(session, uri): - pipeline_env: models.DataPipelineEnvironment = session.query(models.DataPipelineEnvironment).get(uri) - if not pipeline_env: - raise exceptions.ObjectNotFound('PipelineEnvironment', uri) - return pipeline_env - - @staticmethod - def query_pipeline_environments(session, uri) -> Query: - query = session.query(models.DataPipelineEnvironment).filter( - models.DataPipelineEnvironment.pipelineUri.ilike(uri + '%%'), - ) - return query - - @staticmethod - def delete_pipeline_environments(session, uri) -> bool: - deletedItems = ( - session.query(models.DataPipelineEnvironment).filter( - models.DataPipelineEnvironment.pipelineUri == uri).delete() - ) - session.commit() - return True - - @staticmethod - def delete_pipeline_environment( - session, username, groups, envPipelineUri, check_perm=None - ) -> bool: - deletedItem = ( - session.query(models.DataPipelineEnvironment).filter( - models.DataPipelineEnvironment.envPipelineUri == envPipelineUri).delete() - ) - session.commit() - return True - - @staticmethod - @has_tenant_perm(permissions.MANAGE_PIPELINES) - @has_resource_perm(permissions.UPDATE_PIPELINE) - def update_pipeline_environment( - session, username, groups, uri, data=None, check_perm=None - ) -> models.DataPipelineEnvironment: - pipeline_env = session.query(models.DataPipelineEnvironment).filter( - and_( - models.DataPipelineEnvironment.pipelineUri == data['pipelineUri'], - models.DataPipelineEnvironment.environmentUri == data['environmentUri'], - models.DataPipelineEnvironment.stage == data['stage'] - ) - ).first() - if data: - if isinstance(data, dict): - for k in data.keys(): - print(f"KEY: {k}, VALUE: {data.get(k)}") - setattr(pipeline_env, k, data.get(k)) - return pipeline_env - - @staticmethod - def paginated_pipeline_environments( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Pipeline.query_pipeline_environments(session, uri), - 
page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() diff --git a/backend/dataall/db/api/redshift_cluster.py b/backend/dataall/db/api/redshift_cluster.py deleted file mode 100644 index 8dc3c8d63..000000000 --- a/backend/dataall/db/api/redshift_cluster.py +++ /dev/null @@ -1,566 +0,0 @@ -import logging - -from sqlalchemy import and_, or_, literal - -from .. import models, api, exceptions, paginate, permissions -from . import has_resource_perm, ResourcePolicy, DatasetTable, Environment, Dataset -from ...utils.naming_convention import ( - NamingConventionService, - NamingConventionPattern, -) -from ...utils.slugify import slugify - -log = logging.getLogger(__name__) - - -class RedshiftCluster: - def __init__(self): - pass - - @staticmethod - @has_resource_perm(permissions.CREATE_REDSHIFT_CLUSTER) - def create(session, username, groups, uri: str, data: dict = None, check_perm=None): - - RedshiftCluster.__validate_cluster_data(data, uri) - - Environment.check_group_environment_permission( - session=session, - username=username, - groups=groups, - uri=uri, - group=data['SamlGroupName'], - permission_name=permissions.CREATE_REDSHIFT_CLUSTER, - ) - - environment = Environment.get_environment_by_uri(session, uri) - - if not environment.warehousesEnabled: - raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_REDSHIFT_CLUSTER, - message=f'Warehouses feature is disabled for the environment {environment.label}', - ) - - data['clusterName'] = slugify(data['label'], separator='') - - RedshiftCluster.validate_none_existing_cluster( - session, data['clusterName'], environment - ) - redshift_cluster = RedshiftCluster.create_redshift_cluster( - session, username, data, environment - ) - return redshift_cluster - - @staticmethod - def create_redshift_cluster( - session, username, cluster_input, environment: models.Environment - ): - redshift_cluster = models.RedshiftCluster( - environmentUri=environment.environmentUri, - organizationUri=environment.organizationUri, - owner=cluster_input.get('owner', username), - label=cluster_input['label'], - description=cluster_input.get('description'), - masterDatabaseName=cluster_input['masterDatabaseName'], - masterUsername=cluster_input['masterUsername'], - databaseName=cluster_input.get('databaseName', 'datahubdb'), - nodeType=cluster_input['nodeType'], - numberOfNodes=cluster_input['numberOfNodes'], - port=cluster_input.get('port') or 5432, - region=environment.region, - AwsAccountId=environment.AwsAccountId, - status='CREATING', - vpc=cluster_input['vpc'], - subnetIds=cluster_input.get('subnetIds'), - securityGroupIds=cluster_input.get('securityGroupIds'), - IAMRoles=[environment.EnvironmentDefaultIAMRoleArn], - tags=cluster_input.get('tags', []), - SamlGroupName=cluster_input['SamlGroupName'], - imported=False, - ) - session.add(redshift_cluster) - session.commit() - - name = NamingConventionService( - target_uri=redshift_cluster.clusterUri, - target_label=redshift_cluster.label, - pattern=NamingConventionPattern.DEFAULT, - resource_prefix=environment.resourcePrefix, - ).build_compliant_name() - - redshift_cluster.name = name - redshift_cluster.clusterName = name - redshift_cluster.CFNStackName = f'{name}-stack' - redshift_cluster.CFNStackStatus = 'CREATING' - redshift_cluster.kmsAlias = redshift_cluster.clusterName - redshift_cluster.datahubSecret = f'{redshift_cluster.name}-redshift-dhuser' - redshift_cluster.masterSecret = f'{redshift_cluster.name}-redshift-masteruser' - - activity = models.Activity( - 
action='redshiftcluster:user:create', - label='redshiftcluster:user:create', - owner=username, - summary=f'{username} ' - f'Created Redshift cluster {redshift_cluster.name} ' - f'on Environment {environment.name}|{environment.AwsAccountId}', - targetUri=redshift_cluster.clusterUri, - targetType='redshiftcluster', - ) - session.add(activity) - session.commit() - - ResourcePolicy.attach_resource_policy( - session=session, - group=redshift_cluster.SamlGroupName, - resource_uri=redshift_cluster.clusterUri, - permissions=permissions.REDSHIFT_CLUSTER_ALL, - resource_type=models.RedshiftCluster.__name__, - ) - if environment.SamlGroupName != redshift_cluster.SamlGroupName: - ResourcePolicy.attach_resource_policy( - session=session, - group=environment.SamlGroupName, - permissions=permissions.REDSHIFT_CLUSTER_ALL, - resource_uri=redshift_cluster.clusterUri, - resource_type=models.Dataset.__name__, - ) - return redshift_cluster - - @staticmethod - def __validate_cluster_data(data, uri): - if not data: - raise exceptions.RequiredParameter('input') - if not data.get('SamlGroupName'): - raise exceptions.RequiredParameter('SamlGroupName') - if not uri: - raise exceptions.RequiredParameter('environmentUri') - if not data.get('label'): - raise exceptions.RequiredParameter('name') - - @staticmethod - def validate_none_existing_cluster(session, cluster_name, environment): - existing_cluster = ( - session.query(models.RedshiftCluster) - .filter( - and_( - models.RedshiftCluster.environmentUri == environment.environmentUri, - models.RedshiftCluster.clusterName == cluster_name, - ) - ) - .first() - ) - if existing_cluster: - raise exceptions.ResourceAlreadyExists( - 'Create Redshift cluster', - f'Redshift Cluster {cluster_name} ' - f'is already assigned to this environment {environment.name}', - ) - - @staticmethod - def update(session, context, cluster_input, clusterUri): - cluster = session.query(models.RedshiftCluster).get(clusterUri) - if not cluster: - raise exceptions.ObjectNotFound('RedshiftCluster', clusterUri) - if 'name' in cluster_input.keys(): - cluster.name = cluster_input.get('name') - if 'description' in cluster_input.keys(): - cluster.description = cluster_input.get('description') - return cluster - - @staticmethod - def get_redshift_cluster_by_uri(session, uri) -> models.RedshiftCluster: - if not uri: - raise exceptions.RequiredParameter('ClusterUri') - cluster = session.query(models.RedshiftCluster).get(uri) - if not cluster: - raise exceptions.ObjectNotFound('RedshiftCluster', uri) - return cluster - - @staticmethod - @has_resource_perm(permissions.LIST_REDSHIFT_CLUSTER_DATASETS) - def list_available_datasets( - session, username, groups, uri: str, data: dict = None, check_perm=None - ): - cluster: models.RedshiftCluster = RedshiftCluster.get_redshift_cluster_by_uri( - session, uri - ) - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() - - shared = ( - session.query( - models.ShareObject.datasetUri.label('datasetUri'), - literal(cluster.clusterUri).label('clusterUri'), - ) - .join( - models.RedshiftCluster, - models.RedshiftCluster.environmentUri - == models.ShareObject.environmentUri, - ) - .filter( - and_( - models.RedshiftCluster.clusterUri == cluster.clusterUri, - models.ShareObjectItem.status.in_(share_item_shared_states), - or_( - models.ShareObject.owner == username, - models.ShareObject.principalId.in_(groups), - ), - ) - ) - .group_by(models.ShareObject.datasetUri, models.RedshiftCluster.clusterUri) - ) - created = ( - session.query( - 
models.Dataset.datasetUri.label('datasetUri'), - models.RedshiftCluster.clusterUri.label('clusterUri'), - ) - .filter( - and_( - or_( - models.Dataset.owner == username, - models.Dataset.SamlAdminGroupName.in_(groups), - ), - models.RedshiftCluster.clusterUri == cluster.clusterUri, - models.Dataset.environmentUri - == models.RedshiftCluster.environmentUri, - ) - ) - .group_by(models.Dataset.datasetUri, models.RedshiftCluster.clusterUri) - ) - all_group_datasets_sub_query = shared.union(created).subquery( - 'all_group_datasets_sub_query' - ) - query = ( - session.query(models.Dataset) - .join( - all_group_datasets_sub_query, - models.Dataset.datasetUri == all_group_datasets_sub_query.c.datasetUri, - ) - .outerjoin( - models.RedshiftClusterDataset, - and_( - models.RedshiftClusterDataset.datasetUri - == models.Dataset.datasetUri, - models.RedshiftClusterDataset.clusterUri == cluster.clusterUri, - ), - ) - .filter( - and_( - all_group_datasets_sub_query.c.clusterUri == cluster.clusterUri, - models.RedshiftClusterDataset.datasetUri.is_(None), - models.Dataset.deleted.is_(None), - ) - ) - ) - if data.get('term'): - term = data.get('term') - query = query.filter( - or_( - models.Dataset.label.ilike('%' + term + '%'), - models.Dataset.tags.any(term), - models.Dataset.topics.any(term), - ) - ) - return paginate( - query, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() - - @staticmethod - @has_resource_perm(permissions.LIST_REDSHIFT_CLUSTER_DATASETS) - def list_cluster_datasets( - session, username, groups, uri: str, data: dict = None, check_perm=None - ): - query = ( - session.query(models.Dataset) - .join( - models.RedshiftClusterDataset, - models.Dataset.datasetUri == models.RedshiftClusterDataset.datasetUri, - ) - .filter( - models.RedshiftClusterDataset.clusterUri == uri, - ) - ) - if data.get('term'): - term = data.get('term') - query = query.filter( - or_( - models.Dataset.label.ilike('%' + term + '%'), - models.Dataset.tags.any(term), - models.Dataset.topics.any(term), - ) - ) - return paginate( - query, page=data.get('page', 1), page_size=data.get('pageSize', 10) - ).to_dict() - - @staticmethod - @has_resource_perm(permissions.LIST_REDSHIFT_CLUSTER_DATASETS) - def list_available_cluster_tables( - session, username, groups, uri: str, data: dict = None, check_perm=None - ): - cluster: models.RedshiftCluster = RedshiftCluster.get_redshift_cluster_by_uri( - session, uri - ) - share_item_shared_states = api.ShareItemSM.get_share_item_shared_states() - - shared = ( - session.query( - models.ShareObject.datasetUri.label('datasetUri'), - models.ShareObjectItem.itemUri.label('tableUri'), - literal(cluster.clusterUri).label('clusterUri'), - ) - .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, - ) - .join( - models.RedshiftCluster, - models.RedshiftCluster.environmentUri - == models.ShareObject.environmentUri, - ) - .filter( - and_( - models.RedshiftCluster.clusterUri == cluster.clusterUri, - models.ShareObjectItem.status.in_(share_item_shared_states), - or_( - models.ShareObject.owner == username, - models.ShareObject.principalId.in_(groups), - ), - ) - ) - .group_by( - models.ShareObject.datasetUri, - models.ShareObjectItem.itemUri, - models.RedshiftCluster.clusterUri, - ) - ) - created = ( - session.query( - models.DatasetTable.datasetUri.label('datasetUri'), - models.DatasetTable.tableUri.label('tableUri'), - models.RedshiftCluster.clusterUri.label('clusterUri'), - ) - .join( - models.Dataset, - 
models.DatasetTable.datasetUri == models.Dataset.datasetUri, - ) - .filter( - and_( - or_( - models.Dataset.owner == username, - models.Dataset.SamlAdminGroupName.in_(groups), - ), - models.RedshiftCluster.clusterUri == cluster.clusterUri, - models.Dataset.environmentUri - == models.RedshiftCluster.environmentUri, - ) - ) - .group_by( - models.DatasetTable.datasetUri, - models.DatasetTable.tableUri, - models.RedshiftCluster.clusterUri, - ) - ) - all_group_tables_sub_query = shared.union(created).subquery( - 'all_group_tables_sub_query' - ) - query = ( - session.query(models.DatasetTable) - .join( - all_group_tables_sub_query, - all_group_tables_sub_query.c.tableUri == models.DatasetTable.tableUri, - ) - .filter( - models.RedshiftCluster.clusterUri == cluster.clusterUri, - ) - ) - return paginate( - query, page=data.get('page', 1), page_size=data.get('pageSize', 20) - ).to_dict() - - @staticmethod - @has_resource_perm(permissions.GET_REDSHIFT_CLUSTER) - def get_cluster(session, username, groups, uri, data=None, check_perm=True): - cluster = RedshiftCluster.get_redshift_cluster_by_uri(session, uri) - return cluster - - @staticmethod - @has_resource_perm(permissions.ADD_DATASET_TO_REDSHIFT_CLUSTER) - def add_dataset(session, username, groups, uri, data=None, check_perm=True): - cluster = RedshiftCluster.get_redshift_cluster_by_uri(session, uri) - - if cluster.status != 'available': - raise exceptions.AWSResourceNotAvailable( - action='ADD DATASET TO REDSHIFT CLUSTER', - message=f'Cluster {cluster.name} is not on available state ({cluster.status})', - ) - - dataset = Dataset.get_dataset_by_uri(session, dataset_uri=data['datasetUri']) - - exists = session.query(models.RedshiftClusterDataset).get( - (uri, data['datasetUri']) - ) - if exists: - raise exceptions.ResourceAlreadyExists( - action='ADD DATASET TO REDSHIFT CLUSTER', - message=f'Dataset {dataset.name} is already loaded to cluster {cluster.name}', - ) - - linked_dataset = models.RedshiftClusterDataset( - clusterUri=uri, datasetUri=data['datasetUri'] - ) - session.add(linked_dataset) - - return cluster, dataset - - @staticmethod - @has_resource_perm(permissions.REMOVE_DATASET_FROM_REDSHIFT_CLUSTER) - def remove_dataset_from_cluster( - session, username, groups, uri, data=None, check_perm=True - ): - cluster = RedshiftCluster.get_redshift_cluster_by_uri(session, uri) - session.query(models.RedshiftClusterDatasetTable).filter( - and_( - models.RedshiftClusterDatasetTable.clusterUri == uri, - models.RedshiftClusterDatasetTable.datasetUri == data['datasetUri'], - ) - ).delete() - session.commit() - - dataset = None - exists = session.query(models.RedshiftClusterDataset).get( - (uri, data['datasetUri']) - ) - if exists: - session.delete(exists) - dataset = session.query(models.Dataset).get(data['datasetUri']) - if not dataset: - raise exceptions.ObjectNotFound('Dataset', data['datasetUri']) - - return cluster, dataset - - @staticmethod - def list_all_cluster_datasets(session, clusterUri): - cluster_datasets = ( - session.query(models.RedshiftClusterDataset) - .filter( - models.RedshiftClusterDataset.datasetUri.isnot(None), - models.RedshiftClusterDataset.clusterUri == clusterUri, - ) - .all() - ) - return cluster_datasets - - @staticmethod - def get_cluster_dataset( - session, clusterUri, datasetUri - ) -> models.RedshiftClusterDataset: - cluster_dataset = ( - session.query(models.RedshiftClusterDataset) - .filter( - and_( - models.RedshiftClusterDataset.clusterUri == clusterUri, - models.RedshiftClusterDataset.datasetUri == datasetUri, - ) - 
) - .first() - ) - if not cluster_dataset: - raise Exception( - f'Cluster {clusterUri} is not associated to dataset {datasetUri}' - ) - return cluster_dataset - - @staticmethod - def get_cluster_dataset_table( - session, clusterUri, datasetUri, tableUri - ) -> models.RedshiftClusterDatasetTable: - cluster_dataset_table = ( - session.query(models.RedshiftClusterDatasetTable) - .filter( - and_( - models.RedshiftClusterDatasetTable.clusterUri == clusterUri, - models.RedshiftClusterDatasetTable.datasetUri == datasetUri, - models.RedshiftClusterDatasetTable.tableUri == tableUri, - ) - ) - .first() - ) - if not cluster_dataset_table: - log.error(f'Table {tableUri} copy is not enabled on cluster') - return cluster_dataset_table - - @staticmethod - @has_resource_perm(permissions.ENABLE_REDSHIFT_TABLE_COPY) - def enable_copy_table( - session, username, groups, uri, data=None, check_perm=True - ) -> models.RedshiftClusterDatasetTable: - cluster = RedshiftCluster.get_redshift_cluster_by_uri(session, uri) - table = DatasetTable.get_dataset_table_by_uri(session, data['tableUri']) - table = models.RedshiftClusterDatasetTable( - clusterUri=uri, - datasetUri=data['datasetUri'], - tableUri=data['tableUri'], - enabled=True, - schema=data['schema'] or f'datahub_{cluster.clusterUri}', - databaseName=cluster.databaseName, - dataLocation=f's3://{table.S3BucketName}/{data.get("dataLocation")}' - if data.get('dataLocation') - else table.S3Prefix, - ) - session.add(table) - session.commit() - return table - - @staticmethod - @has_resource_perm(permissions.DISABLE_REDSHIFT_TABLE_COPY) - def disable_copy_table( - session, username, groups, uri, data=None, check_perm=True - ) -> bool: - table = ( - session.query(models.RedshiftClusterDatasetTable) - .filter( - and_( - models.RedshiftClusterDatasetTable.clusterUri == uri, - models.RedshiftClusterDatasetTable.datasetUri == data['datasetUri'], - models.RedshiftClusterDatasetTable.tableUri == data['tableUri'], - ) - ) - .first() - ) - session.delete(table) - session.commit() - return True - - @staticmethod - @has_resource_perm(permissions.LIST_REDSHIFT_CLUSTER_DATASETS) - def list_copy_enabled_tables( - session, username, groups, uri, data=None, check_perm=True - ) -> [models.RedshiftClusterDatasetTable]: - q = ( - session.query(models.DatasetTable) - .join( - models.RedshiftClusterDatasetTable, - models.RedshiftClusterDatasetTable.tableUri - == models.DatasetTable.tableUri, - ) - .filter(models.RedshiftClusterDatasetTable.clusterUri == uri) - ) - if data.get('term'): - term = data.get('term') - q = q.filter( - models.DatasetTable.label.ilike('%' + term + '%'), - ) - return paginate( - q, page=data.get('page', 1), page_size=data.get('pageSize', 20) - ).to_dict() - - @staticmethod - def delete_all_cluster_linked_objects(session, clusterUri): - session.query(models.RedshiftClusterDatasetTable).filter( - and_( - models.RedshiftClusterDatasetTable.clusterUri == clusterUri, - ) - ).delete() - session.query(models.RedshiftClusterDataset).filter( - models.RedshiftClusterDataset.clusterUri == clusterUri, - ).delete() - return True diff --git a/backend/dataall/db/api/resource_policy.py b/backend/dataall/db/api/resource_policy.py deleted file mode 100644 index 9ca0f86b9..000000000 --- a/backend/dataall/db/api/resource_policy.py +++ /dev/null @@ -1,275 +0,0 @@ -import logging -from typing import Optional - -from sqlalchemy.sql import and_ - -from .. import exceptions -from .. import models -from . 
import Permission -from ..models.Permission import PermissionType - -logger = logging.getLogger(__name__) - - -class ResourcePolicy: - @staticmethod - def check_user_resource_permission( - session, username: str, groups: [str], resource_uri: str, permission_name: str - ): - resource_policy = ResourcePolicy.has_user_resource_permission( - session=session, - username=username, - groups=groups, - permission_name=permission_name, - resource_uri=resource_uri, - ) - if not resource_policy: - raise exceptions.ResourceUnauthorized( - username=username, - action=permission_name, - resource_uri=resource_uri, - ) - else: - return resource_policy - - @staticmethod - def has_user_resource_permission( - session, username: str, groups: [str], resource_uri: str, permission_name: str - ) -> Optional[models.ResourcePolicy]: - - if not username or not permission_name or not resource_uri: - return None - - policy: models.ResourcePolicy = ( - session.query(models.ResourcePolicy) - .join( - models.ResourcePolicyPermission, - models.ResourcePolicy.sid == models.ResourcePolicyPermission.sid, - ) - .join( - models.Permission, - models.Permission.permissionUri - == models.ResourcePolicyPermission.permissionUri, - ) - .filter( - and_( - models.ResourcePolicy.principalId.in_(groups), - models.ResourcePolicy.principalType == 'GROUP', - models.Permission.name == permission_name, - models.ResourcePolicy.resourceUri == resource_uri, - ) - ) - .first() - ) - - if not policy: - return None - else: - return policy - - @staticmethod - def has_group_resource_permission( - session, group_uri: str, resource_uri: str, permission_name: str - ) -> Optional[models.ResourcePolicy]: - - if not group_uri or not permission_name or not resource_uri: - return None - - policy: models.ResourcePolicy = ( - session.query(models.ResourcePolicy) - .join( - models.ResourcePolicyPermission, - models.ResourcePolicy.sid == models.ResourcePolicyPermission.sid, - ) - .join( - models.Permission, - models.Permission.permissionUri - == models.ResourcePolicyPermission.permissionUri, - ) - .filter( - and_( - models.ResourcePolicy.principalId == group_uri, - models.ResourcePolicy.principalType == 'GROUP', - models.Permission.name == permission_name, - models.ResourcePolicy.resourceUri == resource_uri, - ) - ) - .first() - ) - - if not policy: - return None - else: - return policy - - @staticmethod - def find_resource_policy( - session, group_uri: str, resource_uri: str - ) -> models.ResourcePolicy: - if not group_uri: - raise exceptions.RequiredParameter(param_name='group') - if not resource_uri: - raise exceptions.RequiredParameter(param_name='resource_uri') - resource_policy = ( - session.query(models.ResourcePolicy) - .filter( - and_( - models.ResourcePolicy.principalId == group_uri, - models.ResourcePolicy.resourceUri == resource_uri, - ) - ) - .first() - ) - return resource_policy - - @staticmethod - def attach_resource_policy( - session, - group: str, - permissions: [str], - resource_uri: str, - resource_type: str, - ) -> models.ResourcePolicy: - - ResourcePolicy.validate_attach_resource_policy_params( - group, permissions, resource_uri, resource_type - ) - - policy = ResourcePolicy.save_resource_policy( - session, group, resource_uri, resource_type - ) - - ResourcePolicy.add_permission_to_resource_policy( - session, group, permissions, resource_uri, policy - ) - - return policy - - @staticmethod - def delete_resource_policy( - session, - group: str, - resource_uri: str, - resource_type: str = None, - ) -> bool: - - 
ResourcePolicy.validate_delete_resource_policy_params(group, resource_uri) - policy = ResourcePolicy.find_resource_policy( - session, group_uri=group, resource_uri=resource_uri - ) - if policy: - for permission in policy.permissions: - session.delete(permission) - session.delete(policy) - session.commit() - - return True - - @staticmethod - def validate_attach_resource_policy_params( - group, permissions, resource_uri, resource_type - ): - if not group: - raise exceptions.RequiredParameter(param_name='group') - if not permissions: - raise exceptions.RequiredParameter(param_name='permissions') - if not resource_uri: - raise exceptions.RequiredParameter(param_name='resource_uri') - if not resource_type: - raise exceptions.RequiredParameter(param_name='resource_type') - - @staticmethod - def save_resource_policy(session, group, resource_uri, resource_type): - ResourcePolicy.validate_save_resource_policy_params( - group, resource_uri, resource_type - ) - policy = ResourcePolicy.find_resource_policy(session, group, resource_uri) - if not policy: - policy = models.ResourcePolicy( - principalId=group, - principalType='GROUP', - resourceUri=resource_uri, - resourceType=resource_type, - ) - session.add(policy) - session.commit() - return policy - - @staticmethod - def validate_save_resource_policy_params(group, resource_uri, resource_type): - if not group: - raise exceptions.RequiredParameter(param_name='group') - if not resource_uri: - raise exceptions.RequiredParameter(param_name='resource_uri') - if not resource_type: - raise exceptions.RequiredParameter(param_name='resource_type') - - @staticmethod - def add_permission_to_resource_policy( - session, group, permissions, resource_uri, policy - ): - ResourcePolicy.validate_add_permission_to_resource_policy_params( - group, permissions, policy, resource_uri - ) - - for permission in permissions: - if not ResourcePolicy.has_group_resource_permission( - session, - group_uri=group, - permission_name=permission, - resource_uri=resource_uri, - ): - ResourcePolicy.associate_permission_to_resource_policy( - session, policy, permission - ) - - @staticmethod - def validate_add_permission_to_resource_policy_params( - group, permissions, policy, resource_uri - ): - if not group: - raise exceptions.RequiredParameter(param_name='group') - if not permissions: - raise exceptions.RequiredParameter(param_name='permissions') - if not resource_uri: - raise exceptions.RequiredParameter(param_name='resource_uri') - if not policy: - raise exceptions.RequiredParameter(param_name='policy') - - @staticmethod - def validate_delete_resource_policy_params(group, resource_uri): - if not group: - raise exceptions.RequiredParameter(param_name='group') - if not resource_uri: - raise exceptions.RequiredParameter(param_name='resource_uri') - - @staticmethod - def associate_permission_to_resource_policy(session, policy, permission): - if not policy: - raise exceptions.RequiredParameter(param_name='policy') - if not permission: - raise exceptions.RequiredParameter(param_name='permission') - policy_permission = models.ResourcePolicyPermission( - sid=policy.sid, - permissionUri=Permission.get_permission_by_name( - session, permission, permission_type=PermissionType.RESOURCE.name - ).permissionUri, - ) - session.add(policy_permission) - session.commit() - - @staticmethod - def get_resource_policy_permissions(session, group_uri, resource_uri): - if not group_uri: - raise exceptions.RequiredParameter(param_name='group_uri') - if not resource_uri: - raise 
exceptions.RequiredParameter(param_name='resource_uri') - policy = ResourcePolicy.find_resource_policy( - session=session, - group_uri=group_uri, - resource_uri=resource_uri, - ) - permissions = [] - for p in policy.permissions: - permissions.append(p.permission) - return permissions diff --git a/backend/dataall/db/api/sgm_studio_notebook.py b/backend/dataall/db/api/sgm_studio_notebook.py deleted file mode 100644 index d947c5b0d..000000000 --- a/backend/dataall/db/api/sgm_studio_notebook.py +++ /dev/null @@ -1,150 +0,0 @@ -import logging - -from sqlalchemy import or_ -from sqlalchemy.orm import Query - -from .. import exceptions, permissions, paginate, models -from . import ( - has_tenant_perm, - has_resource_perm, - ResourcePolicy, - Environment, -) - -logger = logging.getLogger(__name__) - - -class SgmStudioNotebook: - @staticmethod - @has_tenant_perm(permissions.MANAGE_NOTEBOOKS) - @has_resource_perm(permissions.CREATE_SGMSTUDIO_NOTEBOOK) - def create_notebook(session, username, groups, uri, data=None, check_perm=None): - - SgmStudioNotebook.validate_params(data) - - Environment.check_group_environment_permission( - session=session, - username=username, - groups=groups, - uri=uri, - group=data['SamlAdminGroupName'], - permission_name=permissions.CREATE_SGMSTUDIO_NOTEBOOK, - ) - - env: models.Environment = data.get( - 'environment', Environment.get_environment_by_uri(session, uri) - ) - - sm_user_profile = models.SagemakerStudioUserProfile( - label=data.get('label', f'UserProfile-{username}'), - environmentUri=uri, - description=data.get('description', 'No description provided'), - sagemakerStudioUserProfileName=data.get('label', f'up-{username}'), - sagemakerStudioUserProfileStatus='PENDING', - sagemakerStudioDomainID=data['domain_id'], - AWSAccountId=env.AwsAccountId, - region=env.region, - RoleArn=env.EnvironmentDefaultIAMRoleArn, - owner=username, - SamlAdminGroupName=data['SamlAdminGroupName'], - tags=data.get('tags', []), - ) - session.add(sm_user_profile) - session.commit() - - ResourcePolicy.attach_resource_policy( - session=session, - group=data['SamlAdminGroupName'], - permissions=permissions.SGMSTUDIO_NOTEBOOK_ALL, - resource_uri=sm_user_profile.sagemakerStudioUserProfileUri, - resource_type=models.SagemakerStudioUserProfile.__name__, - ) - - if env.SamlGroupName != sm_user_profile.SamlAdminGroupName: - ResourcePolicy.attach_resource_policy( - session=session, - group=env.SamlGroupName, - permissions=permissions.SGMSTUDIO_NOTEBOOK_ALL, - resource_uri=sm_user_profile.sagemakerStudioUserProfileUri, - resource_type=models.SagemakerStudioUserProfile.__name__, - ) - - return sm_user_profile - - @staticmethod - def validate_params(data): - if not data: - raise exceptions.RequiredParameter('data') - if not data.get('environmentUri'): - raise exceptions.RequiredParameter('environmentUri') - if not data.get('label'): - raise exceptions.RequiredParameter('name') - - @staticmethod - def validate_group_membership( - session, environment_uri, notebook_group, username, groups - ): - if notebook_group and notebook_group not in groups: - raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_SGMSTUDIO_NOTEBOOK, - message=f'User: {username} is not a member of the team {notebook_group}', - ) - if notebook_group not in Environment.list_environment_groups( - session=session, - username=username, - groups=groups, - uri=environment_uri, - data=None, - check_perm=True, - ): - raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_SGMSTUDIO_NOTEBOOK, - message=f'Team: 
{notebook_group} is not a member of the environment {environment_uri}', - ) - - @staticmethod - def query_user_notebooks(session, username, groups, filter) -> Query: - query = session.query(models.SagemakerStudioUserProfile).filter( - or_( - models.SagemakerStudioUserProfile.owner == username, - models.SagemakerStudioUserProfile.SamlAdminGroupName.in_(groups), - ) - ) - if filter and filter.get('term'): - query = query.filter( - or_( - models.SagemakerStudioUserProfile.description.ilike( - filter.get('term') + '%%' - ), - models.SagemakerStudioUserProfile.label.ilike( - filter.get('term') + '%%' - ), - ) - ) - return query - - @staticmethod - def paginated_user_notebooks( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=SgmStudioNotebook.query_user_notebooks( - session, username, groups, data - ), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - @has_resource_perm(permissions.GET_SGMSTUDIO_NOTEBOOK) - def get_notebook(session, username, groups, uri, data=None, check_perm=True): - return SgmStudioNotebook.get_notebook_by_uri(session, uri) - - @staticmethod - def get_notebook_by_uri(session, uri) -> models.SagemakerStudioUserProfile: - if not uri: - raise exceptions.RequiredParameter('URI') - notebook = session.query(models.SagemakerStudioUserProfile).get(uri) - if not notebook: - raise exceptions.ObjectNotFound('SagemakerStudioUserProfile', uri) - return notebook diff --git a/backend/dataall/db/api/share_object.py b/backend/dataall/db/api/share_object.py deleted file mode 100644 index b41548a62..000000000 --- a/backend/dataall/db/api/share_object.py +++ /dev/null @@ -1,1433 +0,0 @@ -import logging - -from sqlalchemy import and_, or_, func, case - -from . import ( - has_resource_perm, - ResourcePolicy, - Environment, -) -from .. import api, utils -from .. import models, exceptions, permissions, paginate -from ..models.Enums import ShareObjectStatus, ShareItemStatus, ShareObjectActions, ShareItemActions, ShareableType, PrincipalType - -logger = logging.getLogger(__name__) - - -class Transition: - def __init__(self, name, transitions): - self._name = name - self._transitions = transitions - self._all_source_states = [*set([item for sublist in transitions.values() for item in sublist])] - self._all_target_states = [item for item in transitions.keys()] - - def validate_transition(self, prev_state): - if prev_state in self._all_target_states: - logger.info(f'Resource is already in target state ({prev_state}) in {self._all_target_states}') - return False - elif prev_state not in self._all_source_states: - raise exceptions.UnauthorizedOperation( - action=self._name, - message=f'This transition is not possible, {prev_state} cannot go to {self._all_target_states}. 
If there is a sharing or revoking in progress wait until it is complete and try again.', - ) - else: - return True - - def get_transition_target(self, prev_state): - if self.validate_transition(prev_state): - for target_state, list_prev_states in self._transitions.items(): - if prev_state in list_prev_states: - return target_state - else: - pass - else: - return prev_state - - -class ShareObjectSM: - def __init__(self, state): - self._state = state - self.transitionTable = { - ShareObjectActions.Submit.value: Transition( - name=ShareObjectActions.Submit.value, - transitions={ - ShareObjectStatus.Submitted.value: [ - ShareObjectStatus.Draft.value, - ShareObjectStatus.Rejected.value - ] - } - ), - ShareObjectActions.Approve.value: Transition( - name=ShareObjectActions.Approve.value, - transitions={ - ShareObjectStatus.Approved.value: [ - ShareObjectStatus.Submitted.value - ] - } - ), - ShareObjectActions.Reject.value: Transition( - name=ShareObjectActions.Reject.value, - transitions={ - ShareObjectStatus.Rejected.value: [ - ShareObjectStatus.Submitted.value - ] - } - ), - ShareObjectActions.RevokeItems.value: Transition( - name=ShareObjectActions.RevokeItems.value, - transitions={ - ShareObjectStatus.Revoked.value: [ - ShareObjectStatus.Draft.value, - ShareObjectStatus.Submitted.value, - ShareObjectStatus.Rejected.value, - ShareObjectStatus.Processed.value - ] - } - ), - ShareObjectActions.Start.value: Transition( - name=ShareObjectActions.Start.value, - transitions={ - ShareObjectStatus.Share_In_Progress.value: [ - ShareObjectStatus.Approved.value - ], - ShareObjectStatus.Revoke_In_Progress.value: [ - ShareObjectStatus.Revoked.value - ] - } - ), - ShareObjectActions.Finish.value: Transition( - name=ShareObjectActions.Finish.value, - transitions={ - ShareObjectStatus.Processed.value: [ - ShareObjectStatus.Share_In_Progress.value, - ShareObjectStatus.Revoke_In_Progress.value - ], - } - ), - ShareObjectActions.FinishPending.value: Transition( - name=ShareObjectActions.FinishPending.value, - transitions={ - ShareObjectStatus.Draft.value: [ - ShareObjectStatus.Revoke_In_Progress.value, - ], - } - ), - ShareObjectActions.Delete.value: Transition( - name=ShareObjectActions.Delete.value, - transitions={ - ShareObjectStatus.Deleted.value: [ - ShareObjectStatus.Rejected.value, - ShareObjectStatus.Draft.value, - ShareObjectStatus.Submitted.value, - ShareObjectStatus.Processed.value - ] - } - ), - ShareItemActions.AddItem.value: Transition( - name=ShareItemActions.AddItem.value, - transitions={ - ShareObjectStatus.Draft.value: [ - ShareObjectStatus.Submitted.value, - ShareObjectStatus.Rejected.value, - ShareObjectStatus.Processed.value - ] - } - ), - } - - def run_transition(self, transition): - trans = self.transitionTable[transition] - new_state = trans.get_transition_target(self._state) - return new_state - - def update_state(self, session, share, new_state): - logger.info(f"Updating share object {share.shareUri} in DB from {self._state} to state {new_state}") - ShareObject.update_share_object_status( - session=session, - shareUri=share.shareUri, - status=new_state - ) - self._state = new_state - return True - - @staticmethod - def get_share_object_refreshable_states(): - return [ - ShareObjectStatus.Approved.value, - ShareObjectStatus.Revoked.value - ] - - -class ShareItemSM: - def __init__(self, state): - self._state = state - self.transitionTable = { - ShareItemActions.AddItem.value: Transition( - name=ShareItemActions.AddItem.value, - transitions={ - ShareItemStatus.PendingApproval.value: 
[ShareItemStatus.Deleted.value] - } - ), - ShareObjectActions.Submit.value: Transition( - name=ShareObjectActions.Submit.value, - transitions={ - ShareItemStatus.PendingApproval.value: [ - ShareItemStatus.Share_Rejected.value, - ShareItemStatus.Share_Failed.value - ], - ShareItemStatus.Revoke_Approved.value: [ShareItemStatus.Revoke_Approved.value], - ShareItemStatus.Revoke_Failed.value: [ShareItemStatus.Revoke_Failed.value], - ShareItemStatus.Share_Approved.value: [ShareItemStatus.Share_Approved.value], - ShareItemStatus.Share_Succeeded.value: [ShareItemStatus.Share_Succeeded.value], - ShareItemStatus.Revoke_Succeeded.value: [ShareItemStatus.Revoke_Succeeded.value], - ShareItemStatus.Share_In_Progress.value: [ShareItemStatus.Share_In_Progress.value], - ShareItemStatus.Revoke_In_Progress.value: [ShareItemStatus.Revoke_In_Progress.value], - } - ), - ShareObjectActions.Approve.value: Transition( - name=ShareObjectActions.Approve.value, - transitions={ - ShareItemStatus.Share_Approved.value: [ShareItemStatus.PendingApproval.value], - ShareItemStatus.Revoke_Approved.value: [ShareItemStatus.Revoke_Approved.value], - ShareItemStatus.Revoke_Failed.value: [ShareItemStatus.Revoke_Failed.value], - ShareItemStatus.Share_Succeeded.value: [ShareItemStatus.Share_Succeeded.value], - ShareItemStatus.Revoke_Succeeded.value: [ShareItemStatus.Revoke_Succeeded.value], - ShareItemStatus.Share_In_Progress.value: [ShareItemStatus.Share_In_Progress.value], - ShareItemStatus.Revoke_In_Progress.value: [ShareItemStatus.Revoke_In_Progress.value], - } - ), - ShareObjectActions.Reject.value: Transition( - name=ShareObjectActions.Reject.value, - transitions={ - ShareItemStatus.Share_Rejected.value: [ShareItemStatus.PendingApproval.value], - ShareItemStatus.Revoke_Approved.value: [ShareItemStatus.Revoke_Approved.value], - ShareItemStatus.Revoke_Failed.value: [ShareItemStatus.Revoke_Failed.value], - ShareItemStatus.Share_Succeeded.value: [ShareItemStatus.Share_Succeeded.value], - ShareItemStatus.Revoke_Succeeded.value: [ShareItemStatus.Revoke_Succeeded.value], - ShareItemStatus.Share_In_Progress.value: [ShareItemStatus.Share_In_Progress.value], - ShareItemStatus.Revoke_In_Progress.value: [ShareItemStatus.Revoke_In_Progress.value], - } - ), - ShareObjectActions.Start.value: Transition( - name=ShareObjectActions.Start.value, - transitions={ - ShareItemStatus.Share_In_Progress.value: [ShareItemStatus.Share_Approved.value], - ShareItemStatus.Revoke_In_Progress.value: [ShareItemStatus.Revoke_Approved.value], - } - ), - ShareItemActions.Success.value: Transition( - name=ShareItemActions.Success.value, - transitions={ - ShareItemStatus.Share_Succeeded.value: [ShareItemStatus.Share_In_Progress.value], - ShareItemStatus.Revoke_Succeeded.value: [ShareItemStatus.Revoke_In_Progress.value], - } - ), - ShareItemActions.Failure.value: Transition( - name=ShareItemActions.Failure.value, - transitions={ - ShareItemStatus.Share_Failed.value: [ShareItemStatus.Share_In_Progress.value], - ShareItemStatus.Revoke_Failed.value: [ShareItemStatus.Revoke_In_Progress.value], - } - ), - ShareItemActions.RemoveItem.value: Transition( - name=ShareItemActions.RemoveItem.value, - transitions={ - ShareItemStatus.Deleted.value: [ - ShareItemStatus.PendingApproval.value, - ShareItemStatus.Share_Rejected.value, - ShareItemStatus.Share_Failed.value, - ShareItemStatus.Revoke_Succeeded.value - ] - } - ), - ShareObjectActions.RevokeItems.value: Transition( - name=ShareObjectActions.RevokeItems.value, - transitions={ - ShareItemStatus.Revoke_Approved.value: [ - 
ShareItemStatus.Share_Succeeded.value, - ShareItemStatus.Revoke_Failed.value, - ShareItemStatus.Revoke_Approved.value - ] - } - ), - ShareObjectActions.Delete.value: Transition( - name=ShareObjectActions.Delete.value, - transitions={ - ShareItemStatus.Deleted.value: [ - ShareItemStatus.PendingApproval.value, - ShareItemStatus.Share_Rejected.value, - ShareItemStatus.Share_Failed.value, - ShareItemStatus.Revoke_Succeeded.value - ] - } - ) - } - - def run_transition(self, transition): - trans = self.transitionTable[transition] - new_state = trans.get_transition_target(self._state) - return new_state - - def update_state(self, session, share_uri, new_state): - if share_uri and (new_state != self._state): - if new_state == ShareItemStatus.Deleted.value: - logger.info(f"Deleting share items in DB in {self._state} state") - ShareObject.delete_share_item_status_batch( - session=session, - share_uri=share_uri, - status=self._state - ) - else: - logger.info(f"Updating share items in DB from {self._state} to state {new_state}") - ShareObject.update_share_item_status_batch( - session=session, - share_uri=share_uri, - old_status=self._state, - new_status=new_state - ) - self._state = new_state - else: - logger.info(f"Share Items in DB already in target state {new_state} or no update is required") - return True - - def update_state_single_item(self, session, share_item, new_state): - logger.info(f"Updating share item in DB {share_item.shareItemUri} status to {new_state}") - ShareObject.update_share_item_status( - session=session, - uri=share_item.shareItemUri, - status=new_state - ) - self._state = new_state - return True - - @staticmethod - def get_share_item_shared_states(): - return [ - ShareItemStatus.Share_Succeeded.value, - ShareItemStatus.Share_In_Progress.value, - ShareItemStatus.Revoke_Failed.value, - ShareItemStatus.Revoke_In_Progress.value, - ShareItemStatus.Revoke_Approved.value - ] - - @staticmethod - def get_share_item_revokable_states(): - return [ - ShareItemStatus.Share_Succeeded.value, - ShareItemStatus.Revoke_Failed.value, - ] - - -class ShareObject: - @staticmethod - @has_resource_perm(permissions.CREATE_SHARE_OBJECT) - def create_share_object( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.ShareObject: - if not data: - raise exceptions.RequiredParameter(data) - if not data.get('principalId'): - raise exceptions.RequiredParameter('principalId') - if not data.get('datasetUri'): - raise exceptions.RequiredParameter('datasetUri') - - principalId = data['principalId'] - principalType = data['principalType'] - datasetUri = data['datasetUri'] - environmentUri = uri - groupUri = data['groupUri'] - itemUri = data.get('itemUri') - itemType = data.get('itemType') - requestPurpose = data.get('requestPurpose') - - dataset: models.Dataset = data.get( - 'dataset', api.Dataset.get_dataset_by_uri(session, datasetUri) - ) - environment: models.Environment = data.get( - 'environment', - api.Environment.get_environment_by_uri(session, environmentUri), - ) - - if environment.region != dataset.region: - raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_SHARE_OBJECT, - message=f'Requester Team {groupUri} works in region {environment.region} and the requested dataset is stored in region {dataset.region}', - ) - - if principalType == models.PrincipalType.ConsumptionRole.value: - consumption_role: models.ConsumptionRole = api.Environment.get_environment_consumption_role( - session, - principalId, - environmentUri - ) - 
principalIAMRoleName = consumption_role.IAMRoleName - else: - env_group: models.EnvironmentGroup = api.Environment.get_environment_group( - session, - groupUri, - environmentUri - ) - principalIAMRoleName = env_group.environmentIAMRoleName - - if ( - dataset.stewards == groupUri or dataset.SamlAdminGroupName == groupUri - ) and environment.environmentUri == dataset.environmentUri and principalType == models.PrincipalType.Group.value: - raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_SHARE_OBJECT, - message=f'Team: {groupUri} is managing the dataset {dataset.name}', - ) - - ShareObject.validate_group_membership( - session=session, - username=username, - groups=groups, - share_object_group=groupUri, - environment_uri=uri, - ) - - share: models.ShareObject = ( - session.query(models.ShareObject) - .filter( - and_( - models.ShareObject.datasetUri == datasetUri, - models.ShareObject.principalId == principalId, - models.ShareObject.environmentUri == environmentUri, - models.ShareObject.groupUri == groupUri, - ) - ) - .first() - ) - if not share: - share = models.ShareObject( - datasetUri=dataset.datasetUri, - environmentUri=environment.environmentUri, - owner=username, - groupUri=groupUri, - principalId=principalId, - principalType=principalType, - principalIAMRoleName=principalIAMRoleName, - status=ShareObjectStatus.Draft.value, - requestPurpose=requestPurpose - ) - session.add(share) - session.commit() - - if itemUri: - item = None - if itemType: - if itemType == ShareableType.StorageLocation.value: - item = session.query(models.DatasetStorageLocation).get(itemUri) - if itemType == ShareableType.Table.value: - item = session.query(models.DatasetTable).get(itemUri) - - share_item = ( - session.query(models.ShareObjectItem) - .filter( - and_( - models.ShareObjectItem.shareUri == share.shareUri, - models.ShareObjectItem.itemUri == itemUri, - ) - ) - .first() - ) - S3AccessPointName = utils.slugify( - share.datasetUri + '-' + share.principalId, - max_length=50, lowercase=True, regex_pattern='[^a-zA-Z0-9-]', separator='-' - ) - - if not share_item and item: - new_share_item: models.ShareObjectItem = models.ShareObjectItem( - shareUri=share.shareUri, - itemUri=itemUri, - itemType=itemType, - itemName=item.name, - status=ShareItemStatus.PendingApproval.value, - owner=username, - GlueDatabaseName=dataset.GlueDatabaseName - if itemType == ShareableType.Table.value - else '', - GlueTableName=item.GlueTableName - if itemType == ShareableType.Table.value - else '', - S3AccessPointName=S3AccessPointName - if itemType == ShareableType.StorageLocation.value - else '', - ) - session.add(new_share_item) - - activity = models.Activity( - action='SHARE_OBJECT:CREATE', - label='SHARE_OBJECT:CREATE', - owner=username, - summary=f'{username} created a share object for the {dataset.name} in {environment.name} for the principal: {principalId}', - targetUri=dataset.datasetUri, - targetType='dataset', - ) - session.add(activity) - - # Attaching REQUESTER permissions to: - # requester group (groupUri) - # environment.SamlGroupName (if not dataset admins) - ResourcePolicy.attach_resource_policy( - session=session, - group=groupUri, - permissions=permissions.SHARE_OBJECT_REQUESTER, - resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, - ) - - # Attaching APPROVER permissions to: - # dataset.stewards (includes the dataset Admins) - ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.SamlAdminGroupName, - permissions=permissions.SHARE_OBJECT_APPROVER, - 
resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, - ) - if dataset.stewards != dataset.SamlAdminGroupName: - ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.stewards, - permissions=permissions.SHARE_OBJECT_APPROVER, - resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, - ) - return share - - @staticmethod - def validate_group_membership( - session, environment_uri, share_object_group, username, groups - ): - if share_object_group and share_object_group not in groups: - raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_SHARE_OBJECT, - message=f'User: {username} is not a member of the team {share_object_group}', - ) - if share_object_group not in Environment.list_environment_groups( - session=session, - username=username, - groups=groups, - uri=environment_uri, - data=None, - check_perm=True, - ): - raise exceptions.UnauthorizedOperation( - action=permissions.CREATE_SHARE_OBJECT, - message=f'Team: {share_object_group} is not a member of the environment {environment_uri}', - ) - - @staticmethod - @has_resource_perm(permissions.SUBMIT_SHARE_OBJECT) - def submit_share_object( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.ShareObject: - share = ShareObject.get_share_by_uri(session, uri) - dataset = api.Dataset.get_dataset_by_uri(session, share.datasetUri) - share_items_states = ShareObject.get_share_items_states(session, uri) - - valid_states = [ShareItemStatus.PendingApproval.value] - valid_share_items_states = [x for x in valid_states if x in share_items_states] - - if valid_share_items_states == []: - raise exceptions.ShareItemsFound( - action='Submit Share Object', - message='The request is empty of pending items. 
Add items to share request.', - ) - - Share_SM = ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(ShareObjectActions.Submit.value) - - for item_state in share_items_states: - Item_SM = ShareItemSM(item_state) - new_state = Item_SM.run_transition(ShareObjectActions.Submit.value) - Item_SM.update_state(session, share.shareUri, new_state) - - Share_SM.update_state(session, share, new_share_state) - - api.Notification.notify_share_object_submission( - session, username, dataset, share - ) - return share - - @staticmethod - @has_resource_perm(permissions.APPROVE_SHARE_OBJECT) - def approve_share_object( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.ShareObject: - share = ShareObject.get_share_by_uri(session, uri) - dataset = api.Dataset.get_dataset_by_uri(session, share.datasetUri) - share_items_states = ShareObject.get_share_items_states(session, uri) - - Share_SM = ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(ShareObjectActions.Approve.value) - - for item_state in share_items_states: - Item_SM = ShareItemSM(item_state) - new_state = Item_SM.run_transition(ShareObjectActions.Approve.value) - Item_SM.update_state(session, share.shareUri, new_state) - - Share_SM.update_state(session, share, new_share_state) - - # GET TABLES SHARED AND APPROVE SHARE FOR EACH TABLE - share_table_items = session.query(models.ShareObjectItem).filter( - ( - and_( - models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.itemType == ShareableType.Table.value - ) - ) - ).all() - for table in share_table_items: - ResourcePolicy.attach_resource_policy( - session=session, - group=share.principalId, - permissions=permissions.DATASET_TABLE_READ, - resource_uri=table.itemUri, - resource_type=models.DatasetTable.__name__, - ) - - share.rejectPurpose = "" - session.commit() - - api.Notification.notify_share_object_approval(session, username, dataset, share) - return share - - @staticmethod - @has_resource_perm(permissions.SUBMIT_SHARE_OBJECT) - def update_share_request_purpose( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.ShareObject: - share = ShareObject.get_share_by_uri(session, uri) - share.requestPurpose = data.get("requestPurpose") - session.commit() - return True - - @staticmethod - @has_resource_perm(permissions.REJECT_SHARE_OBJECT) - def update_share_reject_purpose( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.ShareObject: - share = ShareObject.get_share_by_uri(session, uri) - share.rejectPurpose = data.get("rejectPurpose") - session.commit() - return True - - @staticmethod - @has_resource_perm(permissions.REJECT_SHARE_OBJECT) - def reject_share_object( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.ShareObject: - - share = ShareObject.get_share_by_uri(session, uri) - dataset = api.Dataset.get_dataset_by_uri(session, share.datasetUri) - share_items_states = ShareObject.get_share_items_states(session, uri) - Share_SM = ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(ShareObjectActions.Reject.value) - - for item_state in share_items_states: - Item_SM = ShareItemSM(item_state) - new_state = Item_SM.run_transition(ShareObjectActions.Reject.value) - Item_SM.update_state(session, share.shareUri, new_state) - - Share_SM.update_state(session, share, 
new_share_state) - - ResourcePolicy.delete_resource_policy( - session=session, - group=share.groupUri, - resource_uri=dataset.datasetUri, - ) - - # Update Reject Purpose - share.rejectPurpose = data.get("rejectPurpose") - session.commit() - - api.Notification.notify_share_object_rejection(session, username, dataset, share) - return share - - @staticmethod - @has_resource_perm(permissions.GET_SHARE_OBJECT) - def revoke_items_share_object( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.ShareObject: - - share = ShareObject.get_share_by_uri(session, uri) - dataset = api.Dataset.get_dataset_by_uri(session, share.datasetUri) - revoked_items_states = ShareObject.get_share_items_states(session, uri, data.get("revokedItemUris")) - revoked_items = [ShareObject.get_share_item_by_uri(session, uri) for uri in data.get("revokedItemUris")] - - if revoked_items_states == []: - raise exceptions.ShareItemsFound( - action='Revoke Items from Share Object', - message='Nothing to be revoked.', - ) - - Share_SM = ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(ShareObjectActions.RevokeItems.value) - - for item_state in revoked_items_states: - Item_SM = ShareItemSM(item_state) - new_state = Item_SM.run_transition(ShareObjectActions.RevokeItems.value) - for item in revoked_items: - if item.status == item_state: - Item_SM.update_state_single_item(session, item, new_state) - - Share_SM.update_state(session, share, new_share_state) - - ResourcePolicy.delete_resource_policy( - session=session, - group=share.groupUri, - resource_uri=dataset.datasetUri, - ) - api.Notification.notify_share_object_rejection(session, username, dataset, share) - return share - - @staticmethod - @has_resource_perm(permissions.GET_SHARE_OBJECT) - def get_share_object( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ): - share = session.query(models.ShareObject).get(uri) - if not share: - raise exceptions.ObjectNotFound('Share', uri) - - return share - - @staticmethod - @has_resource_perm(permissions.GET_SHARE_OBJECT) - def get_share_item( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ): - share_item: models.ShareObjectItem = data.get( - 'share_item', - ShareObject.get_share_item_by_uri(session, data['shareItemUri']), - ) - if share_item.itemType == ShareableType.Table.value: - return session.query(models.DatasetTable).get(share_item.itemUri) - if share_item.itemType == ShareableType.StorageLocation: - return session.Query(models.DatasetStorageLocation).get(share_item.itemUri) - - @staticmethod - def get_share_by_uri(session, uri): - share = session.query(models.ShareObject).get(uri) - if not share: - raise exceptions.ObjectNotFound('Share', uri) - return share - - @staticmethod - def get_share_by_dataset_attributes(session, dataset_uri, dataset_owner): - share: models.ShareObject = ( - session.query(models.ShareObject) - .filter(models.ShareObject.datasetUri == dataset_uri) - .filter(models.ShareObject.owner == dataset_owner) - .first() - ) - return share - - @staticmethod - @has_resource_perm(permissions.ADD_ITEM) - def add_share_object_item( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.ShareObjectItem: - itemType = data.get('itemType') - itemUri = data.get('itemUri') - item = None - share: models.ShareObject = session.query(models.ShareObject).get(uri) 
- dataset: models.Dataset = session.query(models.Dataset).get(share.datasetUri) - target_environment: models.Environment = session.query(models.Environment).get( - share.environmentUri - ) - - Share_SM = ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(ShareItemActions.AddItem.value) - Share_SM.update_state(session, share, new_share_state) - - if itemType == ShareableType.Table.value: - item: models.DatasetTable = session.query(models.DatasetTable).get(itemUri) - if item and item.region != target_environment.region: - raise exceptions.UnauthorizedOperation( - action=permissions.ADD_ITEM, - message=f'Lake Formation cross region sharing is not supported. ' - f'Table {item.GlueTableName} is in {item.region} and target environment ' - f'{target_environment.name} is in {target_environment.region} ', - ) - - elif itemType == ShareableType.StorageLocation.value: - item = session.query(models.DatasetStorageLocation).get(itemUri) - - if not item: - raise exceptions.ObjectNotFound('ShareObjectItem', itemUri) - - shareItem: models.ShareObjectItem = ( - session.query(models.ShareObjectItem) - .filter( - and_( - models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.itemUri == itemUri, - ) - ) - .first() - ) - S3AccessPointName = utils.slugify( - share.datasetUri + '-' + share.principalId, - max_length=50, lowercase=True, regex_pattern='[^a-zA-Z0-9-]', separator='-' - ) - logger.info(f"S3AccessPointName={S3AccessPointName}") - - if not shareItem: - shareItem = models.ShareObjectItem( - shareUri=uri, - itemUri=itemUri, - itemType=itemType, - itemName=item.name, - status=ShareItemStatus.PendingApproval.value, - owner=username, - GlueDatabaseName=dataset.GlueDatabaseName - if itemType == ShareableType.Table.value - else '', - GlueTableName=item.GlueTableName - if itemType == ShareableType.Table.value - else '', - S3AccessPointName=S3AccessPointName - if itemType == ShareableType.StorageLocation.value - else '', - ) - session.add(shareItem) - - return shareItem - - @staticmethod - @has_resource_perm(permissions.REMOVE_ITEM) - def remove_share_object_item( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> bool: - - share_item: models.ShareObjectItem = data.get( - 'share_item', - ShareObject.get_share_item_by_uri(session, data['shareItemUri']), - ) - share: models.ShareObject = data.get( - 'share', - ShareObject.get_share_by_uri(session, uri), - ) - - Item_SM = ShareItemSM(share_item.status) - newstatus = Item_SM.run_transition(ShareItemActions.RemoveItem.value) - - session.delete(share_item) - return True - - @staticmethod - @has_resource_perm(permissions.DELETE_SHARE_OBJECT) - def delete_share_object(session, username, groups, uri, data=None, check_perm=None): - share: models.ShareObject = ShareObject.get_share_by_uri(session, uri) - share_items_states = ShareObject.get_share_items_states(session, uri) - shared_share_items_states = [x for x in ShareItemSM.get_share_item_shared_states() if x in share_items_states] - - Share_SM = ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(ShareObjectActions.Delete.value) - - for item_state in share_items_states: - Item_SM = ShareItemSM(item_state) - new_state = Item_SM.run_transition(ShareObjectActions.Delete.value) - Item_SM.update_state(session, share.shareUri, new_state) - - if shared_share_items_states: - raise exceptions.ShareItemsFound( - action='Delete share object', - message='There are shared items in this request. 
Revoke access to these items before deleting the request.', - ) - if new_share_state == ShareObjectStatus.Deleted.value: - session.delete(share) - - return True - - @staticmethod - def check_existing_shared_items(session, uri): - share: models.ShareObject = ShareObject.get_share_by_uri(session, uri) - share_item_shared_states = ShareItemSM.get_share_item_shared_states() - shared_items = session.query(models.ShareObjectItem).filter( - and_( - models.ShareObjectItem.shareUri == share.shareUri, - models.ShareObjectItem.status.in_(share_item_shared_states) - ) - ).all() - if shared_items: - return True - return False - - @staticmethod - def check_existing_shared_items_of_type(session, uri, item_type): - share: models.ShareObject = ShareObject.get_share_by_uri(session, uri) - share_item_shared_states = ShareItemSM.get_share_item_shared_states() - shared_items = session.query(models.ShareObjectItem).filter( - and_( - models.ShareObjectItem.shareUri == share.shareUri, - models.ShareObjectItem.itemType == item_type, - models.ShareObjectItem.status.in_(share_item_shared_states) - ) - ).all() - if shared_items: - return True - return False - - @staticmethod - def check_pending_share_items(session, uri): - share: models.ShareObject = ShareObject.get_share_by_uri(session, uri) - shared_items = session.query(models.ShareObjectItem).filter( - and_( - models.ShareObjectItem.shareUri == share.shareUri, - models.ShareObjectItem.status.in_([ShareItemStatus.PendingApproval.value]) - ) - ).all() - if shared_items: - return True - return False - - @staticmethod - def get_share_item_by_uri(session, uri): - share_item: models.ShareObjectItem = session.query(models.ShareObjectItem).get( - uri - ) - if not share_item: - raise exceptions.ObjectNotFound('ShareObjectItem', uri) - - return share_item - - @staticmethod - @has_resource_perm(permissions.LIST_SHARED_ITEMS) - def list_shared_items(session, username, groups, uri, data=None, check_perm=None): - share: models.ShareObject = ShareObject.get_share_by_uri(session, uri) - query = session.query(models.ShareObjectItem).filter( - models.ShareObjectItem.shareUri == share.shareUri, - ) - return paginate( - query, page=data.get('page', 1), page_size=data.get('pageSize', 5) - ).to_dict() - - @staticmethod - def list_shareable_items( - session, username, groups, uri, data=None, check_perm=None - ): - - share: models.ShareObject = data.get( - 'share', ShareObject.get_share_by_uri(session, uri) - ) - share_item_revokable_states = ShareItemSM.get_share_item_revokable_states() - datasetUri = share.datasetUri - - # All tables from dataset with a column isShared - # marking the table as part of the shareObject - tables = ( - session.query( - models.DatasetTable.tableUri.label('itemUri'), - func.coalesce('DatasetTable').label('itemType'), - models.DatasetTable.GlueTableName.label('itemName'), - models.DatasetTable.description.label('description'), - models.ShareObjectItem.shareItemUri.label('shareItemUri'), - models.ShareObjectItem.status.label('status'), - case( - [(models.ShareObjectItem.shareItemUri.isnot(None), True)], - else_=False, - ).label('isShared'), - ) - .outerjoin( - models.ShareObjectItem, - and_( - models.ShareObjectItem.shareUri == share.shareUri, - models.DatasetTable.tableUri == models.ShareObjectItem.itemUri, - ), - ) - .filter(models.DatasetTable.datasetUri == datasetUri) - ) - if data: - if data.get("isRevokable"): - tables = tables.filter(models.ShareObjectItem.status.in_(share_item_revokable_states)) - - # All folders from the dataset with a column 
isShared - # marking the folder as part of the shareObject - locations = ( - session.query( - models.DatasetStorageLocation.locationUri.label('itemUri'), - func.coalesce('DatasetStorageLocation').label('itemType'), - models.DatasetStorageLocation.S3Prefix.label('itemName'), - models.DatasetStorageLocation.description.label('description'), - models.ShareObjectItem.shareItemUri.label('shareItemUri'), - models.ShareObjectItem.status.label('status'), - case( - [(models.ShareObjectItem.shareItemUri.isnot(None), True)], - else_=False, - ).label('isShared'), - ) - .outerjoin( - models.ShareObjectItem, - and_( - models.ShareObjectItem.shareUri == share.shareUri, - models.DatasetStorageLocation.locationUri - == models.ShareObjectItem.itemUri, - ), - ) - .filter(models.DatasetStorageLocation.datasetUri == datasetUri) - ) - if data: - if data.get("isRevokable"): - locations = locations.filter(models.ShareObjectItem.status.in_(share_item_revokable_states)) - - shareable_objects = tables.union(locations).subquery('shareable_objects') - query = session.query(shareable_objects) - - if data: - if data.get('term'): - term = data.get('term') - query = query.filter( - or_( - shareable_objects.c.itemName.ilike(term + '%'), - shareable_objects.c.description.ilike(term + '%'), - ) - ) - if 'isShared' in data.keys(): - isShared = data.get('isShared') - query = query.filter(shareable_objects.c.isShared == isShared) - - return paginate(query, data.get('page', 1), data.get('pageSize', 10)).to_dict() - - @staticmethod - def list_user_received_share_requests( - session, username, groups, uri, data=None, check_perm=None - ): - query = ( - session.query(models.ShareObject) - .join( - models.Dataset, - models.Dataset.datasetUri == models.ShareObject.datasetUri, - ) - .filter( - or_( - models.Dataset.businessOwnerEmail == username, - models.Dataset.businessOwnerDelegationEmails.contains( - f'{{{username}}}' - ), - models.Dataset.stewards.in_(groups), - models.Dataset.SamlAdminGroupName.in_(groups), - ) - ) - ) - return paginate(query, data.get('page', 1), data.get('pageSize', 10)).to_dict() - - @staticmethod - def list_user_sent_share_requests( - session, username, groups, uri, data=None, check_perm=None - ): - query = ( - session.query(models.ShareObject) - .join( - models.Environment, - models.Environment.environmentUri == models.ShareObject.environmentUri, - ) - .filter( - or_( - models.ShareObject.owner == username, - and_( - models.ShareObject.groupUri.in_(groups), - models.ShareObject.principalType.in_([PrincipalType.Group.value, PrincipalType.ConsumptionRole.value]) - ), - ) - ) - ) - return paginate(query, data.get('page', 1), data.get('pageSize', 10)).to_dict() - - @staticmethod - def get_share_by_dataset_and_environment(session, dataset_uri, environment_uri): - environment_groups = session.query(models.EnvironmentGroup).filter( - models.EnvironmentGroup.environmentUri == environment_uri - ) - groups = [g.groupUri for g in environment_groups] - share = session.query(models.ShareObject).filter( - and_( - models.ShareObject.datasetUri == dataset_uri, - models.ShareObject.environmentUri == environment_uri, - models.ShareObject.groupUri.in_(groups), - ) - ) - if not share: - raise exceptions.ObjectNotFound('Share', f'{dataset_uri}/{environment_uri}') - return share - - @staticmethod - def update_share_object_status( - session, - shareUri: str, - status: str, - ) -> models.ShareObject: - - share = ShareObject.get_share_by_uri(session, shareUri) - share.status = status - session.commit() - return share - - 
@staticmethod - def update_share_item_status( - session, - uri: str, - status: str, - ) -> models.ShareObjectItem: - - share_item = ShareObject.get_share_item_by_uri(session, uri) - share_item.status = status - session.commit() - return share_item - - @staticmethod - def delete_share_item_status_batch( - session, - share_uri: str, - status: str, - ): - ( - session.query(models.ShareObjectItem) - .filter( - and_( - models.ShareObjectItem.shareUri == share_uri, - models.ShareObjectItem.status == status - ) - ) - .delete() - ) - - @staticmethod - def update_share_item_status_batch( - session, - share_uri: str, - old_status: str, - new_status: str, - ) -> bool: - - ( - session.query(models.ShareObjectItem) - .filter( - and_( - models.ShareObjectItem.shareUri == share_uri, - models.ShareObjectItem.status == old_status - ) - ) - .update( - { - models.ShareObjectItem.status: new_status, - } - ) - ) - return True - - @staticmethod - def find_share_item_by_table( - session, - share: models.ShareObject, - table: models.DatasetTable, - ) -> models.ShareObjectItem: - share_item: models.ShareObjectItem = ( - session.query(models.ShareObjectItem) - .filter( - and_( - models.ShareObjectItem.itemUri == table.tableUri, - models.ShareObjectItem.shareUri == share.shareUri, - ) - ) - .first() - ) - return share_item - - @staticmethod - def find_share_item_by_folder( - session, - share: models.ShareObject, - folder: models.DatasetStorageLocation, - ) -> models.ShareObjectItem: - share_item: models.ShareObjectItem = ( - session.query(models.ShareObjectItem) - .filter( - and_( - models.ShareObjectItem.itemUri == folder.locationUri, - models.ShareObjectItem.shareUri == share.shareUri, - ) - ) - .first() - ) - return share_item - - @staticmethod - def get_share_data(session, share_uri): - share: models.ShareObject = session.query(models.ShareObject).get(share_uri) - if not share: - raise exceptions.ObjectNotFound('Share', share_uri) - - dataset: models.Dataset = session.query(models.Dataset).get(share.datasetUri) - if not dataset: - raise exceptions.ObjectNotFound('Dataset', share.datasetUri) - - source_environment: models.Environment = session.query(models.Environment).get( - dataset.environmentUri - ) - if not source_environment: - raise exceptions.ObjectNotFound('SourceEnvironment', dataset.environmentUri) - - target_environment: models.Environment = session.query(models.Environment).get( - share.environmentUri - ) - if not target_environment: - raise exceptions.ObjectNotFound('TargetEnvironment', share.environmentUri) - - env_group: models.EnvironmentGroup = ( - session.query(models.EnvironmentGroup) - .filter( - and_( - models.EnvironmentGroup.environmentUri == share.environmentUri, - models.EnvironmentGroup.groupUri == share.groupUri, - ) - ) - .first() - ) - if not env_group: - raise Exception( - f'Share object Team {share.groupUri} is not a member of the ' - f'environment {target_environment.name}/{target_environment.AwsAccountId}' - ) - - source_env_group: models.EnvironmentGroup = ( - session.query(models.EnvironmentGroup) - .filter( - and_( - models.EnvironmentGroup.environmentUri == dataset.environmentUri, - models.EnvironmentGroup.groupUri == dataset.SamlAdminGroupName, - ) - ) - .first() - ) - if not source_env_group: - raise Exception( - f'Share object Team {dataset.SamlAdminGroupName} is not a member of the ' - f'environment {dataset.environmentUri}' - ) - - return ( - source_env_group, - env_group, - dataset, - share, - source_environment, - target_environment, - ) - - @staticmethod - def 
get_share_data_items(session, share_uri, status): - share: models.ShareObject = session.query(models.ShareObject).get(share_uri) - if not share: - raise exceptions.ObjectNotFound('Share', share_uri) - - tables = ( - session.query(models.DatasetTable) - .join( - models.ShareObjectItem, - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri, - ) - .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, - ) - .filter( - and_( - models.ShareObject.datasetUri == share.datasetUri, - models.ShareObject.environmentUri - == share.environmentUri, - models.ShareObject.shareUri == share_uri, - models.ShareObjectItem.status == status, - ) - ) - .all() - ) - - folders = ( - session.query(models.DatasetStorageLocation) - .join( - models.ShareObjectItem, - models.ShareObjectItem.itemUri == models.DatasetStorageLocation.locationUri, - ) - .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, - ) - .filter( - and_( - models.ShareObject.datasetUri == share.datasetUri, - models.ShareObject.environmentUri - == share.environmentUri, - models.ShareObject.shareUri == share_uri, - models.ShareObjectItem.status == status, - ) - ) - .all() - ) - - return ( - tables, - folders, - ) - - @staticmethod - def other_approved_share_object_exists(session, environment_uri, dataset_uri): - return ( - session.query(models.ShareObject) - .filter( - and_( - models.Environment.environmentUri == environment_uri, - models.ShareObject.status - == models.Enums.ShareObjectStatus.Approved.value, - models.ShareObject.datasetUri == dataset_uri, - ) - ) - .all() - ) - - @staticmethod - def get_share_items_states(session, share_uri, item_uris=None): - query = ( - session.query(models.ShareObjectItem) - .join( - models.ShareObject, - models.ShareObjectItem.shareUri == models.ShareObject.shareUri, - ) - .filter( - and_( - models.ShareObject.shareUri == share_uri, - ) - ) - ) - if item_uris: - query = query.filter(models.ShareObjectItem.shareItemUri.in_(item_uris)) - return [item.status for item in query.distinct(models.ShareObjectItem.status)] - - @staticmethod - def resolve_share_object_statistics(session, uri, **kwargs): - share_item_shared_states = ShareItemSM.get_share_item_shared_states() - tables = ( - session.query(models.ShareObjectItem) - .filter( - and_( - models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.itemType == 'DatasetTable', - ) - ) - .count() - ) - locations = ( - session.query(models.ShareObjectItem) - .filter( - and_( - models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.itemType == 'DatasetStorageLocation', - ) - ) - .count() - ) - shared_items = ( - session.query(models.ShareObjectItem) - .filter( - and_( - models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.status.in_(share_item_shared_states), - ) - ) - .count() - ) - revoked_items = ( - session.query(models.ShareObjectItem) - .filter( - and_( - models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.status.in_([ShareItemStatus.Revoke_Succeeded.value]), - ) - ) - .count() - ) - failed_states = [ - ShareItemStatus.Share_Failed.value, - ShareItemStatus.Revoke_Failed.value - ] - failed_items = ( - session.query(models.ShareObjectItem) - .filter( - and_( - models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.status.in_(failed_states), - ) - ) - .count() - ) - pending_states = [ - ShareItemStatus.PendingApproval.value - ] - pending_items = ( - session.query(models.ShareObjectItem) - .filter( - and_( - 
models.ShareObjectItem.shareUri == uri, - models.ShareObjectItem.status.in_(pending_states), - ) - ) - .count() - ) - return {'tables': tables, 'locations': locations, 'sharedItems': shared_items, 'revokedItems': revoked_items, 'failedItems': failed_items, 'pendingItems': pending_items} diff --git a/backend/dataall/db/api/stack.py b/backend/dataall/db/api/stack.py deleted file mode 100644 index c6d6946ad..000000000 --- a/backend/dataall/db/api/stack.py +++ /dev/null @@ -1,97 +0,0 @@ -import logging - -from . import ResourcePolicy, TargetType -from .. import exceptions -from .. import models -from ...utils.naming_convention import ( - NamingConventionService, - NamingConventionPattern, -) - -log = logging.getLogger(__name__) - - -class Stack: - @staticmethod - def get_stack_by_target_uri(session, target_uri): - stack = Stack.find_stack_by_target_uri(session, target_uri) - if not stack: - raise exceptions.ObjectNotFound('Stack', target_uri) - return stack - - @staticmethod - def find_stack_by_target_uri(session, target_uri): - stack: models.Stack = ( - session.query(models.Stack) - .filter(models.Stack.targetUri == target_uri) - .first() - ) - return stack - - @staticmethod - def get_stack_by_uri(session, stack_uri): - stack = Stack.find_stack_by_uri(session, stack_uri) - if not stack: - raise exceptions.ObjectNotFound('Stack', stack_uri) - return stack - - @staticmethod - def find_stack_by_uri(session, stack_uri): - stack: models.Stack = session.query(models.Stack).get(stack_uri) - return stack - - @staticmethod - def create_stack( - session, environment_uri, target_label, target_uri, target_type, payload=None - ) -> models.Stack: - environment: models.Environment = session.query(models.Environment).get( - environment_uri - ) - if not environment: - raise exceptions.ObjectNotFound('Environment', environment_uri) - - stack = models.Stack( - targetUri=target_uri, - accountid=environment.AwsAccountId, - region=environment.region, - stack=target_type, - payload=payload, - name=NamingConventionService( - target_label=target_type, - target_uri=target_uri, - pattern=NamingConventionPattern.DEFAULT, - resource_prefix=environment.resourcePrefix, - ).build_compliant_name(), - ) - session.add(stack) - session.commit() - return stack - - @staticmethod - def update_stack( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> [models.Stack]: - - if not uri: - raise exceptions.RequiredParameter('targetUri') - if not data: - raise exceptions.RequiredParameter('data') - if not data.get('targetType'): - raise exceptions.RequiredParameter('targetType') - - ResourcePolicy.check_user_resource_permission( - session=session, - username=username, - groups=groups, - resource_uri=uri, - permission_name=TargetType.get_resource_update_permission_name( - data['targetType'] - ), - ) - stack = Stack.get_stack_by_target_uri(session, target_uri=uri) - return stack diff --git a/backend/dataall/db/api/target_type.py b/backend/dataall/db/api/target_type.py deleted file mode 100644 index 9dc4b54fd..000000000 --- a/backend/dataall/db/api/target_type.py +++ /dev/null @@ -1,64 +0,0 @@ -import logging - -from .. import exceptions, permissions -from .. 
import models - -logger = logging.getLogger(__name__) - - -class TargetType: - @staticmethod - def get_target_type_permissions(): - return dict( - dataset=(permissions.GET_DATASET, permissions.UPDATE_DATASET), - environment=(permissions.GET_ENVIRONMENT, permissions.UPDATE_ENVIRONMENT), - notebook=(permissions.GET_NOTEBOOK, permissions.UPDATE_NOTEBOOK), - mlstudio=( - permissions.GET_SGMSTUDIO_NOTEBOOK, - permissions.UPDATE_SGMSTUDIO_NOTEBOOK, - ), - pipeline=(permissions.GET_PIPELINE, permissions.UPDATE_PIPELINE), - redshift=( - permissions.GET_REDSHIFT_CLUSTER, - permissions.GET_REDSHIFT_CLUSTER, - ), - ) - - @staticmethod - def get_resource_update_permission_name(target_type): - TargetType.is_supported_target_type(target_type) - return TargetType.get_target_type_permissions()[target_type][1] - - @staticmethod - def get_resource_read_permission_name(target_type): - TargetType.is_supported_target_type(target_type) - return TargetType.get_target_type_permissions()[target_type][0] - - @staticmethod - def is_supported_target_type(target_type): - supported_types = [ - 'dataset', - 'environment', - 'notebook', - 'mlstudio', - 'pipeline', - 'redshift', - ] - if target_type not in supported_types: - raise exceptions.InvalidInput( - 'targetType', - target_type, - ' or '.join(supported_types), - ) - - @staticmethod - def get_target_type(model_name): - target_types_map = dict( - environment=models.Environment, - dataset=models.Dataset, - notebook=models.SagemakerNotebook, - mlstudio=models.SagemakerStudioUserProfile, - pipeline=models.DataPipeline, - redshift=models.RedshiftCluster, - ) - return [k for k, v in target_types_map.items() if v == model_name][0] diff --git a/backend/dataall/db/api/tenant.py b/backend/dataall/db/api/tenant.py deleted file mode 100644 index ef377992c..000000000 --- a/backend/dataall/db/api/tenant.py +++ /dev/null @@ -1,42 +0,0 @@ -import logging - -from .. import models - -logger = logging.getLogger(__name__) - - -class Tenant: - @staticmethod - def find_tenant_by_name(session, tenant_name: str) -> models.Tenant: - if tenant_name: - tenant = ( - session.query(models.Tenant) - .filter(models.Tenant.name == tenant_name) - .first() - ) - return tenant - - @staticmethod - def get_tenant_by_name(session, tenant_name: str) -> models.Tenant: - if not tenant_name: - raise Exception('Tenant name is required') - tenant = Tenant.find_tenant_by_name(session, tenant_name) - if not tenant: - raise Exception('TenantNotFound') - return tenant - - @staticmethod - def save_tenant(session, name: str, description: str) -> models.Tenant: - if not name: - raise Exception('Tenant name is required') - - tenant = Tenant.find_tenant_by_name(session, name) - if tenant: - return tenant - else: - tenant = models.Tenant( - name=name, description=description if description else f'Tenant {name}' - ) - session.add(tenant) - session.commit() - return tenant diff --git a/backend/dataall/db/api/tenant_policy.py b/backend/dataall/db/api/tenant_policy.py deleted file mode 100644 index b0e1b5fa7..000000000 --- a/backend/dataall/db/api/tenant_policy.py +++ /dev/null @@ -1,403 +0,0 @@ -import logging - -from sqlalchemy.sql import and_ - -from .. import exceptions, permissions, paginate -from .. 
import models -from ..api.permission import Permission -from ..api.tenant import Tenant -from ..models.Permission import PermissionType - -logger = logging.getLogger(__name__) - -TENANT_NAME = 'dataall' - - -class TenantPolicy: - @staticmethod - def is_tenant_admin(groups: [str]): - if not groups: - return False - - if 'DAAdministrators' in groups: - return True - - return False - - @staticmethod - def check_user_tenant_permission( - session, username: str, groups: [str], tenant_name: str, permission_name: str - ): - if TenantPolicy.is_tenant_admin(groups): - return True - - tenant_policy = TenantPolicy.has_user_tenant_permission( - session=session, - username=username, - groups=groups, - permission_name=permission_name, - tenant_name=tenant_name, - ) - - if not tenant_policy: - raise exceptions.TenantUnauthorized( - username=username, - action=permission_name, - tenant_name=tenant_name, - ) - - else: - return tenant_policy - - @staticmethod - def has_user_tenant_permission( - session, username: str, groups: [str], tenant_name: str, permission_name: str - ): - if not username or not permission_name: - return False - tenant_policy: models.TenantPolicy = ( - session.query(models.TenantPolicy) - .join( - models.TenantPolicyPermission, - models.TenantPolicy.sid == models.TenantPolicyPermission.sid, - ) - .join( - models.Tenant, - models.Tenant.tenantUri == models.TenantPolicy.tenantUri, - ) - .join( - models.Permission, - models.Permission.permissionUri - == models.TenantPolicyPermission.permissionUri, - ) - .filter( - models.TenantPolicy.principalId.in_(groups), - models.Permission.name == permission_name, - models.Tenant.name == tenant_name, - ) - .first() - ) - return tenant_policy - - @staticmethod - def has_group_tenant_permission( - session, group_uri: str, tenant_name: str, permission_name: str - ): - if not group_uri or not permission_name: - return False - - tenant_policy: models.TenantPolicy = ( - session.query(models.TenantPolicy) - .join( - models.TenantPolicyPermission, - models.TenantPolicy.sid == models.TenantPolicyPermission.sid, - ) - .join( - models.Tenant, - models.Tenant.tenantUri == models.TenantPolicy.tenantUri, - ) - .join( - models.Permission, - models.Permission.permissionUri - == models.TenantPolicyPermission.permissionUri, - ) - .filter( - and_( - models.TenantPolicy.principalId == group_uri, - models.Permission.name == permission_name, - models.Tenant.name == tenant_name, - ) - ) - .first() - ) - - if not tenant_policy: - return False - else: - return tenant_policy - - @staticmethod - def find_tenant_policy(session, group_uri: str, tenant_name: str): - - TenantPolicy.validate_find_tenant_policy(group_uri, tenant_name) - - tenant_policy = ( - session.query(models.TenantPolicy) - .join( - models.Tenant, models.Tenant.tenantUri == models.TenantPolicy.tenantUri - ) - .filter( - and_( - models.TenantPolicy.principalId == group_uri, - models.Tenant.name == tenant_name, - ) - ) - .first() - ) - return tenant_policy - - @staticmethod - def validate_find_tenant_policy(group_uri, tenant_name): - if not group_uri: - raise exceptions.RequiredParameter(param_name='group_uri') - if not tenant_name: - raise exceptions.RequiredParameter(param_name='tenant_name') - - @staticmethod - def attach_group_tenant_policy( - session, - group: str, - permissions: [str], - tenant_name: str, - ) -> models.TenantPolicy: - - TenantPolicy.validate_attach_tenant_policy(group, permissions, tenant_name) - - policy = TenantPolicy.save_group_tenant_policy(session, group, tenant_name) - - 
TenantPolicy.add_permission_to_group_tenant_policy( - session, group, permissions, tenant_name, policy - ) - - return policy - - @staticmethod - def validate_attach_tenant_policy(group, permissions, tenant_name): - if not group: - raise exceptions.RequiredParameter(param_name='group') - if not permissions: - raise exceptions.RequiredParameter(param_name='permissions') - if not tenant_name: - raise exceptions.RequiredParameter(param_name='tenant_name') - - @staticmethod - def save_group_tenant_policy(session, group, tenant_name): - - TenantPolicy.validate_save_tenant_policy(group, tenant_name) - - policy = TenantPolicy.find_tenant_policy(session, group, tenant_name) - if not policy: - policy = models.TenantPolicy( - principalId=group, - principalType='GROUP', - tenant=Tenant.get_tenant_by_name(session, tenant_name), - ) - session.add(policy) - session.commit() - return policy - - @staticmethod - def validate_save_tenant_policy(group, tenant_name): - if not group: - raise exceptions.RequiredParameter(param_name='group') - if not tenant_name: - raise exceptions.RequiredParameter(param_name='tenant_name') - - @staticmethod - def add_permission_to_group_tenant_policy( - session, group, permissions, tenant_name, policy - ): - TenantPolicy.validate_add_permission_to_tenant_policy_params( - group, permissions, policy, tenant_name - ) - - for permission in permissions: - if not TenantPolicy.has_group_tenant_permission( - session, - group_uri=group, - permission_name=permission, - tenant_name=tenant_name, - ): - TenantPolicy.associate_permission_to_tenant_policy( - session, policy, permission - ) - - @staticmethod - def validate_add_permission_to_tenant_policy_params( - group, permissions, policy, tenant_name - ): - if not group: - raise exceptions.RequiredParameter(param_name='group') - TenantPolicy.validate_add_permissions_params(permissions, policy, tenant_name) - - @staticmethod - def validate_add_permissions_params(permissions, policy, tenant_name): - if not permissions: - raise exceptions.RequiredParameter(param_name='permissions') - if not tenant_name: - raise exceptions.RequiredParameter(param_name='tenant_name') - if not policy: - raise exceptions.RequiredParameter(param_name='policy') - - @staticmethod - def associate_permission_to_tenant_policy(session, policy, permission): - policy_permission = models.TenantPolicyPermission( - sid=policy.sid, - permissionUri=Permission.get_permission_by_name( - session, permission, PermissionType.TENANT.name - ).permissionUri, - ) - session.add(policy_permission) - session.commit() - - @staticmethod - def get_tenant_policy_permissions(session, group_uri, tenant_name): - if not group_uri: - raise exceptions.RequiredParameter(param_name='group_uri') - if not tenant_name: - raise exceptions.RequiredParameter(param_name='tenant_name') - policy = TenantPolicy.find_tenant_policy( - session=session, - group_uri=group_uri, - tenant_name=tenant_name, - ) - permissions = [] - for p in policy.permissions: - permissions.append(p.permission) - return permissions - - @staticmethod - def delete_tenant_policy( - session, - group: str, - tenant_name: str, - ) -> bool: - - policy = TenantPolicy.find_tenant_policy( - session, group_uri=group, tenant_name=tenant_name - ) - if policy: - for permission in policy.permissions: - session.delete(permission) - session.delete(policy) - session.commit() - - return True - - @staticmethod - def list_group_tenant_permissions( - session, username, groups, uri, data=None, check_perm=None - ): - if not groups: - raise 
exceptions.RequiredParameter('groups') - if not uri: - raise exceptions.RequiredParameter('groupUri') - - if not TenantPolicy.is_tenant_admin(groups): - raise exceptions.UnauthorizedOperation( - action='LIST_TENANT_TEAM_PERMISSIONS', - message=f'User: {username} is not allowed to manage tenant permissions', - ) - - return TenantPolicy.get_tenant_policy_permissions( - session=session, - group_uri=uri, - tenant_name='dataall', - ) - - @staticmethod - def list_tenant_groups(session, username, groups, uri, data=None, check_perm=None): - if not groups: - raise exceptions.RequiredParameter('groups') - - if not TenantPolicy.is_tenant_admin(groups): - raise exceptions.UnauthorizedOperation( - action='LIST_TENANT_TEAMS', - message=f'User: {username} is not allowed to manage tenant permissions', - ) - - query = session.query( - models.TenantPolicy.principalId.label('name'), - models.TenantPolicy.principalId.label('groupUri'), - ).filter( - and_( - models.TenantPolicy.principalType == 'GROUP', - models.TenantPolicy.principalId != 'DAAdministrators', - ) - ) - - if data and data.get('term'): - query = query.filter( - models.TenantPolicy.principalId.ilike('%' + data.get('term') + '%') - ) - - return paginate( - query=query, - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - def list_tenant_permissions(session, username, groups): - if not TenantPolicy.is_tenant_admin(groups): - raise exceptions.UnauthorizedOperation( - action='LIST_TENANT_TEAM_PERMISSIONS', - message=f'User: {username} is not allowed to manage tenant permissions', - ) - group_invitation_permissions = [] - for p in permissions.TENANT_ALL: - group_invitation_permissions.append( - Permission.find_permission_by_name( - session=session, - permission_name=p, - permission_type=PermissionType.TENANT.name, - ) - ) - return group_invitation_permissions - - @staticmethod - def update_group_permissions( - session, username, groups, uri, data=None, check_perm=None - ): - TenantPolicy.validate_params(data) - - if not TenantPolicy.is_tenant_admin(groups): - exceptions.UnauthorizedOperation( - action='UPDATE_TENANT_TEAM_PERMISSIONS', - message=f'User: {username} is not allowed to manage tenant permissions', - ) - - TenantPolicy.validate_permissions( - session, TENANT_NAME, data['permissions'], uri - ) - - TenantPolicy.delete_tenant_policy( - session=session, group=uri, tenant_name=TENANT_NAME - ) - TenantPolicy.attach_group_tenant_policy( - session=session, - group=uri, - permissions=data['permissions'], - tenant_name=TENANT_NAME, - ) - - return True - - @staticmethod - def validate_permissions(session, tenant_name, g_permissions, group): - g_permissions = list(set(g_permissions)) - - if g_permissions not in permissions.TENANT_ALL: - exceptions.TenantPermissionUnauthorized( - action='UPDATE_TENANT_TEAM_PERMISSIONS', - group_name=group, - tenant_name=tenant_name, - ) - - tenant_group_permissions = [] - for p in g_permissions: - tenant_group_permissions.append( - Permission.find_permission_by_name( - session=session, - permission_name=p, - permission_type=PermissionType.TENANT.name, - ) - ) - return tenant_group_permissions - - @staticmethod - def validate_params(data): - if not data: - raise exceptions.RequiredParameter('data') - if not data.get('permissions'): - raise exceptions.RequiredParameter('permissions') diff --git a/backend/dataall/db/api/vote.py b/backend/dataall/db/api/vote.py deleted file mode 100644 index de3526a0a..000000000 --- a/backend/dataall/db/api/vote.py +++ /dev/null @@ -1,92 +0,0 @@ 
-import logging -from datetime import datetime - -from .. import exceptions -from .. import models - -logger = logging.getLogger(__name__) - - -class Vote: - @staticmethod - def upvote( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> [models.Vote]: - - if not uri: - raise exceptions.RequiredParameter('targetUri') - if not data: - raise exceptions.RequiredParameter('data') - if not data.get('targetType'): - raise exceptions.RequiredParameter('targetType') - if 'upvote' not in data: - raise exceptions.RequiredParameter('upvote') - - vote: models.Vote = ( - session.query(models.Vote) - .filter( - models.Vote.targetUri == uri, - models.Vote.targetType == data['targetType'], - ) - .first() - ) - if vote: - vote.upvote = data['upvote'] - vote.updated = datetime.now() - - else: - vote: models.Vote = models.Vote( - username=username, - targetUri=uri, - targetType=data['targetType'], - upvote=data['upvote'], - ) - session.add(vote) - - session.commit() - return vote - - @staticmethod - def count_upvotes( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return ( - session.query(models.Vote) - .filter( - models.Vote.targetUri == uri, - models.Vote.targetType == data['targetType'], - models.Vote.upvote == True, - ) - .count() - ) - - @staticmethod - def get_vote(session, username, groups, uri, data=None, check_perm=None) -> dict: - return Vote.find_vote(session, uri, data['targetType']) - - @staticmethod - def find_vote(session, target_uri, target_type) -> [models.Vote]: - return ( - session.query(models.Vote) - .filter( - models.Vote.targetUri == target_uri, - models.Vote.targetType == target_type, - ) - .first() - ) - - @staticmethod - def delete_votes(session, target_uri, target_type) -> [models.Vote]: - return ( - session.query(models.Vote) - .filter( - models.Vote.targetUri == target_uri, - models.Vote.targetType == target_type, - ) - .delete() - ) diff --git a/backend/dataall/db/api/vpc.py b/backend/dataall/db/api/vpc.py deleted file mode 100644 index daa37b1e6..000000000 --- a/backend/dataall/db/api/vpc.py +++ /dev/null @@ -1,169 +0,0 @@ -import logging - -from sqlalchemy import and_ - -from .. import exceptions, permissions -from .. import models -from . 
import ( - has_tenant_perm, - has_resource_perm, - Environment, - ResourcePolicy, -) - -log = logging.getLogger(__name__) - - -class Vpc: - def __init__(self): - pass - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ENVIRONMENTS) - @has_resource_perm(permissions.CREATE_NETWORK) - def create_network( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.Vpc: - - Vpc._validate_input(data) - - Environment.check_group_environment_permission( - session=session, - username=username, - groups=groups, - uri=uri, - group=data['SamlGroupName'], - permission_name=permissions.CREATE_NETWORK, - ) - - vpc = ( - session.query(models.Vpc) - .filter( - and_( - models.Vpc.VpcId == data['vpcId'], models.Vpc.environmentUri == uri - ) - ) - .first() - ) - - if vpc: - raise exceptions.ResourceAlreadyExists( - action=permissions.CREATE_NETWORK, - message=f'Vpc {data["vpcId"]} is already associated to environment {uri}', - ) - - environment = Environment.get_environment_by_uri(session, uri) - - vpc = models.Vpc( - environmentUri=environment.environmentUri, - region=environment.region, - AwsAccountId=environment.AwsAccountId, - VpcId=data['vpcId'], - privateSubnetIds=data.get('privateSubnetIds', []), - publicSubnetIds=data.get('publicSubnetIds', []), - SamlGroupName=data['SamlGroupName'], - owner=username, - label=data['label'], - name=data['label'], - default=data.get('default', False), - ) - session.add(vpc) - session.commit() - - activity = models.Activity( - action='NETWORK:CREATE', - label='NETWORK:CREATE', - owner=username, - summary=f'{username} created network {vpc.label} in {environment.label}', - targetUri=vpc.vpcUri, - targetType='Vpc', - ) - session.add(activity) - - ResourcePolicy.attach_resource_policy( - session=session, - group=vpc.SamlGroupName, - permissions=permissions.NETWORK_ALL, - resource_uri=vpc.vpcUri, - resource_type=models.Vpc.__name__, - ) - - if environment.SamlGroupName != vpc.SamlGroupName: - ResourcePolicy.attach_resource_policy( - session=session, - group=environment.SamlGroupName, - permissions=permissions.NETWORK_ALL, - resource_uri=vpc.vpcUri, - resource_type=models.Vpc.__name__, - ) - - return vpc - - @staticmethod - def _validate_input(data): - if not data: - raise exceptions.RequiredParameter(data) - if not data.get('environmentUri'): - raise exceptions.RequiredParameter('environmentUri') - if not data.get('SamlGroupName'): - raise exceptions.RequiredParameter('group') - if not data.get('label'): - raise exceptions.RequiredParameter('label') - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ENVIRONMENTS) - @has_resource_perm(permissions.GET_NETWORK) - def get_network( - session, - username: str, - groups: [str], - uri: str, - data: dict = None, - check_perm: bool = False, - ) -> models.Vpc: - return Vpc.get_vpc_by_uri(session, uri) - - @staticmethod - @has_tenant_perm(permissions.MANAGE_ENVIRONMENTS) - @has_resource_perm(permissions.DELETE_NETWORK) - def delete(session, username, groups, uri, data=None, check_perm=None) -> bool: - vpc = Vpc.get_vpc_by_uri(session, uri) - session.delete(vpc) - ResourcePolicy.delete_resource_policy( - session=session, resource_uri=uri, group=vpc.SamlGroupName - ) - session.commit() - return True - - @staticmethod - def get_vpc_by_uri(session, vpc_uri) -> models.Vpc: - vpc = session.query(models.Vpc).get(vpc_uri) - if not vpc: - raise exceptions.ObjectNotFound('VPC', vpc_uri) - return vpc - - @staticmethod - def get_environment_vpc_list(session, environment_uri): - 
return ( - session.query(models.Vpc) - .filter(models.Vpc.environmentUri == environment_uri) - .all() - ) - - @staticmethod - def get_environment_default_vpc(session, environment_uri): - return ( - session.query(models.Vpc) - .filter( - and_( - models.Vpc.environmentUri == environment_uri, - models.Vpc.default == True, - ) - ) - .first() - ) diff --git a/backend/dataall/db/api/worksheet.py b/backend/dataall/db/api/worksheet.py deleted file mode 100644 index 7f975b536..000000000 --- a/backend/dataall/db/api/worksheet.py +++ /dev/null @@ -1,204 +0,0 @@ -import logging - -from sqlalchemy import and_, or_ -from sqlalchemy.orm import Query - -from .. import exceptions, permissions, paginate -from .. import models -from . import has_tenant_perm, ResourcePolicy, has_resource_perm - -logger = logging.getLogger(__name__) - - -class Worksheet: - @staticmethod - def get_worksheet_by_uri(session, uri: str) -> models.Worksheet: - if not uri: - raise exceptions.RequiredParameter(param_name='worksheetUri') - worksheet = Worksheet.find_worksheet_by_uri(session, uri) - if not worksheet: - raise exceptions.ObjectNotFound('Worksheet', uri) - return worksheet - - @staticmethod - def find_worksheet_by_uri(session, uri) -> models.Worksheet: - return session.query(models.Worksheet).get(uri) - - @staticmethod - @has_tenant_perm(permissions.MANAGE_WORKSHEETS) - def create_worksheet( - session, username, groups, uri, data=None, check_perm=None - ) -> models.Worksheet: - if not data: - raise exceptions.RequiredParameter(data) - if not data.get('SamlAdminGroupName'): - raise exceptions.RequiredParameter('groupUri') - if not data.get('label'): - raise exceptions.RequiredParameter('label') - - worksheet = models.Worksheet( - owner=username, - label=data.get('label'), - description=data.get('description', 'No description provided'), - tags=data.get('tags'), - chartConfig={'dimensions': [], 'measures': [], 'chartType': 'bar'}, - SamlAdminGroupName=data['SamlAdminGroupName'], - ) - session.add(worksheet) - session.commit() - - activity = models.Activity( - action='WORKSHEET:CREATE', - label='WORKSHEET:CREATE', - owner=username, - summary=f'{username} created worksheet {worksheet.name} ', - targetUri=worksheet.worksheetUri, - targetType='worksheet', - ) - session.add(activity) - - ResourcePolicy.attach_resource_policy( - session=session, - group=data['SamlAdminGroupName'], - permissions=permissions.WORKSHEET_ALL, - resource_uri=worksheet.worksheetUri, - resource_type=models.Worksheet.__name__, - ) - return worksheet - - @staticmethod - @has_resource_perm(permissions.UPDATE_WORKSHEET) - def update_worksheet(session, username, groups, uri, data=None, check_perm=None): - worksheet = Worksheet.get_worksheet_by_uri(session, uri) - for field in data.keys(): - setattr(worksheet, field, data.get(field)) - session.commit() - - activity = models.Activity( - action='WORKSHEET:UPDATE', - label='WORKSHEET:UPDATE', - owner=username, - summary=f'{username} updated worksheet {worksheet.name} ', - targetUri=worksheet.worksheetUri, - targetType='worksheet', - ) - session.add(activity) - return worksheet - - @staticmethod - @has_resource_perm(permissions.GET_WORKSHEET) - def get_worksheet(session, username, groups, uri, data=None, check_perm=None): - worksheet = Worksheet.get_worksheet_by_uri(session, uri) - return worksheet - - @staticmethod - def query_user_worksheets(session, username, groups, filter) -> Query: - query = session.query(models.Worksheet).filter( - or_( - models.Worksheet.owner == username, - 
models.Worksheet.SamlAdminGroupName.in_(groups), - ) - ) - if filter and filter.get('term'): - query = query.filter( - or_( - models.Worksheet.label.ilike('%' + filter.get('term') + '%'), - models.Worksheet.description.ilike('%' + filter.get('term') + '%'), - models.Worksheet.tags.contains(f"{{{filter.get('term')}}}"), - ) - ) - return query - - @staticmethod - def paginated_user_worksheets( - session, username, groups, uri, data=None, check_perm=None - ) -> dict: - return paginate( - query=Worksheet.query_user_worksheets(session, username, groups, data), - page=data.get('page', 1), - page_size=data.get('pageSize', 10), - ).to_dict() - - @staticmethod - @has_resource_perm(permissions.SHARE_WORKSHEET) - def share_worksheet( - session, username, groups, uri, data=None, check_perm=None - ) -> models.WorksheetShare: - share = ( - session.query(models.WorksheetShare) - .filter( - and_( - models.WorksheetShare.worksheetUri == uri, - models.WorksheetShare.principalId == data.get('principalId'), - models.WorksheetShare.principalType == data.get('principalType'), - ) - ) - .first() - ) - - if not share: - share = models.WorksheetShare( - worksheetUri=uri, - principalType=data['principalType'], - principalId=data['principalId'], - canEdit=data.get('canEdit', True), - owner=username, - ) - session.add(share) - - ResourcePolicy.attach_resource_policy( - session=session, - group=data['principalId'], - permissions=permissions.WORKSHEET_SHARED, - resource_uri=uri, - resource_type=models.Worksheet.__name__, - ) - return share - - @staticmethod - @has_resource_perm(permissions.SHARE_WORKSHEET) - def update_share_worksheet( - session, username, groups, uri, data=None, check_perm=None - ) -> models.WorksheetShare: - share: models.WorksheetShare = data['share'] - share.canEdit = data['canEdit'] - worksheet = Worksheet.get_worksheet_by_uri(session, uri) - ResourcePolicy.attach_resource_policy( - session=session, - group=share.principalId, - permissions=permissions.WORKSHEET_SHARED, - resource_uri=uri, - resource_type=models.Worksheet.__name__, - ) - return share - - @staticmethod - @has_resource_perm(permissions.SHARE_WORKSHEET) - def delete_share_worksheet( - session, username, groups, uri, data=None, check_perm=None - ) -> bool: - share: models.WorksheetShare = data['share'] - ResourcePolicy.delete_resource_policy( - session=session, - group=share.principalId, - resource_uri=uri, - resource_type=models.Worksheet.__name__, - ) - session.delete(share) - session.commit() - return True - - @staticmethod - @has_resource_perm(permissions.DELETE_WORKSHEET) - def delete_worksheet( - session, username, groups, uri, data=None, check_perm=None - ) -> bool: - worksheet = Worksheet.get_worksheet_by_uri(session, uri) - session.delete(worksheet) - ResourcePolicy.delete_resource_policy( - session=session, - group=worksheet.SamlAdminGroupName, - resource_uri=uri, - resource_type=models.Worksheet.__name__, - ) - return True diff --git a/backend/dataall/db/models/Activity.py b/backend/dataall/db/models/Activity.py deleted file mode 100644 index 62b49e1b1..000000000 --- a/backend/dataall/db/models/Activity.py +++ /dev/null @@ -1,13 +0,0 @@ -from sqlalchemy import Column, String - -from .. import Base -from .. 
import Resource, utils - - -class Activity(Resource, Base): - __tablename__ = 'activity' - activityUri = Column(String, primary_key=True, default=utils.uuid('activity')) - targetUri = Column(String, nullable=False) - targetType = Column(String, nullable=False) - action = Column(String, nullable=False) - summary = Column(String, nullable=False) diff --git a/backend/dataall/db/models/ConsumptionRole.py b/backend/dataall/db/models/ConsumptionRole.py deleted file mode 100644 index 290634a42..000000000 --- a/backend/dataall/db/models/ConsumptionRole.py +++ /dev/null @@ -1,18 +0,0 @@ -import datetime - -from sqlalchemy import Column, String, DateTime - -from .. import Base, Resource, utils - - -class ConsumptionRole(Base): - __tablename__ = 'consumptionrole' - consumptionRoleUri = Column(String, primary_key=True, default=utils.uuid('group')) - consumptionRoleName = Column(String, nullable=False) - environmentUri = Column(String, nullable=False) - groupUri = Column(String, nullable=False) - IAMRoleName = Column(String, nullable=False) - IAMRoleArn = Column(String, nullable=False) - created = Column(DateTime, default=datetime.datetime.now) - updated = Column(DateTime, onupdate=datetime.datetime.now) - deleted = Column(DateTime) diff --git a/backend/dataall/db/models/Dashboard.py b/backend/dataall/db/models/Dashboard.py deleted file mode 100644 index a4bd97587..000000000 --- a/backend/dataall/db/models/Dashboard.py +++ /dev/null @@ -1,20 +0,0 @@ -from sqlalchemy import Column, String -from sqlalchemy.orm import query_expression - -from .. import Base, Resource, utils - - -class Dashboard(Resource, Base): - __tablename__ = 'dashboard' - environmentUri = Column(String, nullable=False) - organizationUri = Column(String, nullable=False) - dashboardUri = Column( - String, nullable=False, primary_key=True, default=utils.uuid('dashboard') - ) - region = Column(String, default='eu-west-1') - AwsAccountId = Column(String, nullable=False) - namespace = Column(String, nullable=False) - DashboardId = Column(String, nullable=False) - SamlGroupName = Column(String, nullable=False) - - userRoleForDashboard = query_expression() diff --git a/backend/dataall/db/models/DashboardShare.py b/backend/dataall/db/models/DashboardShare.py deleted file mode 100644 index a8d25dca0..000000000 --- a/backend/dataall/db/models/DashboardShare.py +++ /dev/null @@ -1,24 +0,0 @@ -from enum import Enum - -from sqlalchemy import Column, String - -from .. import Base, utils - - -class DashboardShareStatus(Enum): - REQUESTED = 'REQUESTED' - APPROVED = 'APPROVED' - REJECTED = 'REJECTED' - - -class DashboardShare(Base): - __tablename__ = 'dashboardshare' - shareUri = Column( - String, nullable=False, primary_key=True, default=utils.uuid('shareddashboard') - ) - dashboardUri = Column(String, nullable=False, default=utils.uuid('dashboard')) - SamlGroupName = Column(String, nullable=False) - owner = Column(String, nullable=True) - status = Column( - String, nullable=False, default=DashboardShareStatus.REQUESTED.value - ) diff --git a/backend/dataall/db/models/DataPipeline.py b/backend/dataall/db/models/DataPipeline.py deleted file mode 100644 index 4146a9db1..000000000 --- a/backend/dataall/db/models/DataPipeline.py +++ /dev/null @@ -1,20 +0,0 @@ -from sqlalchemy import Column, String -from sqlalchemy.orm import query_expression -from sqlalchemy.dialects import postgresql - -from .. 
import Base, Resource, utils - - -class DataPipeline(Resource, Base): - __tablename__ = 'datapipeline' - environmentUri = Column(String, nullable=False) - DataPipelineUri = Column( - String, nullable=False, primary_key=True, default=utils.uuid('DataPipelineUri') - ) - region = Column(String, default='eu-west-1') - AwsAccountId = Column(String, nullable=False) - SamlGroupName = Column(String, nullable=False) - repo = Column(String, nullable=False) - devStrategy = Column(String, nullable=False) - template = Column(String, nullable=True, default="") - userRoleForPipeline = query_expression() diff --git a/backend/dataall/db/models/DataPipelineEnvironment.py b/backend/dataall/db/models/DataPipelineEnvironment.py deleted file mode 100644 index 986b7bebd..000000000 --- a/backend/dataall/db/models/DataPipelineEnvironment.py +++ /dev/null @@ -1,19 +0,0 @@ -from sqlalchemy import Column, String, Integer -from sqlalchemy.orm import query_expression -from sqlalchemy.dialects import postgresql - -from .. import Base, Resource, utils - - -class DataPipelineEnvironment(Base, Resource): - __tablename__ = 'datapipelineenvironments' - envPipelineUri = Column(String, nullable=False, primary_key=True) - environmentUri = Column(String, nullable=False) - environmentLabel = Column(String, nullable=False) - pipelineUri = Column(String, nullable=False) - pipelineLabel = Column(String, nullable=False) - stage = Column(String, nullable=False) - order = Column(Integer, nullable=False) - region = Column(String, default='eu-west-1') - AwsAccountId = Column(String, nullable=False) - samlGroupName = Column(String, nullable=False) diff --git a/backend/dataall/db/models/Dataset.py b/backend/dataall/db/models/Dataset.py deleted file mode 100644 index 35de117f9..000000000 --- a/backend/dataall/db/models/Dataset.py +++ /dev/null @@ -1,61 +0,0 @@ -from sqlalchemy import Boolean, Column, String -from sqlalchemy.dialects import postgresql -from sqlalchemy.orm import query_expression - -from .. 
import Base, Resource, utils - - -class Dataset(Resource, Base): - __tablename__ = 'dataset' - environmentUri = Column(String, nullable=False) - organizationUri = Column(String, nullable=False) - datasetUri = Column(String, primary_key=True, default=utils.uuid('dataset')) - region = Column(String, default='eu-west-1') - AwsAccountId = Column(String, nullable=False) - S3BucketName = Column(String, nullable=False) - GlueDatabaseName = Column(String, nullable=False) - GlueCrawlerName = Column(String) - GlueCrawlerSchedule = Column(String) - GlueProfilingJobName = Column(String) - GlueProfilingTriggerSchedule = Column(String) - GlueProfilingTriggerName = Column(String) - GlueDataQualityJobName = Column(String) - GlueDataQualitySchedule = Column(String) - GlueDataQualityTriggerName = Column(String) - IAMDatasetAdminRoleArn = Column(String, nullable=False) - IAMDatasetAdminUserArn = Column(String, nullable=False) - KmsAlias = Column(String, nullable=False) - userRoleForDataset = query_expression() - userRoleInEnvironment = query_expression() - isPublishedInEnvironment = query_expression() - projectPermission = query_expression() - language = Column(String, nullable=False, default='English') - topics = Column(postgresql.ARRAY(String), nullable=True) - confidentiality = Column(String, nullable=False, default='Unclassified') - tags = Column(postgresql.ARRAY(String)) - inProject = query_expression() - - bucketCreated = Column(Boolean, default=False) - glueDatabaseCreated = Column(Boolean, default=False) - iamAdminRoleCreated = Column(Boolean, default=False) - iamAdminUserCreated = Column(Boolean, default=False) - kmsAliasCreated = Column(Boolean, default=False) - lakeformationLocationCreated = Column(Boolean, default=False) - bucketPolicyCreated = Column(Boolean, default=False) - - # bookmarked = Column(Integer, default=0) - # upvotes=Column(Integer, default=0) - - businessOwnerEmail = Column(String, nullable=True) - businessOwnerDelegationEmails = Column(postgresql.ARRAY(String), nullable=True) - stewards = Column(String, nullable=True) - - SamlAdminGroupName = Column(String, nullable=True) - - redshiftClusterPermission = query_expression() - - importedS3Bucket = Column(Boolean, default=False) - importedGlueDatabase = Column(Boolean, default=False) - importedKmsKey = Column(Boolean, default=False) - importedAdminRole = Column(Boolean, default=False) - imported = Column(Boolean, default=False) diff --git a/backend/dataall/db/models/DatasetProfilingRun.py b/backend/dataall/db/models/DatasetProfilingRun.py deleted file mode 100644 index b4996db64..000000000 --- a/backend/dataall/db/models/DatasetProfilingRun.py +++ /dev/null @@ -1,20 +0,0 @@ -from sqlalchemy import Column, String -from sqlalchemy.dialects.postgresql import JSON - -from .. 
import Base, Resource, utils - - -class DatasetProfilingRun(Resource, Base): - __tablename__ = 'dataset_profiling_run' - profilingRunUri = Column( - String, primary_key=True, default=utils.uuid('profilingrun') - ) - datasetUri = Column(String, nullable=False) - GlueJobName = Column(String) - GlueJobRunId = Column(String) - GlueTriggerSchedule = Column(String) - GlueTriggerName = Column(String) - GlueTableName = Column(String) - AwsAccountId = Column(String) - results = Column(JSON, default={}) - status = Column(String, default='Created') diff --git a/backend/dataall/db/models/DatasetQualityRule.py b/backend/dataall/db/models/DatasetQualityRule.py deleted file mode 100644 index d5befa805..000000000 --- a/backend/dataall/db/models/DatasetQualityRule.py +++ /dev/null @@ -1,13 +0,0 @@ -from sqlalchemy import Column, String -from sqlalchemy.dialects.postgresql import JSON - -from .. import Base, Resource, utils - - -class DatasetQualityRule(Resource, Base): - __tablename__ = 'dataset_quality_rule' - datasetUri = Column(String, nullable=False) - ruleUri = Column(String, primary_key=True, default=utils.uuid('dqlrule')) - query = Column(String, nullable=False) - status = Column(String, nullable=False, default='Created') - logs = Column(JSON, default={}) diff --git a/backend/dataall/db/models/DatasetStorageLocation.py b/backend/dataall/db/models/DatasetStorageLocation.py deleted file mode 100644 index 33b121438..000000000 --- a/backend/dataall/db/models/DatasetStorageLocation.py +++ /dev/null @@ -1,19 +0,0 @@ -from sqlalchemy import Boolean, Column, String -from sqlalchemy.orm import query_expression - -from .. import Base, Resource, utils - - -class DatasetStorageLocation(Resource, Base): - __tablename__ = 'dataset_storage_location' - datasetUri = Column(String, nullable=False) - locationUri = Column(String, primary_key=True, default=utils.uuid('location')) - AWSAccountId = Column(String, nullable=False) - S3BucketName = Column(String, nullable=False) - S3Prefix = Column(String, nullable=False) - S3AccessPoint = Column(String, nullable=True) - region = Column(String, default='eu-west-1') - locationCreated = Column(Boolean, default=False) - userRoleForStorageLocation = query_expression() - projectPermission = query_expression() - environmentEndPoint = query_expression() diff --git a/backend/dataall/db/models/DatasetTable.py b/backend/dataall/db/models/DatasetTable.py deleted file mode 100644 index a1b06b192..000000000 --- a/backend/dataall/db/models/DatasetTable.py +++ /dev/null @@ -1,29 +0,0 @@ -from sqlalchemy import Column, String, Text -from sqlalchemy.dialects import postgresql -from sqlalchemy.orm import query_expression - -from .. import Base -from .. 
import Resource, utils - - -class DatasetTable(Resource, Base): - __tablename__ = 'dataset_table' - datasetUri = Column(String, nullable=False) - tableUri = Column(String, primary_key=True, default=utils.uuid('table')) - AWSAccountId = Column(String, nullable=False) - S3BucketName = Column(String, nullable=False) - S3Prefix = Column(String, nullable=False) - GlueDatabaseName = Column(String, nullable=False) - GlueTableName = Column(String, nullable=False) - GlueTableConfig = Column(Text) - GlueTableProperties = Column(postgresql.JSON, default={}) - LastGlueTableStatus = Column(String, default='InSync') - region = Column(String, default='eu-west-1') - # LastGeneratedPreviewDate= Column(DateTime, default=None) - confidentiality = Column(String, nullable=True) - userRoleForTable = query_expression() - projectPermission = query_expression() - redshiftClusterPermission = query_expression() - stage = Column(String, default='RAW') - topics = Column(postgresql.ARRAY(String), nullable=True) - confidentiality = Column(String, nullable=False, default='C1') diff --git a/backend/dataall/db/models/DatasetTableColumn.py b/backend/dataall/db/models/DatasetTableColumn.py deleted file mode 100644 index f4fe1f7d6..000000000 --- a/backend/dataall/db/models/DatasetTableColumn.py +++ /dev/null @@ -1,20 +0,0 @@ -from sqlalchemy import Column, String - -from .. import Base -from .. import Resource, utils - - -class DatasetTableColumn(Resource, Base): - __tablename__ = 'dataset_table_column' - datasetUri = Column(String, nullable=False) - tableUri = Column(String, nullable=False) - columnUri = Column(String, primary_key=True, default=utils.uuid('col')) - AWSAccountId = Column(String, nullable=False) - region = Column(String, nullable=False) - GlueDatabaseName = Column(String, nullable=False) - GlueTableName = Column(String, nullable=False) - region = Column(String, default='eu-west-1') - typeName = Column(String, nullable=False) - columnType = Column( - String, default='column' - ) # can be either "column" or "partition" diff --git a/backend/dataall/db/models/DatasetTableProfilingJob.py b/backend/dataall/db/models/DatasetTableProfilingJob.py deleted file mode 100644 index ea0fedbf0..000000000 --- a/backend/dataall/db/models/DatasetTableProfilingJob.py +++ /dev/null @@ -1,18 +0,0 @@ -from sqlalchemy import Column, String -from sqlalchemy.orm import query_expression - -from .. import Base -from .. 
import Resource, utils - - -class DatasetTableProfilingJob(Resource, Base): - __tablename__ = 'dataset_table_profiling_job' - tableUri = Column(String, nullable=False) - jobUri = Column(String, primary_key=True, default=utils.uuid('profilingjob')) - AWSAccountId = Column(String, nullable=False) - RunCommandId = Column(String, nullable=True) - GlueDatabaseName = Column(String, nullable=False) - GlueTableName = Column(String, nullable=False) - region = Column(String, default='eu-west-1') - status = Column(String, default='') - userRoleForJob = query_expression() diff --git a/backend/dataall/db/models/Enums.py b/backend/dataall/db/models/Enums.py deleted file mode 100644 index 469eafa7a..000000000 --- a/backend/dataall/db/models/Enums.py +++ /dev/null @@ -1,202 +0,0 @@ -from enum import Enum - - -class OrganisationUserRole(Enum): - Owner = '999' - Admin = '900' - Member = '100' - NotMember = '000' - Invited = '800' - - -class GroupMemberRole(Enum): - Owner = 'Owner' - Admin = 'Admin' - Member = 'Member' - NotMember = 'NotMember' - - -class EnvironmentPermission(Enum): - Owner = '999' - Admin = '900' - DatasetCreator = '800' - Invited = '200' - ProjectAccess = '050' - NotInvited = '000' - - -class EnvironmentType(Enum): - Data = 'Data' - Compute = 'Compute' - - -class ProjectMemberRole(Enum): - ProjectCreator = '999' - Admin = '900' - NotContributor = '000' - - -class DashboardRole(Enum): - Creator = '999' - Admin = '900' - Shared = '800' - NoPermission = '000' - - -class DataPipelineRole(Enum): - Creator = '999' - Admin = '900' - NoPermission = '000' - - -class DatasetRole(Enum): - # Permissions on a dataset - BusinessOwner = '999' - DataSteward = '998' - Creator = '950' - Admin = '900' - Shared = '300' - NoPermission = '000' - - -class RedshiftClusterRole(Enum): - Creator = '950' - Admin = '900' - Shared = '300' - NoPermission = '000' - - -class ScheduledQueryRole(Enum): - Creator = '950' - Admin = '900' - Shared = '300' - NoPermission = '000' - - -class SagemakerNotebookRole(Enum): - Creator = '950' - Admin = '900' - Shared = '300' - NoPermission = '000' - - -class SagemakerStudioRole(Enum): - Creator = '950' - Admin = '900' - Shared = '300' - NoPermission = '000' - - -class AirflowClusterRole(Enum): - Creator = '950' - Admin = '900' - Shared = '300' - NoPermission = '000' - - -class SortDirection(Enum): - asc = 'asc' - desc = 'desc' - - -class ShareableType(Enum): - Table = 'DatasetTable' - StorageLocation = 'DatasetStorageLocation' - View = 'View' - - -class PrincipalType(Enum): - Any = 'Any' - Organization = 'Organization' - Environment = 'Environment' - User = 'User' - Project = 'Project' - Public = 'Public' - Group = 'Group' - ConsumptionRole = 'ConsumptionRole' - - -class ShareObjectPermission(Enum): - Approvers = '999' - Requesters = '800' - DatasetAdmins = '700' - NoPermission = '000' - - -class ShareObjectStatus(Enum): - Deleted = 'Deleted' - Approved = 'Approved' - Rejected = 'Rejected' - Revoked = 'Revoked' - Draft = 'Draft' - Submitted = 'Submitted' - Revoke_In_Progress = 'Revoke_In_Progress' - Share_In_Progress = 'Share_In_Progress' - Processed = 'Processed' - - -class ShareItemStatus(Enum): - Deleted = 'Deleted' - PendingApproval = 'PendingApproval' - Share_Approved = 'Share_Approved' - Share_Rejected = 'Share_Rejected' - Share_In_Progress = 'Share_In_Progress' - Share_Succeeded = 'Share_Succeeded' - Share_Failed = 'Share_Failed' - Revoke_Approved = 'Revoke_Approved' - Revoke_In_Progress = 'Revoke_In_Progress' - Revoke_Failed = 'Revoke_Failed' - Revoke_Succeeded = 
'Revoke_Succeeded' - - -class ShareObjectActions(Enum): - Submit = 'Submit' - Approve = 'Approve' - Reject = 'Reject' - RevokeItems = 'RevokeItems' - Start = 'Start' - Finish = 'Finish' - FinishPending = 'FinishPending' - Delete = 'Delete' - - -class ShareItemActions(Enum): - AddItem = 'AddItem' - RemoveItem = 'RemoveItem' - Failure = 'Failure' - Success = 'Success' - - -class ConfidentialityClassification(Enum): - Unclassified = 'Unclassified' - Official = 'Official' - Secret = 'Secret' - - -class Language(Enum): - English = 'English' - French = 'French' - German = 'German' - - -class Topic(Enum): - Finances = 'Finances' - HumanResources = 'HumanResources' - Products = 'Products' - Services = 'Services' - Operations = 'Operations' - Research = 'Research' - Sales = 'Sales' - Orders = 'Orders' - Sites = 'Sites' - Energy = 'Energy' - Customers = 'Customers' - Misc = 'Misc' - - -class WorksheetRole(Enum): - Creator = '950' - Admin = '900' - SharedWithWritePermission = '500' - SharedWithReadPermission = '400' - NoPermission = '000' diff --git a/backend/dataall/db/models/Environment.py b/backend/dataall/db/models/Environment.py deleted file mode 100644 index 295f56dac..000000000 --- a/backend/dataall/db/models/Environment.py +++ /dev/null @@ -1,42 +0,0 @@ -from sqlalchemy import Boolean, Column, String -from sqlalchemy.orm import query_expression - -from .. import Base -from .. import Resource, utils - - -class Environment(Resource, Base): - __tablename__ = 'environment' - organizationUri = Column(String, nullable=False) - environmentUri = Column(String, primary_key=True, default=utils.uuid('environment')) - AwsAccountId = Column(String, nullable=False) - region = Column(String, nullable=False, default='eu-west-1') - cognitoGroupName = Column(String, nullable=True) - resourcePrefix = Column(String, nullable=False, default='dataall') - - validated = Column(Boolean, default=False) - environmentType = Column(String, nullable=False, default='Data') - isOrganizationDefaultEnvironment = Column(Boolean, default=False) - EnvironmentDefaultIAMRoleName = Column(String, nullable=False) - EnvironmentDefaultIAMRoleImported = Column(Boolean, default=False) - EnvironmentDefaultIAMRoleArn = Column(String, nullable=False) - EnvironmentDefaultBucketName = Column(String) - EnvironmentDefaultAthenaWorkGroup = Column(String) - roleCreated = Column(Boolean, nullable=False, default=False) - - dashboardsEnabled = Column(Boolean, default=False) - notebooksEnabled = Column(Boolean, default=True) - mlStudiosEnabled = Column(Boolean, default=True) - pipelinesEnabled = Column(Boolean, default=True) - warehousesEnabled = Column(Boolean, default=True) - - userRoleInEnvironment = query_expression() - - SamlGroupName = Column(String, nullable=True) - CDKRoleArn = Column(String, nullable=False) - - subscriptionsEnabled = Column(Boolean, default=False) - subscriptionsProducersTopicName = Column(String) - subscriptionsProducersTopicImported = Column(Boolean, default=False) - subscriptionsConsumersTopicName = Column(String) - subscriptionsConsumersTopicImported = Column(Boolean, default=False) diff --git a/backend/dataall/db/models/EnvironmentGroup.py b/backend/dataall/db/models/EnvironmentGroup.py deleted file mode 100644 index a9547cf5b..000000000 --- a/backend/dataall/db/models/EnvironmentGroup.py +++ /dev/null @@ -1,26 +0,0 @@ -import datetime - -from sqlalchemy import Column, DateTime, String, Boolean - -from .Enums import EnvironmentPermission as EnvironmentPermissionEnum -from .. 
import Base - - -class EnvironmentGroup(Base): - __tablename__ = 'environment_group_permission' - groupUri = Column(String, primary_key=True) - environmentUri = Column(String, primary_key=True) - invitedBy = Column(String, nullable=True) - environmentIAMRoleArn = Column(String, nullable=True) - environmentIAMRoleName = Column(String, nullable=True) - environmentIAMRoleImported = Column(Boolean, default=False) - environmentAthenaWorkGroup = Column(String, nullable=True) - description = Column(String, default='No description provided') - created = Column(DateTime, default=datetime.datetime.now) - updated = Column(DateTime, onupdate=datetime.datetime.now) - deleted = Column(DateTime) - - # environmentRole is the role of the entity (group or user) in the Environment - groupRoleInEnvironment = Column( - String, nullable=False, default=EnvironmentPermissionEnum.Invited.value - ) diff --git a/backend/dataall/db/models/FeedMessage.py b/backend/dataall/db/models/FeedMessage.py deleted file mode 100644 index 5ef382b35..000000000 --- a/backend/dataall/db/models/FeedMessage.py +++ /dev/null @@ -1,15 +0,0 @@ -from datetime import datetime - -from sqlalchemy import Column, String, DateTime - -from .. import Base, utils - - -class FeedMessage(Base): - __tablename__ = 'feed_message' - feedMessageUri = Column(String, primary_key=True, default=utils.uuid('_')) - creator = Column(String, nullable=False) - created = Column(DateTime, nullable=False, default=datetime.now) - content = Column(String, nullable=True) - targetUri = Column(String, nullable=False) - targetType = Column(String, nullable=False) diff --git a/backend/dataall/db/models/Glossary.py b/backend/dataall/db/models/Glossary.py deleted file mode 100644 index a17cef308..000000000 --- a/backend/dataall/db/models/Glossary.py +++ /dev/null @@ -1,67 +0,0 @@ -import enum -from datetime import datetime - -from sqlalchemy import Boolean, Column, String, DateTime, Enum -from sqlalchemy.dialects import postgresql -from sqlalchemy.orm import query_expression - -from .. import Base -from .. 
import utils - - -class GlossaryNodeStatus(enum.Enum): - draft = 'draft' - approved = 'approved' - expired = 'expired' - alert = 'alert' - archived = 'archived' - - -class GlossaryNode(Base): - __tablename__ = 'glossary_node' - nodeUri = Column(String, primary_key=True, default=utils.uuid('glossary_node')) - parentUri = Column(String, nullable=True) - nodeType = Column(String, default='G') - status = Column( - String, Enum(GlossaryNodeStatus), default=GlossaryNodeStatus.draft.value - ) - path = Column(String, nullable=False) - label = Column(String, nullable=False) - readme = Column(String, nullable=False) - created = Column(DateTime, default=datetime.now) - updated = Column(DateTime, nullable=True, onupdate=datetime.now) - deleted = Column(DateTime, nullable=True) - owner = Column(String, nullable=False) - admin = Column(String, nullable=True) - isLinked = query_expression() - isMatch = query_expression() - - -class GlossarySchemaDefinition: - __tablename__ = 'glossary_schema' - schemaUri = Column(String, primary_key=True, default=utils.uuid('glossary_schema')) - json_schema = Column(postgresql.JSON, nullable=False) - - -class GlossarySchemaMap: - __tablename__ = 'glossary_schema_map' - schemaUri = Column(String, primary_key=True, nullable=False) - nodeUri = Column(String, primary_key=True, nullable=False) - schema = Column(postgresql.JSON, nullable=False) - - -class TermLink(Base): - __tablename__ = 'term_link' - linkUri = Column(String, primary_key=True, default=utils.uuid('term_link')) - nodeUri = Column(String, nullable=False) - targetUri = Column(String, nullable=False) - targetType = Column(String, nullable=False) - approvedBySteward = Column(Boolean, default=False) - approvedByOwner = Column(Boolean, default=False) - owner = Column(String, nullable=False) - created = Column(DateTime, default=datetime.now) - updated = Column(DateTime, nullable=True, onupdate=datetime.now) - deleted = Column(DateTime, nullable=True) - path = query_expression() - label = query_expression() - readme = query_expression() diff --git a/backend/dataall/db/models/Group.py b/backend/dataall/db/models/Group.py deleted file mode 100644 index c68155773..000000000 --- a/backend/dataall/db/models/Group.py +++ /dev/null @@ -1,8 +0,0 @@ -from sqlalchemy import Column, String - -from .. import Base, Resource, utils - - -class Group(Resource, Base): - __tablename__ = 'group' - groupUri = Column(String, primary_key=True, default=utils.uuid('group')) diff --git a/backend/dataall/db/models/GroupMember.py b/backend/dataall/db/models/GroupMember.py deleted file mode 100644 index 18f3176ea..000000000 --- a/backend/dataall/db/models/GroupMember.py +++ /dev/null @@ -1,19 +0,0 @@ -import datetime - -from sqlalchemy import Column, DateTime, String - -from .Enums import GroupMemberRole -from .. import Base - - -class GroupMember(Base): - __tablename__ = 'group_member' - groupUri = Column(String, primary_key=True) - userName = Column(String, primary_key=True) - created = Column(DateTime, default=datetime.datetime.now) - updated = Column(DateTime, onupdate=datetime.datetime.now) - deleted = Column(DateTime) - - userRoleInGroup = Column( - String, nullable=False, default=GroupMemberRole.Member.value - ) diff --git a/backend/dataall/db/models/KeyValueTag.py b/backend/dataall/db/models/KeyValueTag.py deleted file mode 100644 index f81f0b82b..000000000 --- a/backend/dataall/db/models/KeyValueTag.py +++ /dev/null @@ -1,14 +0,0 @@ -from sqlalchemy import Column, String, Boolean - -from .. import Base -from .. 
import Resource, utils - - -class KeyValueTag(Base): - __tablename__ = 'keyvaluetag' - tagUri = Column(String, primary_key=True, default=utils.uuid('keyvaluetag')) - targetUri = Column(String, nullable=False) - targetType = Column(String, nullable=False) - key = Column(String, nullable=False) - value = Column(String, nullable=False) - cascade = Column(Boolean, default=False) diff --git a/backend/dataall/db/models/Notification.py b/backend/dataall/db/models/Notification.py deleted file mode 100644 index 1fcb55fb4..000000000 --- a/backend/dataall/db/models/Notification.py +++ /dev/null @@ -1,31 +0,0 @@ -import enum -from datetime import datetime - -from sqlalchemy import Column, String, Boolean, Enum, DateTime - -from .. import Base -from .. import utils - - -class NotificationType(enum.Enum): - SHARE_OBJECT_SUBMITTED = 'SHARE_OBJECT_SUBMITTED' - SHARE_ITEM_REQUEST = 'SHARE_ITEM_REQUEST' - SHARE_OBJECT_APPROVED = 'SHARE_OBJECT_APPROVED' - SHARE_OBJECT_REJECTED = 'SHARE_OBJECT_REJECTED' - SHARE_OBJECT_PENDING_APPROVAL = 'SHARE_OBJECT_PENDING_APPROVAL' - DATASET_VERSION = 'DATASET_VERSION' - - -class Notification(Base): - __tablename__ = 'notification' - notificationUri = Column( - String, primary_key=True, default=utils.uuid('notificationtype') - ) - type = Column(Enum(NotificationType), nullable=True) - message = Column(String, nullable=False) - username = Column(String, nullable=False) - is_read = Column(Boolean, nullable=False, default=False) - target_uri = Column(String) - created = Column(DateTime, default=datetime.now) - updated = Column(DateTime, onupdate=datetime.now) - deleted = Column(DateTime) diff --git a/backend/dataall/db/models/Organization.py b/backend/dataall/db/models/Organization.py deleted file mode 100644 index 72b8bf044..000000000 --- a/backend/dataall/db/models/Organization.py +++ /dev/null @@ -1,17 +0,0 @@ -from sqlalchemy import Column, String -from sqlalchemy.orm import query_expression - -from .. import Base -from .. import Resource, utils - - -class Organization(Resource, Base): - __tablename__ = 'organization' - organizationUri = Column( - String, primary_key=True, default=utils.uuid('organization') - ) - - # `role` is a dynamically generated SQL expression - # computing the role of the user in an organization - userRoleInOrganization = query_expression() - SamlGroupName = Column(String, nullable=True) diff --git a/backend/dataall/db/models/OrganizationGroup.py b/backend/dataall/db/models/OrganizationGroup.py deleted file mode 100644 index 0a9fa65d9..000000000 --- a/backend/dataall/db/models/OrganizationGroup.py +++ /dev/null @@ -1,17 +0,0 @@ -import datetime - -from sqlalchemy import Column, DateTime, String - -from .Enums import EnvironmentPermission as EnvironmentPermissionEnum -from .. import Base - - -class OrganizationGroup(Base): - __tablename__ = 'organization_group' - groupUri = Column(String, primary_key=True) - organizationUri = Column(String, primary_key=True) - invitedBy = Column(String, nullable=True) - description = Column(String, default='No description provided') - created = Column(DateTime, default=datetime.datetime.now) - updated = Column(DateTime, onupdate=datetime.datetime.now) - deleted = Column(DateTime) diff --git a/backend/dataall/db/models/Permission.py b/backend/dataall/db/models/Permission.py deleted file mode 100644 index d29fb274b..000000000 --- a/backend/dataall/db/models/Permission.py +++ /dev/null @@ -1,21 +0,0 @@ -import datetime -import enum - -from sqlalchemy import Column, String, DateTime, Enum - -from .. 
import Base, utils - - -class PermissionType(enum.Enum): - TENANT = 'TENANT' - RESOURCE = 'RESOURCE' - - -class Permission(Base): - __tablename__ = 'permission' - permissionUri = Column(String, primary_key=True, default=utils.uuid('permission')) - name = Column(String, nullable=False, index=True) - type = Column(Enum(PermissionType), nullable=False) - description = Column(String, nullable=False) - created = Column(DateTime, default=datetime.datetime.now) - updated = Column(DateTime, onupdate=datetime.datetime.now) diff --git a/backend/dataall/db/models/RedshiftCluster.py b/backend/dataall/db/models/RedshiftCluster.py deleted file mode 100644 index db40200ae..000000000 --- a/backend/dataall/db/models/RedshiftCluster.py +++ /dev/null @@ -1,41 +0,0 @@ -from sqlalchemy import Column, String, ARRAY, Integer, Boolean -from sqlalchemy.orm import query_expression - -from .. import utils, Resource, Base - - -class RedshiftCluster(Resource, Base): - __tablename__ = 'redshiftcluster' - environmentUri = Column(String, nullable=False) - organizationUri = Column(String, nullable=False) - clusterUri = Column(String, primary_key=True, default=utils.uuid('cluster')) - clusterArn = Column(String) - clusterName = Column(String) - description = Column(String) - databaseName = Column(String, default='datahubdb') - databaseUser = Column(String, default='datahubuser') - masterUsername = Column(String) - masterDatabaseName = Column(String) - nodeType = Column(String) - numberOfNodes = Column(Integer) - region = Column(String, default='eu-west-1') - AwsAccountId = Column(String) - kmsAlias = Column(String) - status = Column(String, default='CREATING') - vpc = Column(String) - subnetGroupName = Column(String) - subnetIds = Column(ARRAY(String), default=[]) - securityGroupIds = Column(ARRAY(String), default=[]) - CFNStackName = Column(String) - CFNStackStatus = Column(String) - CFNStackArn = Column(String) - IAMRoles = Column(ARRAY(String), default=[]) - endpoint = Column(String) - port = Column(Integer) - datahubSecret = Column(String) - masterSecret = Column(String) - external_schema_created = Column(Boolean, default=False) - SamlGroupName = Column(String) - imported = Column(Boolean, default=False) - userRoleForCluster = query_expression() - userRoleInEnvironment = query_expression() diff --git a/backend/dataall/db/models/RedshiftClusterDataset.py b/backend/dataall/db/models/RedshiftClusterDataset.py deleted file mode 100644 index cfed208a8..000000000 --- a/backend/dataall/db/models/RedshiftClusterDataset.py +++ /dev/null @@ -1,17 +0,0 @@ -import datetime - -from sqlalchemy import Column, DateTime, String, Boolean -from sqlalchemy.orm import query_expression - -from .. import Base - - -class RedshiftClusterDataset(Base): - __tablename__ = 'redshiftcluster_dataset' - clusterUri = Column(String, nullable=False, primary_key=True) - datasetUri = Column(String, nullable=False, primary_key=True) - datasetCopyEnabled = Column(Boolean, default=True) - created = Column(DateTime, default=datetime.datetime.now) - updated = Column(DateTime, onupdate=datetime.datetime.now) - deleted = Column(DateTime) - userRoleForDataset = query_expression() diff --git a/backend/dataall/db/models/RedshiftClusterDatasetTable.py b/backend/dataall/db/models/RedshiftClusterDatasetTable.py deleted file mode 100644 index 1dcd8dc5a..000000000 --- a/backend/dataall/db/models/RedshiftClusterDatasetTable.py +++ /dev/null @@ -1,20 +0,0 @@ -import datetime - -from sqlalchemy import Column, DateTime, String, Boolean - -from .. 
import Base - - -class RedshiftClusterDatasetTable(Base): - __tablename__ = 'redshiftcluster_datasettable' - clusterUri = Column(String, nullable=False, primary_key=True) - datasetUri = Column(String, nullable=False, primary_key=True) - tableUri = Column(String, nullable=False, primary_key=True) - shareUri = Column(String) - enabled = Column(Boolean, default=False) - schema = Column(String, nullable=False) - databaseName = Column(String, nullable=False) - dataLocation = Column(String, nullable=True) - created = Column(DateTime, default=datetime.datetime.now) - updated = Column(DateTime, onupdate=datetime.datetime.now) - deleted = Column(DateTime) diff --git a/backend/dataall/db/models/ResourcePolicy.py b/backend/dataall/db/models/ResourcePolicy.py deleted file mode 100644 index 538a11872..000000000 --- a/backend/dataall/db/models/ResourcePolicy.py +++ /dev/null @@ -1,27 +0,0 @@ -import datetime - -from sqlalchemy import Column, String, DateTime, Enum as DBEnum -from sqlalchemy.orm import relationship - -from .. import Base, utils - - -class ResourcePolicy(Base): - __tablename__ = 'resource_policy' - - sid = Column(String, primary_key=True, default=utils.uuid('resource_policy')) - - resourceUri = Column(String, nullable=False, index=True) - resourceType = Column(String, nullable=False, index=True) - - principalId = Column(String, nullable=False, index=True) - principalType = Column( - DBEnum('USER', 'GROUP', 'SERVICE', name='rp_principal_type'), default='GROUP' - ) - - permissions = relationship( - 'ResourcePolicyPermission', uselist=True, backref='resource_policy' - ) - - created = Column(DateTime, default=datetime.datetime.now) - updated = Column(DateTime, onupdate=datetime.datetime.now) diff --git a/backend/dataall/db/models/ResourcePolicyPermission.py b/backend/dataall/db/models/ResourcePolicyPermission.py deleted file mode 100644 index a7384c83b..000000000 --- a/backend/dataall/db/models/ResourcePolicyPermission.py +++ /dev/null @@ -1,20 +0,0 @@ -import datetime - -from sqlalchemy import Column, String, DateTime, ForeignKey -from sqlalchemy.orm import relationship - -from .. import Base -from . import ResourcePolicy -from . import Permission - - -class ResourcePolicyPermission(Base): - __tablename__ = 'resource_policy_permission' - - sid = Column(String, ForeignKey(ResourcePolicy.sid), primary_key=True) - permissionUri = Column( - String, ForeignKey(Permission.permissionUri), primary_key=True - ) - permission = relationship('Permission') - created = Column(DateTime, default=datetime.datetime.now) - updated = Column(DateTime, onupdate=datetime.datetime.now) diff --git a/backend/dataall/db/models/SagemakerNotebook.py b/backend/dataall/db/models/SagemakerNotebook.py deleted file mode 100644 index 675ebf334..000000000 --- a/backend/dataall/db/models/SagemakerNotebook.py +++ /dev/null @@ -1,24 +0,0 @@ -from sqlalchemy import Column, String, Integer -from sqlalchemy.orm import query_expression - -from .. import Base -from .. 
import Resource, utils - - -class SagemakerNotebook(Resource, Base): - __tablename__ = 'sagemaker_notebook' - environmentUri = Column(String, nullable=False) - notebookUri = Column(String, primary_key=True, default=utils.uuid('notebook')) - NotebookInstanceName = Column( - String, nullable=False, default=utils.slugifier('label') - ) - NotebookInstanceStatus = Column(String, nullable=False) - AWSAccountId = Column(String, nullable=False) - RoleArn = Column(String, nullable=False) - region = Column(String, default='eu-west-1') - SamlAdminGroupName = Column(String, nullable=True) - VpcId = Column(String, nullable=True) - SubnetId = Column(String, nullable=True) - VolumeSizeInGB = Column(Integer, nullable=True) - InstanceType = Column(String, nullable=True) - userRoleForNotebook = query_expression() diff --git a/backend/dataall/db/models/SagemakerStudio.py b/backend/dataall/db/models/SagemakerStudio.py deleted file mode 100644 index 3d469f0c9..000000000 --- a/backend/dataall/db/models/SagemakerStudio.py +++ /dev/null @@ -1,38 +0,0 @@ -from sqlalchemy import Column, String -from sqlalchemy.orm import query_expression - -from .. import Base -from .. import Resource, utils - - -class SagemakerStudio(Resource, Base): - __tablename__ = 'sagemaker_studio_domain' - environmentUri = Column(String, nullable=False) - sagemakerStudioUri = Column( - String, primary_key=True, default=utils.uuid('sagemakerstudio') - ) - sagemakerStudioDomainID = Column(String, nullable=False) - SagemakerStudioStatus = Column(String, nullable=False) - AWSAccountId = Column(String, nullable=False) - RoleArn = Column(String, nullable=False) - region = Column(String, default='eu-west-1') - userRoleForSagemakerStudio = query_expression() - - -class SagemakerStudioUserProfile(Resource, Base): - __tablename__ = 'sagemaker_studio_user_profile' - environmentUri = Column(String, nullable=False) - sagemakerStudioUserProfileUri = Column( - String, primary_key=True, default=utils.uuid('sagemakerstudiouserprofile') - ) - sagemakerStudioUserProfileStatus = Column(String, nullable=False) - sagemakerStudioUserProfileName = Column(String, nullable=False) - sagemakerStudioUserProfileNameSlugify = Column( - String, nullable=False, default=utils.slugifier('label') - ) - sagemakerStudioDomainID = Column(String, nullable=False) - AWSAccountId = Column(String, nullable=False) - RoleArn = Column(String, nullable=False) - region = Column(String, default='eu-west-1') - SamlAdminGroupName = Column(String, nullable=True) - userRoleForSagemakerStudioUserProfile = query_expression() diff --git a/backend/dataall/db/models/ShareObject.py b/backend/dataall/db/models/ShareObject.py deleted file mode 100644 index 403de15a7..000000000 --- a/backend/dataall/db/models/ShareObject.py +++ /dev/null @@ -1,39 +0,0 @@ -from datetime import datetime, timedelta -from uuid import uuid4 - -from sqlalchemy import Boolean, Column, String, DateTime -from sqlalchemy.orm import query_expression - -from .Enums import ShareObjectStatus -from .. 
import Base, utils - - -def in_one_month(): - return datetime.now() + timedelta(days=31) - - -def _uuid4(): - return str(uuid4()) - - -class ShareObject(Base): - __tablename__ = 'share_object' - shareUri = Column( - String, nullable=False, primary_key=True, default=utils.uuid('share') - ) - datasetUri = Column(String, nullable=False) - environmentUri = Column(String) - groupUri = Column(String) - principalIAMRoleName = Column(String, nullable=True) - principalId = Column(String, nullable=True) - principalType = Column(String, nullable=True, default='Group') - status = Column(String, nullable=False, default=ShareObjectStatus.Draft.value) - owner = Column(String, nullable=False) - created = Column(DateTime, default=datetime.now) - updated = Column(DateTime, onupdate=datetime.now) - deleted = Column(DateTime) - confirmed = Column(Boolean, default=False) - requestPurpose = Column(String, nullable=True) - rejectPurpose = Column(String, nullable=True) - userRoleForShareObject = query_expression() - existingSharedItems = query_expression() diff --git a/backend/dataall/db/models/ShareObjectItem.py b/backend/dataall/db/models/ShareObjectItem.py deleted file mode 100644 index dac037687..000000000 --- a/backend/dataall/db/models/ShareObjectItem.py +++ /dev/null @@ -1,27 +0,0 @@ -from datetime import datetime - -from sqlalchemy import Column, DateTime, String - -from .Enums import ShareItemStatus -from .. import Base, utils - - -class ShareObjectItem(Base): - __tablename__ = 'share_object_item' - shareUri = Column(String, nullable=False) - shareItemUri = Column( - String, default=utils.uuid('shareitem'), nullable=False, primary_key=True - ) - itemType = Column(String, nullable=False) - itemUri = Column(String, nullable=False) - itemName = Column(String, nullable=False) - permission = Column(String, nullable=True) - created = Column(DateTime, nullable=False, default=datetime.now) - updated = Column(DateTime, nullable=True, onupdate=datetime.now) - deleted = Column(DateTime, nullable=True) - owner = Column(String, nullable=False) - GlueDatabaseName = Column(String, nullable=True) - GlueTableName = Column(String, nullable=True) - S3AccessPointName = Column(String, nullable=True) - status = Column(String, nullable=False, default=ShareItemStatus.PendingApproval.value) - action = Column(String, nullable=True) diff --git a/backend/dataall/db/models/Stack.py b/backend/dataall/db/models/Stack.py deleted file mode 100644 index d168f64c1..000000000 --- a/backend/dataall/db/models/Stack.py +++ /dev/null @@ -1,33 +0,0 @@ -import datetime - -from sqlalchemy import Column, DateTime, String -from sqlalchemy.dialects import postgresql - -from .. import Base -from .. 
import utils - - -class Stack(Base): - __tablename__ = 'stack' - stackUri = Column( - String, nullable=False, default=utils.uuid('stack'), primary_key=True - ) - name = Column(String, nullable=True) - targetUri = Column(String, nullable=False) - accountid = Column(String, nullable=False) - region = Column(String, nullable=False) - cronexpr = Column(String, nullable=True) - status = Column(String, nullable=False, default='pending') - stack = Column(String, nullable=False) - payload = Column(postgresql.JSON, nullable=True) - created = Column(DateTime, default=datetime.datetime.now()) - updated = Column(DateTime, onupdate=datetime.datetime.now()) - stackid = Column(String) - outputs = Column(postgresql.JSON) - resources = Column(postgresql.JSON) - error = Column(postgresql.JSON) - events = Column(postgresql.JSON) - lastSeen = Column( - DateTime, default=lambda: datetime.datetime(year=1900, month=1, day=1) - ) - EcsTaskArn = Column(String, nullable=True) diff --git a/backend/dataall/db/models/Tag.py b/backend/dataall/db/models/Tag.py deleted file mode 100644 index 537d04fe1..000000000 --- a/backend/dataall/db/models/Tag.py +++ /dev/null @@ -1,37 +0,0 @@ -from datetime import datetime - -from sqlalchemy import Column, DateTime, String - -from .. import Base -from .. import utils - - -class Tag(Base): - __tablename__ = 'tag' - id = Column(String, primary_key=True, default=utils.uuid('tag')) - tag = Column(String, nullable=False) - owner = Column(String) - created = Column(DateTime, default=datetime.now) - - -class ItemTags(Base): - __tablename__ = 'item_tags' - tagid = Column(String, primary_key=True) - itemid = Column(String, primary_key=True) - - -def updateObjectTags(session, username, uri: str = None, tags=[]): - ids = {} - session.query(ItemTags).filter(ItemTags.itemid == uri).delete() - if tags: - for t in set(tags or []): - exists = session.query(Tag).filter(Tag.tag == t).first() - if exists: - id = exists.id - else: - id = utils.uuid('tag')(None) - tag = Tag(id=id, tag=t, owner=username) - session.add(tag) - session.commit() - link = ItemTags(tagid=id, itemid=uri) - session.add(link) diff --git a/backend/dataall/db/models/Task.py b/backend/dataall/db/models/Task.py deleted file mode 100644 index c53f03990..000000000 --- a/backend/dataall/db/models/Task.py +++ /dev/null @@ -1,26 +0,0 @@ -import datetime - -from sqlalchemy import Column, DateTime, String -from sqlalchemy.dialects import postgresql - -from .. import Base -from .. import utils - - -class Task(Base): - __tablename__ = 'task' - taskUri = Column( - String, nullable=False, default=utils.uuid('Task'), primary_key=True - ) - targetUri = Column(String, nullable=False) - cronexpr = Column(String, nullable=True) - status = Column(String, nullable=False, default='pending') - action = Column(String, nullable=False) - payload = Column(postgresql.JSON, nullable=True) - created = Column(DateTime, default=datetime.datetime.now()) - updated = Column(DateTime, onupdate=datetime.datetime.now()) - response = Column(postgresql.JSON) - error = Column(postgresql.JSON) - lastSeen = Column( - DateTime, default=lambda: datetime.datetime(year=1900, month=1, day=1) - ) diff --git a/backend/dataall/db/models/Tenant.py b/backend/dataall/db/models/Tenant.py deleted file mode 100644 index 6ee0d5cac..000000000 --- a/backend/dataall/db/models/Tenant.py +++ /dev/null @@ -1,14 +0,0 @@ -import datetime - -from sqlalchemy import Column, String, DateTime - -from .. 
import Base, utils - - -class Tenant(Base): - __tablename__ = 'tenant' - tenantUri = Column(String, primary_key=True, default=utils.uuid('tenant')) - name = Column(String, nullable=False, index=True, unique=True) - description = Column(String, default='No description provided') - created = Column(DateTime, default=datetime.datetime.now) - updated = Column(DateTime, onupdate=datetime.datetime.now) diff --git a/backend/dataall/db/models/TenantAdministrator.py b/backend/dataall/db/models/TenantAdministrator.py deleted file mode 100644 index 9ca916a66..000000000 --- a/backend/dataall/db/models/TenantAdministrator.py +++ /dev/null @@ -1,11 +0,0 @@ -from sqlalchemy import Column, String, ForeignKey - -from .Tenant import Tenant -from .. import Base - - -class TenantAdministrator(Base): - __tablename__ = 'tenant_administrator' - userName = Column(String, primary_key=True, nullable=False) - tenantUri = Column(String, ForeignKey(Tenant.tenantUri), nullable=False) - userRoleInTenant = Column(String, nullable=False, default='ADMIN') diff --git a/backend/dataall/db/models/TenantPolicy.py b/backend/dataall/db/models/TenantPolicy.py deleted file mode 100644 index 5946e769f..000000000 --- a/backend/dataall/db/models/TenantPolicy.py +++ /dev/null @@ -1,28 +0,0 @@ -import datetime - -from sqlalchemy import Column, String, DateTime, ForeignKey, Enum as DBEnum -from sqlalchemy.orm import relationship - -from .. import Base, utils - - -class TenantPolicy(Base): - __tablename__ = 'tenant_policy' - - sid = Column(String, primary_key=True, default=utils.uuid('tenant_policy')) - - tenantUri = Column(String, ForeignKey('tenant.tenantUri'), nullable=False) - tenant = relationship('Tenant') - - principalId = Column(String, nullable=False, index=True) - principalType = Column( - DBEnum('USER', 'GROUP', 'SERVICE', name='tenant_principal_type'), - default='GROUP', - ) - - permissions = relationship( - 'TenantPolicyPermission', uselist=True, backref='tenant_policy' - ) - - created = Column(DateTime, default=datetime.datetime.now) - updated = Column(DateTime, onupdate=datetime.datetime.now) diff --git a/backend/dataall/db/models/TenantPolicyPermission.py b/backend/dataall/db/models/TenantPolicyPermission.py deleted file mode 100644 index 6771bd4b0..000000000 --- a/backend/dataall/db/models/TenantPolicyPermission.py +++ /dev/null @@ -1,20 +0,0 @@ -import datetime - -from sqlalchemy import Column, String, DateTime, ForeignKey -from sqlalchemy.orm import relationship - -from . import Permission -from . import TenantPolicy -from .. import Base - - -class TenantPolicyPermission(Base): - __tablename__ = 'tenant_policy_permission' - - sid = Column(String, ForeignKey(TenantPolicy.sid), primary_key=True) - permissionUri = Column( - String, ForeignKey(Permission.permissionUri), primary_key=True - ) - permission = relationship('Permission') - created = Column(DateTime, default=datetime.datetime.now) - updated = Column(DateTime, onupdate=datetime.datetime.now) diff --git a/backend/dataall/db/models/User.py b/backend/dataall/db/models/User.py deleted file mode 100644 index 136db76bb..000000000 --- a/backend/dataall/db/models/User.py +++ /dev/null @@ -1,10 +0,0 @@ -from sqlalchemy import Column, String - -from .. import Base -from .. 
import utils - - -class User(Base): - __tablename__ = 'user' - userId = Column(String, primary_key=True, default=utils.uuid('user')) - userName = Column(String, nullable=False) diff --git a/backend/dataall/db/models/Vote.py b/backend/dataall/db/models/Vote.py deleted file mode 100644 index dfb523aaa..000000000 --- a/backend/dataall/db/models/Vote.py +++ /dev/null @@ -1,23 +0,0 @@ -import datetime - -from sqlalchemy import Column, String, Boolean, DateTime - -from .. import Base, utils - - -class Vote(Base): - __tablename__ = 'vote' - voteUri = Column(String, primary_key=True, default=utils.uuid('vote')) - username = Column(String, nullable=False) - targetUri = Column(String, nullable=False) - targetType = Column(String, nullable=False) - upvote = Column(Boolean, nullable=True) - created = Column(DateTime, default=datetime.datetime.now) - updated = Column(DateTime, onupdate=datetime.datetime.now) - - def __repr__(self): - if self.upvote: - vote = 'Up' - else: - vote = 'Down' - return f'' diff --git a/backend/dataall/db/models/Vpc.py b/backend/dataall/db/models/Vpc.py deleted file mode 100644 index 35ab18e0c..000000000 --- a/backend/dataall/db/models/Vpc.py +++ /dev/null @@ -1,21 +0,0 @@ -from sqlalchemy import Column, String, Boolean -from sqlalchemy.dialects.postgresql import ARRAY -from sqlalchemy.orm import query_expression - -from .. import Base, Resource, utils - - -class Vpc(Resource, Base): - __tablename__ = 'vpc' - environmentUri = Column(String, nullable=False) - vpcUri = Column( - String, nullable=False, primary_key=True, default=utils.uuid('vpcUri') - ) - region = Column(String, default='eu-west-1') - AwsAccountId = Column(String, nullable=False) - SamlGroupName = Column(String) - VpcId = Column(String, nullable=False) - privateSubnetIds = Column(ARRAY(String)) - publicSubnetIds = Column(ARRAY(String)) - default = Column(Boolean, default=False) - userRoleForPipeline = query_expression() diff --git a/backend/dataall/db/models/Worksheet.py b/backend/dataall/db/models/Worksheet.py deleted file mode 100644 index c78b12056..000000000 --- a/backend/dataall/db/models/Worksheet.py +++ /dev/null @@ -1,53 +0,0 @@ -import datetime -import enum - -from sqlalchemy import Column, Boolean, DateTime, Integer, Enum, String -from sqlalchemy.dialects import postgresql -from sqlalchemy.orm import query_expression - -from .. import Base -from .. 
import Resource, utils - - -class QueryType(enum.Enum): - chart = 'chart' - data = 'data' - - -class Worksheet(Resource, Base): - __tablename__ = 'worksheet' - worksheetUri = Column(String, primary_key=True, default=utils.uuid('_')) - SamlAdminGroupName = Column(String, nullable=False) - sqlBody = Column(String, nullable=True) - chartConfig = Column(postgresql.JSON, nullable=True) - userRoleForWorksheet = query_expression() - lastSavedAthenaQueryIdForQuery = Column(String, nullable=True) - lastSavedAthenaQueryIdForChart = Column(String, nullable=True) - - -class WorksheetQueryResult(Base): - __tablename__ = 'worksheet_query_result' - worksheetUri = Column(String, nullable=False) - AthenaQueryId = Column(String, primary_key=True) - status = Column(String, nullable=False) - queryType = Column(Enum(QueryType), nullable=False, default=True) - sqlBody = Column(String, nullable=False) - AwsAccountId = Column(String, nullable=False) - region = Column(String, nullable=False) - OutputLocation = Column(String, nullable=False) - error = Column(String, nullable=True) - ElapsedTimeInMs = Column(Integer, nullable=True) - DataScannedInBytes = Column(Integer, nullable=True) - created = Column(DateTime, default=datetime.datetime.now) - - -class WorksheetShare(Base): - __tablename__ = 'worksheet_share' - worksheetShareUri = Column(String, primary_key=True, default=utils.uuid('_')) - worksheetUri = Column(String, nullable=False) - principalId = Column(String, nullable=False) - principalType = Column(String, nullable=False) - canEdit = Column(Boolean, default=False) - owner = Column(String, nullable=False) - created = Column(DateTime, default=datetime.datetime.now) - updated = Column(DateTime, onupdate=datetime.datetime.now) diff --git a/backend/dataall/db/models/__init__.py b/backend/dataall/db/models/__init__.py deleted file mode 100644 index fdc5fbedf..000000000 --- a/backend/dataall/db/models/__init__.py +++ /dev/null @@ -1,46 +0,0 @@ -from .Enums import * -from .Activity import Activity -from .KeyValueTag import KeyValueTag -from .Dashboard import Dashboard -from .DashboardShare import DashboardShare -from .DashboardShare import DashboardShareStatus -from .Dataset import Dataset -from .DatasetProfilingRun import DatasetProfilingRun -from .DatasetQualityRule import DatasetQualityRule -from .DatasetStorageLocation import DatasetStorageLocation -from .DatasetTable import DatasetTable -from .DatasetTableColumn import DatasetTableColumn -from .DatasetTableProfilingJob import DatasetTableProfilingJob -from .Environment import Environment -from .EnvironmentGroup import EnvironmentGroup -from .FeedMessage import FeedMessage -from .Glossary import GlossaryNode, TermLink -from .Group import Group -from .ConsumptionRole import ConsumptionRole -from .GroupMember import GroupMember -from .Notification import Notification, NotificationType -from .Organization import Organization -from .OrganizationGroup import OrganizationGroup -from .Permission import Permission, PermissionType -from .RedshiftCluster import RedshiftCluster -from .RedshiftClusterDataset import RedshiftClusterDataset -from .RedshiftClusterDatasetTable import RedshiftClusterDatasetTable -from .ResourcePolicy import ResourcePolicy -from .ResourcePolicyPermission import ResourcePolicyPermission -from .SagemakerNotebook import SagemakerNotebook -from .SagemakerStudio import SagemakerStudio, SagemakerStudioUserProfile -from .ShareObject import ShareObject -from .ShareObjectItem import ShareObjectItem -from .DataPipeline import DataPipeline -from 
.DataPipelineEnvironment import DataPipelineEnvironment -from .Stack import Stack -from .Tag import Tag, ItemTags, updateObjectTags -from .Task import Task -from .Tenant import Tenant -from .TenantPolicy import TenantPolicy -from .TenantPolicyPermission import TenantPolicyPermission -from .TenantAdministrator import TenantAdministrator -from .User import User -from .Vpc import Vpc -from .Worksheet import Worksheet, WorksheetQueryResult, WorksheetShare -from .Vote import Vote diff --git a/backend/dataall/db/permissions.py b/backend/dataall/db/permissions.py deleted file mode 100644 index 1f79445ea..000000000 --- a/backend/dataall/db/permissions.py +++ /dev/null @@ -1,453 +0,0 @@ -""" -ORGANIZATION PERMISSIONS -""" -CREATE_ORGANIZATION = 'CREATE_ORGANIZATION' -UPDATE_ORGANIZATION = 'UPDATE_ORGANIZATION' -DELETE_ORGANIZATION = 'DELETE_ORGANIZATION' -GET_ORGANIZATION = 'GET_ORGANIZATION' -LINK_ENVIRONMENT = 'LINK_ENVIRONMENT' -INVITE_ORGANIZATION_GROUP = 'INVITE_ORGANIZATION_GROUP' -REMOVE_ORGANIZATION_GROUP = 'REMOVE_ORGANIZATION_GROUP' -ORGANIZATION_ALL = [ - CREATE_ORGANIZATION, - UPDATE_ORGANIZATION, - DELETE_ORGANIZATION, - LINK_ENVIRONMENT, - GET_ORGANIZATION, - INVITE_ORGANIZATION_GROUP, - REMOVE_ORGANIZATION_GROUP, -] -ORGANIZATION_INVITED = [LINK_ENVIRONMENT, GET_ORGANIZATION] - -""" -TENANT PERMISSIONS -""" -MANAGE_DATASETS = 'MANAGE_DATASETS' -MANAGE_REDSHIFT_CLUSTERS = 'MANAGE_REDSHIFT_CLUSTERS' -MANAGE_DASHBOARDS = 'MANAGE_DASHBOARDS' -MANAGE_NOTEBOOKS = 'MANAGE_NOTEBOOKS' -MANAGE_PIPELINES = 'MANAGE_PIPELINES' -MANAGE_GROUPS = 'MANAGE_GROUPS' -MANAGE_ENVIRONMENT = 'MANAGE_ENVIRONMENT' -MANAGE_WORKSHEETS = 'MANAGE_WORKSHEETS' -MANAGE_GLOSSARIES = 'MANAGE_GLOSSARIES' -MANAGE_ENVIRONMENTS = 'MANAGE_ENVIRONMENTS' -MANAGE_ORGANIZATIONS = 'MANAGE_ORGANIZATIONS' - -""" -ENVIRONMENT -""" -UPDATE_ENVIRONMENT = 'UPDATE_ENVIRONMENT' -GET_ENVIRONMENT = 'GET_ENVIRONMENT' -DELETE_ENVIRONMENT = 'DELETE_ENVIRONMENT' -INVITE_ENVIRONMENT_GROUP = 'INVITE_ENVIRONMENT_GROUP' -REMOVE_ENVIRONMENT_GROUP = 'REMOVE_ENVIRONMENT_GROUP' -UPDATE_ENVIRONMENT_GROUP = 'UPDATE_ENVIRONMENT_GROUP' -ADD_ENVIRONMENT_CONSUMPTION_ROLES = 'ADD_ENVIRONMENT_CONSUMPTION_ROLES' -LIST_ENVIRONMENT_CONSUMPTION_ROLES = 'LIST_ENVIRONMENT_CONSUMPTION_ROLES' -LIST_ENVIRONMENT_GROUP_PERMISSIONS = 'LIST_ENVIRONMENT_GROUP_PERMISSIONS' -LIST_ENVIRONMENT_DATASETS = 'LIST_ENVIRONMENT_DATASETS' -LIST_ENVIRONMENT_GROUPS = 'LIST_ENVIRONMENT_GROUPS' -CREDENTIALS_ENVIRONMENT = 'CREDENTIALS_ENVIRONMENT' -ENABLE_ENVIRONMENT_SUBSCRIPTIONS = 'ENABLE_ENVIRONMENT_SUBSCRIPTIONS' -DISABLE_ENVIRONMENT_SUBSCRIPTIONS = 'DISABLE_ENVIRONMENT_SUBSCRIPTIONS' -RUN_ATHENA_QUERY = 'RUN_ATHENA_QUERY' -CREATE_DATASET = 'CREATE_DATASET' -CREATE_SHARE_OBJECT = 'CREATE_SHARE_OBJECT' -LIST_ENVIRONMENT_SHARED_WITH_OBJECTS = 'LIST_ENVIRONMENT_SHARED_WITH_OBJECTS' -CREATE_REDSHIFT_CLUSTER = 'CREATE_REDSHIFT_CLUSTER' -LIST_ENVIRONMENT_REDSHIFT_CLUSTERS = 'LIST_ENVIRONMENT_REDSHIFT_CLUSTERS' -CREATE_NOTEBOOK = 'CREATE_NOTEBOOK' -LIST_ENVIRONMENT_NOTEBOOKS = 'LIST_ENVIRONMENT_NOTEBOOKS' -CREATE_SGMSTUDIO_NOTEBOOK = 'CREATE_SGMSTUDIO_NOTEBOOK' -LIST_ENVIRONMENT_SGMSTUDIO_NOTEBOOKS = 'LIST_ENVIRONMENT_SGMSTUDIO_NOTEBOOKS' -CREATE_DASHBOARD = 'CREATE_DASHBOARD' -LIST_ENVIRONMENT_DASHBOARDS = 'LIST_ENVIRONMENT_DASHBOARDS' -CREATE_PIPELINE = 'CREATE_PIPELINE' -LIST_PIPELINES = 'LIST_PIPELINES' -CREATE_NETWORK = 'CREATE_NETWORK' -LIST_ENVIRONMENT_NETWORKS = 'LIST_ENVIRONMENT_NETWORKS' - - -ENVIRONMENT_INVITED = [ - CREATE_DATASET, - LIST_ENVIRONMENT_GROUP_PERMISSIONS, - 
GET_ENVIRONMENT, - LIST_ENVIRONMENT_DATASETS, - LIST_ENVIRONMENT_GROUPS, - LIST_ENVIRONMENT_CONSUMPTION_ROLES, - CREATE_SHARE_OBJECT, - LIST_ENVIRONMENT_SHARED_WITH_OBJECTS, - RUN_ATHENA_QUERY, - CREATE_REDSHIFT_CLUSTER, - LIST_ENVIRONMENT_REDSHIFT_CLUSTERS, - CREATE_NOTEBOOK, - LIST_ENVIRONMENT_NOTEBOOKS, - CREATE_SGMSTUDIO_NOTEBOOK, - LIST_ENVIRONMENT_SGMSTUDIO_NOTEBOOKS, - CREATE_DASHBOARD, - LIST_ENVIRONMENT_DASHBOARDS, - INVITE_ENVIRONMENT_GROUP, - ADD_ENVIRONMENT_CONSUMPTION_ROLES, - CREATE_PIPELINE, - LIST_PIPELINES, - CREATE_NETWORK, - LIST_ENVIRONMENT_NETWORKS, -] -ENVIRONMENT_INVITATION_REQUEST = [ - INVITE_ENVIRONMENT_GROUP, - ADD_ENVIRONMENT_CONSUMPTION_ROLES, - CREATE_DATASET, - CREATE_SHARE_OBJECT, - CREATE_REDSHIFT_CLUSTER, - CREATE_SGMSTUDIO_NOTEBOOK, - CREATE_NOTEBOOK, - CREATE_DASHBOARD, - CREATE_PIPELINE, - CREATE_NETWORK, -] -ENVIRONMENT_ALL = [ - UPDATE_ENVIRONMENT, - GET_ENVIRONMENT, - DELETE_ENVIRONMENT, - INVITE_ENVIRONMENT_GROUP, - REMOVE_ENVIRONMENT_GROUP, - UPDATE_ENVIRONMENT_GROUP, - LIST_ENVIRONMENT_GROUP_PERMISSIONS, - ADD_ENVIRONMENT_CONSUMPTION_ROLES, - LIST_ENVIRONMENT_CONSUMPTION_ROLES, - LIST_ENVIRONMENT_DATASETS, - LIST_ENVIRONMENT_GROUPS, - CREDENTIALS_ENVIRONMENT, - ENABLE_ENVIRONMENT_SUBSCRIPTIONS, - DISABLE_ENVIRONMENT_SUBSCRIPTIONS, - RUN_ATHENA_QUERY, - CREATE_DATASET, - CREATE_SHARE_OBJECT, - CREATE_REDSHIFT_CLUSTER, - LIST_ENVIRONMENT_REDSHIFT_CLUSTERS, - CREATE_NOTEBOOK, - LIST_ENVIRONMENT_NOTEBOOKS, - LIST_ENVIRONMENT_SHARED_WITH_OBJECTS, - CREATE_SGMSTUDIO_NOTEBOOK, - LIST_ENVIRONMENT_SGMSTUDIO_NOTEBOOKS, - CREATE_DASHBOARD, - LIST_ENVIRONMENT_DASHBOARDS, - CREATE_PIPELINE, - LIST_PIPELINES, - CREATE_NETWORK, - LIST_ENVIRONMENT_NETWORKS, -] -""" -CONSUMPTION_ROLE -""" -REMOVE_ENVIRONMENT_CONSUMPTION_ROLE = 'REMOVE_ENVIRONMENT_CONSUMPTION_ROLE' -CONSUMPTION_ENVIRONMENT_ROLE_ALL = [ - LIST_ENVIRONMENT_CONSUMPTION_ROLES, - ADD_ENVIRONMENT_CONSUMPTION_ROLES -] -CONSUMPTION_ROLE_ALL = [ - REMOVE_ENVIRONMENT_CONSUMPTION_ROLE -] - -""" -SHARE OBJECT -""" -ADD_ITEM = 'ADD_ITEM' -REMOVE_ITEM = 'REMOVE_ITEM' -SUBMIT_SHARE_OBJECT = 'SUBMIT_SHARE_OBJECT' -APPROVE_SHARE_OBJECT = 'APPROVE_SHARE_OBJECT' -REJECT_SHARE_OBJECT = 'REJECT_SHARE_OBJECT' -DELETE_SHARE_OBJECT = 'DELETE_SHARE_OBJECT' -GET_SHARE_OBJECT = 'GET_SHARE_OBJECT' -LIST_SHARED_ITEMS = 'LIST_SHARED_ITEMS' -SHARE_OBJECT_REQUESTER = [ - ADD_ITEM, - REMOVE_ITEM, - SUBMIT_SHARE_OBJECT, - GET_SHARE_OBJECT, - LIST_SHARED_ITEMS, - DELETE_SHARE_OBJECT, -] -SHARE_OBJECT_APPROVER = [ - ADD_ITEM, - REMOVE_ITEM, - APPROVE_SHARE_OBJECT, - REJECT_SHARE_OBJECT, - DELETE_SHARE_OBJECT, - GET_SHARE_OBJECT, - LIST_SHARED_ITEMS, -] -SHARE_OBJECT_ALL = [ - ADD_ITEM, - REMOVE_ITEM, - SUBMIT_SHARE_OBJECT, - APPROVE_SHARE_OBJECT, - REJECT_SHARE_OBJECT, - DELETE_SHARE_OBJECT, - GET_SHARE_OBJECT, - LIST_SHARED_ITEMS, -] -""" -DATASET PERMISSIONS -""" -GET_DATASET = 'GET_DATASET' -UPDATE_DATASET = 'UPDATE_DATASET' -SYNC_DATASET = 'SYNC_DATASET' -SUMMARY_DATASET = 'SUMMARY_DATASET' -IMPORT_DATASET = 'IMPORT_DATASET' -UPLOAD_DATASET = 'UPLOAD_DATASET' -LIST_DATASETS = 'LIST_DATASETS' -CREDENTIALS_DATASET = 'CREDENTIALS_DATASET' -URL_DATASET = 'URL_DATASET' -CRAWL_DATASET = 'CRAWL_DATASET' -DELETE_DATASET = 'DELETE_DATASET' -STACK_DATASET = 'STACK_DATASET' -SUBSCRIPTIONS_DATASET = 'SUBSCRIPTIONS_DATASET' -CREATE_DATASET_TABLE = 'CREATE_DATASET_TABLE' -DELETE_DATASET_TABLE = 'DELETE_DATASET_TABLE' -UPDATE_DATASET_TABLE = 'UPDATE_DATASET_TABLE' -PROFILE_DATASET_TABLE = 'PROFILE_DATASET_TABLE' -LIST_DATASET_TABLES = 
'LIST_DATASET_TABLES' -LIST_DATASET_SHARES = 'LIST_DATASET_SHARES' -CREATE_DATASET_FOLDER = 'CREATE_DATASET_FOLDER' -DELETE_DATASET_FOLDER = 'DELETE_DATASET_FOLDER' -GET_DATASET_FOLDER = 'DELETE_DATASET_FOLDER' -LIST_DATASET_FOLDERS = 'LIST_DATASET_FOLDERS' -UPDATE_DATASET_FOLDER = 'UPDATE_DATASET_FOLDER' -DATASET_WRITE = [ - UPDATE_DATASET, - SYNC_DATASET, - SUMMARY_DATASET, - IMPORT_DATASET, - UPLOAD_DATASET, - CREDENTIALS_DATASET, - URL_DATASET, - CRAWL_DATASET, - DELETE_DATASET, - STACK_DATASET, - SUBSCRIPTIONS_DATASET, - UPDATE_DATASET_TABLE, - DELETE_DATASET_TABLE, - CREATE_DATASET_TABLE, - PROFILE_DATASET_TABLE, - LIST_DATASET_SHARES, - CREATE_DATASET_FOLDER, - DELETE_DATASET_FOLDER, - UPDATE_DATASET_FOLDER, - LIST_DATASET_FOLDERS, -] - -DATASET_READ = [ - GET_DATASET, - LIST_DATASETS, - LIST_DATASET_TABLES, - LIST_DATASET_SHARES, - LIST_DATASET_FOLDERS, - CREDENTIALS_DATASET, -] - -DATASET_ALL = list(set(DATASET_WRITE + DATASET_READ)) - -""" -DATASET TABLE PERMISSIONS -""" -GET_DATASET_TABLE = 'GET_DATASET_TABLE' -PREVIEW_DATASET_TABLE = 'PREVIEW_DATASET_TABLE' - -DATASET_TABLE_READ = [ - GET_DATASET_TABLE, - PREVIEW_DATASET_TABLE -] - -""" -GLOSSARIES -""" -CREATE_CATEGORY = 'CREATE_CATEGORY' -CREATE_TERM = 'CREATE_TERM' -UPDATE_NODE = 'UPDATE_NODE' -DELETE_GLOSSARY = 'DELETE_GLOSSARY' -APPROVE_ASSOCIATION = 'APPROVE_ASSOCIATION' -GLOSSARY_ALL = [ - CREATE_CATEGORY, - CREATE_TERM, - UPDATE_NODE, - DELETE_GLOSSARY, - APPROVE_ASSOCIATION, -] -""" -TENANT ALL -""" - -TENANT_ALL = [ - MANAGE_DATASETS, - MANAGE_REDSHIFT_CLUSTERS, - MANAGE_DASHBOARDS, - MANAGE_NOTEBOOKS, - MANAGE_PIPELINES, - MANAGE_WORKSHEETS, - MANAGE_GLOSSARIES, - MANAGE_GROUPS, - MANAGE_ENVIRONMENTS, - MANAGE_ORGANIZATIONS, -] - -TENANT_ALL_WITH_DESC = {k: k for k in TENANT_ALL} -TENANT_ALL_WITH_DESC[MANAGE_DASHBOARDS] = 'Manage dashboards' -TENANT_ALL_WITH_DESC[MANAGE_DATASETS] = 'Manage datasets' -TENANT_ALL_WITH_DESC[MANAGE_NOTEBOOKS] = 'Manage notebooks' -TENANT_ALL_WITH_DESC[MANAGE_REDSHIFT_CLUSTERS] = 'Manage Redshift clusters' -TENANT_ALL_WITH_DESC[MANAGE_GLOSSARIES] = 'Manage glossaries' -TENANT_ALL_WITH_DESC[MANAGE_WORKSHEETS] = 'Manage worksheets' -TENANT_ALL_WITH_DESC[MANAGE_ENVIRONMENTS] = 'Manage environments' -TENANT_ALL_WITH_DESC[MANAGE_GROUPS] = 'Manage teams' -TENANT_ALL_WITH_DESC[MANAGE_PIPELINES] = 'Manage pipelines' -TENANT_ALL_WITH_DESC[MANAGE_ORGANIZATIONS] = 'Manage organizations' - -""" -REDSHIFT CLUSTER -""" -GET_REDSHIFT_CLUSTER = 'GET_REDSHIFT_CLUSTER' -SHARE_REDSHIFT_CLUSTER = 'SHARE_REDSHIFT_CLUSTER' -DELETE_REDSHIFT_CLUSTER = 'DELETE_REDSHIFT_CLUSTER' -REBOOT_REDSHIFT_CLUSTER = 'REBOOT_REDSHIFT_CLUSTER' -RESUME_REDSHIFT_CLUSTER = 'RESUME_REDSHIFT_CLUSTER' -PAUSE_REDSHIFT_CLUSTER = 'PAUSE_REDSHIFT_CLUSTER' -ADD_DATASET_TO_REDSHIFT_CLUSTER = 'ADD_DATASET_TO_REDSHIFT_CLUSTER' -LIST_REDSHIFT_CLUSTER_DATASETS = 'LIST_REDSHIFT_CLUSTER_DATASETS' -REMOVE_DATASET_FROM_REDSHIFT_CLUSTER = 'REMOVE_DATASET_FROM_REDSHIFT_CLUSTER' -ENABLE_REDSHIFT_TABLE_COPY = 'ENABLE_REDSHIFT_TABLE_COPY' -DISABLE_REDSHIFT_TABLE_COPY = 'DISABLE_REDSHIFT_TABLE_COPY' -GET_REDSHIFT_CLUSTER_CREDENTIALS = 'GET_REDSHIFT_CLUSTER_CREDENTIALS' -REDSHIFT_CLUSTER_ALL = [ - GET_REDSHIFT_CLUSTER, - SHARE_REDSHIFT_CLUSTER, - DELETE_REDSHIFT_CLUSTER, - REBOOT_REDSHIFT_CLUSTER, - RESUME_REDSHIFT_CLUSTER, - PAUSE_REDSHIFT_CLUSTER, - ADD_DATASET_TO_REDSHIFT_CLUSTER, - LIST_REDSHIFT_CLUSTER_DATASETS, - REMOVE_DATASET_FROM_REDSHIFT_CLUSTER, - ENABLE_REDSHIFT_TABLE_COPY, - DISABLE_REDSHIFT_TABLE_COPY, - GET_REDSHIFT_CLUSTER_CREDENTIALS, 
-] - -""" -NOTEBOOKS -""" -GET_NOTEBOOK = 'GET_NOTEBOOK' -UPDATE_NOTEBOOK = 'UPDATE_NOTEBOOK' -DELETE_NOTEBOOK = 'DELETE_NOTEBOOK' -NOTEBOOK_ALL = [ - GET_NOTEBOOK, - DELETE_NOTEBOOK, - UPDATE_NOTEBOOK, -] - -""" -SAGEMAKER STUDIO NOTEBOOKS -""" -GET_SGMSTUDIO_NOTEBOOK = 'GET_SGMSTUDIO_NOTEBOOK' -UPDATE_SGMSTUDIO_NOTEBOOK = 'UPDATE_SGMSTUDIO_NOTEBOOK' -DELETE_SGMSTUDIO_NOTEBOOK = 'DELETE_SGMSTUDIO_NOTEBOOK' -SGMSTUDIO_NOTEBOOK_URL = 'SGMSTUDIO_NOTEBOOK_URL' -SGMSTUDIO_NOTEBOOK_ALL = [ - GET_SGMSTUDIO_NOTEBOOK, - UPDATE_SGMSTUDIO_NOTEBOOK, - DELETE_SGMSTUDIO_NOTEBOOK, - SGMSTUDIO_NOTEBOOK_URL, -] - -""" -DASHBOARDS -""" -GET_DASHBOARD = 'GET_DASHBOARD' -UPDATE_DASHBOARD = 'UPDATE_DASHBOARD' -DELETE_DASHBOARD = 'DELETE_DASHBOARD' -DASHBOARD_URL = 'DASHBOARD_URL' -SHARE_DASHBOARD = 'SHARE_DASHBOARD' -DASHBOARD_ALL = [ - GET_DASHBOARD, - UPDATE_DASHBOARD, - DELETE_DASHBOARD, - DASHBOARD_URL, - SHARE_DASHBOARD, -] - -""" -PIPELINES -""" -GET_PIPELINE = 'GET_PIPELINE' -UPDATE_PIPELINE = 'UPDATE_PIPELINE' -DELETE_PIPELINE = 'DELETE_PIPELINE' -CREDENTIALS_PIPELINE = 'CREDENTIALS_PIPELINE' -START_PIPELINE = 'START_PIPELINE' -PIPELINE_ALL = [ - CREATE_PIPELINE, - GET_PIPELINE, - UPDATE_PIPELINE, - DELETE_PIPELINE, - CREDENTIALS_PIPELINE, - START_PIPELINE, - LIST_PIPELINES, -] - -""" -WORKSHEETS -""" -GET_WORKSHEET = 'GET_WORKSHEET' -UPDATE_WORKSHEET = 'UPDATE_WORKSHEET' -DELETE_WORKSHEET = 'DELETE_WORKSHEET' -SHARE_WORKSHEET = 'SHARE_WORKSHEET' -RUN_WORKSHEET_QUERY = 'RUN_WORKSHEET_QUERY' -WORKSHEET_ALL = [ - GET_WORKSHEET, - UPDATE_WORKSHEET, - DELETE_WORKSHEET, - SHARE_WORKSHEET, - RUN_WORKSHEET_QUERY, -] -WORKSHEET_SHARED = [GET_WORKSHEET, UPDATE_WORKSHEET, RUN_WORKSHEET_QUERY] - -""" -NETWORKS -""" -GET_NETWORK = 'GET_NETWORK' -UPDATE_NETWORK = 'UPDATE_NETWORK' -DELETE_NETWORK = 'DELETE_NETWORK' -NETWORK_ALL = [GET_NETWORK, UPDATE_NETWORK, DELETE_NETWORK] - -""" -RESOURCES_ALL -""" - -RESOURCES_ALL = ( - DATASET_ALL - + DATASET_TABLE_READ - + ORGANIZATION_ALL - + ENVIRONMENT_ALL - + CONSUMPTION_ROLE_ALL - + SHARE_OBJECT_ALL - + REDSHIFT_CLUSTER_ALL - + NOTEBOOK_ALL - + GLOSSARY_ALL - + SGMSTUDIO_NOTEBOOK_ALL - + DASHBOARD_ALL - + WORKSHEET_ALL - + PIPELINE_ALL - + NETWORK_ALL -) - -RESOURCES_ALL_WITH_DESC = {k: k for k in RESOURCES_ALL} -RESOURCES_ALL_WITH_DESC[CREATE_DATASET] = 'Create datasets on this environment' -RESOURCES_ALL_WITH_DESC[CREATE_DASHBOARD] = 'Create dashboards on this environment' -RESOURCES_ALL_WITH_DESC[CREATE_NOTEBOOK] = 'Create notebooks on this environment' -RESOURCES_ALL_WITH_DESC[CREATE_REDSHIFT_CLUSTER] = 'Create Redshift clusters on this environment' -RESOURCES_ALL_WITH_DESC[CREATE_SGMSTUDIO_NOTEBOOK] = 'Create ML Studio profiles on this environment' -RESOURCES_ALL_WITH_DESC[INVITE_ENVIRONMENT_GROUP] = 'Invite other teams to this environment' -RESOURCES_ALL_WITH_DESC[ADD_ENVIRONMENT_CONSUMPTION_ROLES] = 'Add IAM consumption roles to this environment' -RESOURCES_ALL_WITH_DESC[CREATE_SHARE_OBJECT] = 'Request datasets access for this environment' -RESOURCES_ALL_WITH_DESC[CREATE_PIPELINE] = 'Create pipelines on this environment' -RESOURCES_ALL_WITH_DESC[CREATE_NETWORK] = 'Create networks on this environment' diff --git a/backend/dataall/modules/__init__.py b/backend/dataall/modules/__init__.py new file mode 100644 index 000000000..7e1d5c42a --- /dev/null +++ b/backend/dataall/modules/__init__.py @@ -0,0 +1,31 @@ +""" +Contains all submodules that can be plugged into the main functionality + +How to migrate to a new module: +1) Create your python module +2) Create an 
implementation of ModuleInterface (one or more) in the __init__.py of your module
+3) Define your module in config.json. The loader will use it to import your module
+
+Remember that there should not be any references from outside to modules.
+The rule is simple: modules can import the core code, but not the other way around.
+Otherwise your module gets imported unintentionally whenever the core is imported.
+You can add logging in the module's __init__.py to track unintentional imports.
+
+Auto import of modules:
+api - contains the logic for processing GraphQL requests. It registers itself automatically;
+see bootstrap() and @cache_instances
+
+cdk - contains stacks that are deployed to AWS when requested. A stack registers
+itself automatically if its class is decorated with @stack;
+see StackManagerFactory and @stack for more information on stacks
+
+handlers - contains code for long-running tasks that are delegated to Lambda.
+These tasks register themselves automatically when @Worker.handler is applied
+to a static method (it must be static);
+see WorkerHandler for more information on short-living tasks
+
+Another example of auto import is service policies. If your module has a service policy,
+it is imported automatically as long as it inherits from ServicePolicy or S3Policy.
+
+Any manual import should be done in the module's __init__.py, inside its ModuleInterface.
+"""
diff --git a/backend/dataall/modules/catalog/__init__.py b/backend/dataall/modules/catalog/__init__.py
new file mode 100644
index 000000000..c02f06803
--- /dev/null
+++ b/backend/dataall/modules/catalog/__init__.py
@@ -0,0 +1,32 @@
+from typing import Set
+
+from dataall.base.loader import ModuleInterface, ImportMode
+
+
+class CatalogIndexerModuleInterface(ModuleInterface):
+    """
+    Implements ModuleInterface for the catalog indexer long-running task
+    """
+
+    @staticmethod
+    def is_supported(modes: Set[ImportMode]) -> bool:
+        return ImportMode.CATALOG_INDEXER_TASK in modes
+
+    def __init__(self):
+        from dataall.modules.catalog import tasks
+
+
+class CatalogApiModuleInterface(ModuleInterface):
+    """
+    Implements ModuleInterface for catalog code in the GraphQL Lambda.
+    This module interface is used by the dashboards and datasets modules.
+    """
+
+    @staticmethod
+    def is_supported(modes: Set[ImportMode]) -> bool:
+        return ImportMode.API in modes
+
+    def __init__(self):
+        import dataall.modules.catalog.api
+        import dataall.modules.catalog.indexers
diff --git a/backend/dataall/modules/catalog/api/__init__.py b/backend/dataall/modules/catalog/api/__init__.py
new file mode 100644
index 000000000..8ff5793c6
--- /dev/null
+++ b/backend/dataall/modules/catalog/api/__init__.py
@@ -0,0 +1,10 @@
+from . 
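As a concrete illustration of the pattern described in backend/dataall/modules/__init__.py above, a new module would typically ship a ModuleInterface in its own __init__.py and be switched on in config.json. The following is only a sketch under assumed names: the module name mymodule, its api package, and the exact config.json layout are illustrative and not part of this change.

# backend/dataall/modules/mymodule/__init__.py  (hypothetical module, for illustration only)
from typing import Set

from dataall.base.loader import ModuleInterface, ImportMode


class MyModuleApiModuleInterface(ModuleInterface):
    """Registers the module's GraphQL pieces when the API Lambda loads modules."""

    @staticmethod
    def is_supported(modes: Set[ImportMode]) -> bool:
        # Load this interface only in the GraphQL Lambda, i.e. load_modules(modes={ImportMode.API})
        return ImportMode.API in modes

    def __init__(self):
        # Importing the api package is what registers the module's queries, mutations and types
        import dataall.modules.mymodule.api  # noqa: F401

# Assumed config.json entry that lets the loader discover and activate the module:
# {
#   "modules": {
#     "mymodule": { "active": true }
#   }
# }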
import ( + input_types, + queries, + mutations, + resolvers, + registry, + types, +) + +__all__ = ['registry', 'resolvers', 'types', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/modules/catalog/api/enums.py b/backend/dataall/modules/catalog/api/enums.py new file mode 100644 index 000000000..a63b82074 --- /dev/null +++ b/backend/dataall/modules/catalog/api/enums.py @@ -0,0 +1,7 @@ +from dataall.base.api.constants import GraphQLEnumMapper + + +class GlossaryRole(GraphQLEnumMapper): + # Permissions on a glossary + Admin = '900' + NoPermission = '000' diff --git a/backend/dataall/modules/catalog/api/input_types.py b/backend/dataall/modules/catalog/api/input_types.py new file mode 100644 index 000000000..2da635530 --- /dev/null +++ b/backend/dataall/modules/catalog/api/input_types.py @@ -0,0 +1,110 @@ +from dataall.base.api import gql + +CreateGlossaryInput = gql.InputType( + name='CreateGlossaryInput', + arguments=[ + gql.Argument(name='label', type=gql.NonNullableType(gql.String)), + gql.Argument(name='readme', type=gql.NonNullableType(gql.String)), + gql.Argument(name='status', type=gql.String), + gql.Argument(name='admin', type=gql.String), + ], +) + +UpdateGlossaryInput = gql.InputType( + name='UpdateGlossaryInput', + arguments=[ + gql.Argument(name='label', type=gql.String), + gql.Argument(name='readme', type=gql.String), + gql.Argument(name='status', type=gql.String), + gql.Argument(name='admin', type=gql.String), + ], +) + + +CreateCategoryInput = gql.InputType( + name='CreateCategoryInput', + arguments=[ + gql.Argument(name='label', type=gql.NonNullableType(gql.String)), + gql.Argument(name='readme', type=gql.NonNullableType(gql.String)), + gql.Argument(name='status', type=gql.String), + ], +) + +UpdateCategoryInput = gql.InputType( + name='UpdateCategoryInput', + arguments=[ + gql.Argument(name='label', type=gql.String), + gql.Argument(name='readme', type=gql.String), + gql.Argument(name='status', type=gql.String), + ], +) + +CreateTermInput = gql.InputType( + name='CreateTermInput', + arguments=[ + gql.Argument(name='label', type=gql.NonNullableType(gql.String)), + gql.Argument(name='readme', type=gql.NonNullableType(gql.String)), + gql.Argument(name='status', type=gql.String), + ], +) + + +UpdateTermInput = gql.InputType( + name='UpdateTermInput', + arguments=[ + gql.Argument(name='label', type=gql.String), + gql.Argument(name='readme', type=gql.String), + gql.Argument(name='status', type=gql.String), + ], +) + + +GlossaryFilter = gql.InputType( + name='GlossaryFilter', + arguments=[ + gql.Argument(name='term', type=gql.String), + gql.Argument(name='status', type=gql.String), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + ], +) + +CategoryFilter = gql.InputType( + name='CategoryFilter', + arguments=[ + gql.Argument(name='term', type=gql.String), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='status', type=gql.String), + gql.Argument(name='pageSize', type=gql.Integer), + ], +) + + +TermFilter = gql.InputType( + name='TermFilter', + arguments=[ + gql.Argument(name='status', type=gql.String), + gql.Argument(name='term', type=gql.String), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + ], +) + +GlossaryTermTargetFilter = gql.InputType( + name='GlossaryTermTargetFilter', + arguments=[ + gql.Argument(name='term', type=gql.String), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + ], +) + 
+GlossaryNodeSearchFilter = gql.InputType( + name='GlossaryNodeSearchFilter', + arguments=[ + gql.Argument(name='term', type=gql.String), + gql.Argument(name='nodeType', type=gql.String), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + ], +) diff --git a/backend/dataall/modules/catalog/api/mutations.py b/backend/dataall/modules/catalog/api/mutations.py new file mode 100644 index 000000000..d46639325 --- /dev/null +++ b/backend/dataall/modules/catalog/api/mutations.py @@ -0,0 +1,129 @@ +from dataall.base.api import gql +from dataall.modules.catalog.api.resolvers import ( + create_glossary, update_node, delete_node, create_category, create_term, link_term, + request_link, approve_term_association, dismiss_term_association +) + + +createGlossary = gql.MutationField( + name='createGlossary', + args=[gql.Argument(name='input', type=gql.Ref('CreateGlossaryInput'))], + resolver=create_glossary, + type=gql.Ref('Glossary'), +) + + +UpdateGlossary = gql.MutationField( + name='updateGlossary', + resolver=update_node, + args=[ + gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='input', type=gql.Ref('UpdateGlossaryInput')), + ], + type=gql.Ref('Glossary'), +) + +deleteGlossary = gql.MutationField( + name='deleteGlossary', + resolver=delete_node, + args=[ + gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String)), + ], + type=gql.Integer, +) + + +CreateCategory = gql.MutationField( + name='createCategory', + args=[ + gql.Argument(name='parentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='input', type=gql.Ref('CreateCategoryInput')), + ], + resolver=create_category, + type=gql.Ref('Category'), +) + +updateCategory = gql.MutationField( + name='updateCategory', + resolver=update_node, + args=[ + gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='input', type=gql.Ref('UpdateCategoryInput')), + ], + type=gql.Ref('Category'), +) + +deleteCategory = gql.MutationField( + name='deleteCategory', + resolver=delete_node, + args=[ + gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String)), + ], + type=gql.Integer, +) + + +linkTerm = gql.MutationField( + name='linkTerm', + resolver=link_term, + args=[ + gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), + ], + type=gql.Ref('GlossaryTermLink'), +) + +requestLink = gql.MutationField( + name='requestLink', + resolver=request_link, + args=[ + gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), + ], + type=gql.Ref('GlossaryTermLink'), +) + + +createTerm = gql.MutationField( + name='createTerm', + type=gql.Ref('Term'), + resolver=create_term, + args=[ + gql.Argument(name='parentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='input', type=gql.Ref('CreateTermInput')), + ], +) + +updateTerm = gql.MutationField( + name='updateTerm', + type=gql.Ref('Term'), + resolver=update_node, + args=[ + gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='input', type=gql.Ref('UpdateTermInput')), + ], +) + +deleteTerm = gql.MutationField( + name='deleteTerm', + type=gql.Integer, + resolver=delete_node, + args=[gql.Argument(name='nodeUri', 
type=gql.NonNullableType(gql.String))], +) + + +approveTermAssociation = gql.MutationField( + name='approveTermAssociation', + type=gql.Boolean, + resolver=approve_term_association, + args=[gql.Argument(name='linkUri', type=gql.NonNullableType(gql.String))], +) + +dismissTermAssociation = gql.MutationField( + name='dismissTermAssociation', + type=gql.Boolean, + resolver=dismiss_term_association, + args=[gql.Argument(name='linkUri', type=gql.NonNullableType(gql.String))], +) diff --git a/backend/dataall/modules/catalog/api/queries.py b/backend/dataall/modules/catalog/api/queries.py new file mode 100644 index 000000000..5d6a9eba5 --- /dev/null +++ b/backend/dataall/modules/catalog/api/queries.py @@ -0,0 +1,81 @@ +from dataall.base.api import gql +from dataall.modules.catalog.api.resolvers import ( + get_node, list_glossaries, search_terms, hierarchical_search, get_link, list_asset_linked_terms +) + +getGlossary = gql.QueryField( + name='getGlossary', + args=[gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String))], + resolver=get_node, + type=gql.Ref('Glossary'), +) + + +getCategory = gql.QueryField( + name='getCategory', + resolver=get_node, + args=[gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String))], + type=gql.Ref('Category'), +) + + +getTerm = gql.QueryField( + name='getTerm', + resolver=get_node, + args=[gql.Argument(name='nodeUri', type=gql.NonNullableType(gql.String))], + type=gql.Ref('Term'), +) + +listGlossaries = gql.QueryField( + name='listGlossaries', + type=gql.Ref('GlossarySearchResult'), + args=[gql.Argument(name='filter', type=gql.Ref('GlossaryFilter'))], + resolver=list_glossaries, +) + + +SearchTerms = gql.QueryField( + name='searchTerms', + doc='Search glossary terms', + type=gql.Ref('TermSearchResult'), + args=[gql.Argument(name='filter', type=gql.Ref('TermFilter'))], + resolver=search_terms, +) + + +searchGlossaryHierarchy = gql.QueryField( + name='searchGlossaryHierarchy', + doc='Search glossary terms in the hierarchy', + type=gql.Ref('GlossaryChildrenSearchResult'), + args=[gql.Argument(name='filter', type=gql.Ref('TermFilter'))], + resolver=hierarchical_search, +) + + +SearchGlossary = gql.QueryField( + name='searchGlossary', + doc='Search glossary ', + type=gql.Ref('GlossaryChildrenSearchResult'), + args=[gql.Argument(name='filter', type=gql.Ref('GlossaryNodeSearchFilter'))], + resolver=search_terms, +) + + +getGlossaryTermLink = gql.QueryField( + name='getGlossaryTermLink', + doc='Returns a TermLink from its linkUri', + type=gql.Ref('GlossaryTermLink'), + resolver=get_link, + args=[gql.Argument(name='linkUri', type=gql.NonNullableType(gql.String))], +) + +listAssetLinkedTerms = gql.QueryField( + name='listAssetLinkedTerms', + doc='return all terms associated with a data asset', + args=[ + gql.Argument(name='uri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('GlossaryTermTargetFilter')), + ], + resolver=list_asset_linked_terms, + type=gql.Ref('TermLinkSearchResults'), +) diff --git a/backend/dataall/modules/catalog/api/registry.py b/backend/dataall/modules/catalog/api/registry.py new file mode 100644 index 000000000..8e99b8645 --- /dev/null +++ b/backend/dataall/modules/catalog/api/registry.py @@ -0,0 +1,60 @@ +from dataclasses import dataclass +from typing import Type, Dict, Optional, Protocol, Union + +from dataall.base.api import gql +from dataall.base.api.gql.graphql_union_type import UnionTypeRegistry +from dataall.base.db import Resource +from dataall.modules.catalog.indexers.base_indexer import 
BaseIndexer + + +class Identifiable(Protocol): + @classmethod + def uri(cls): + ... + + +@dataclass +class GlossaryDefinition: + """Glossary's definition used for registration references of other modules""" + target_type: str + object_type: str + model: Union[Type[Resource], Identifiable] # should be an intersection, but python typing doesn't have one yet + reindexer: Type[BaseIndexer] = None # a callback to reindex glossaries in open search + + def target_uri(self): + return self.model.uri() + + +class GlossaryRegistry(UnionTypeRegistry): + """Registry of glossary definition and API to retrieve and reindex data""" + _DEFINITIONS: Dict[str, GlossaryDefinition] = {} + + @classmethod + def register(cls, glossary: GlossaryDefinition) -> None: + cls._DEFINITIONS[glossary.target_type] = glossary + + @classmethod + def find_model(cls, target_type: str) -> Optional[Resource]: + definition = cls._DEFINITIONS[target_type] + return definition.model if definition is not None else None + + @classmethod + def find_object_type(cls, model: Resource) -> Optional[str]: + for _, definition in cls._DEFINITIONS.items(): + if isinstance(model, definition.model): + return definition.object_type + return None + + @classmethod + def definitions(cls): + return cls._DEFINITIONS.values() + + @classmethod + def types(cls): + return [gql.Ref(definition.object_type) for definition in cls._DEFINITIONS.values()] + + @classmethod + def reindex(cls, session, target_type: str, target_uri: str): + definition = cls._DEFINITIONS[target_type] + if definition.reindexer: + definition.reindexer.upsert(session, target_uri) diff --git a/backend/dataall/modules/catalog/api/resolvers.py b/backend/dataall/modules/catalog/api/resolvers.py new file mode 100644 index 000000000..061a9abf4 --- /dev/null +++ b/backend/dataall/modules/catalog/api/resolvers.py @@ -0,0 +1,428 @@ +from datetime import datetime + +from sqlalchemy import and_, or_, asc + +from dataall.modules.catalog.api.enums import GlossaryRole +from dataall.modules.catalog.api.registry import GlossaryRegistry +from dataall.base.api.context import Context +from dataall.modules.catalog.db.glossary_repositories import Glossary +from dataall.modules.catalog.db.glossary_models import TermLink, GlossaryNode +from dataall.base.db import paginate, exceptions + + +def resolve_glossary_node(obj: GlossaryNode, *_): + if obj.nodeType == 'G': + return 'Glossary' + elif obj.nodeType == 'C': + return 'Category' + elif obj.nodeType == 'T': + return 'Term' + else: + return None + + +def create_glossary( + context: Context, source, input: dict = None +) -> GlossaryNode: + with context.engine.scoped_session() as session: + return Glossary.create_glossary(session, input) + + +def tree(context: Context, source: GlossaryNode): + if not source: + return None + adjency_list = {} + with context.engine.scoped_session() as session: + q = session.query(GlossaryNode).filter( + GlossaryNode.path.startswith(f'{source.path}/') + ) + for node in q: + if not adjency_list.get(node.parentUri): + adjency_list[node.parentUri] = [] + + +def node_tree(context: Context, source: GlossaryNode, filter: dict = None): + if not source: + return None + if not filter: + filter = {} + with context.engine.scoped_session() as session: + q = ( + session.query(GlossaryNode) + .filter(GlossaryNode.path.startswith(source.path)) + .filter(GlossaryNode.deleted.is_(None)) + .order_by(asc(GlossaryNode.path)) + ) + term = filter.get('term') + nodeType = filter.get('nodeType') + if term: + q = q.filter( + or_( + 
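# Editor's illustration (not part of this diff): GlossaryRegistry above is the extension point that
# other modules use to make their objects linkable to glossary terms. A module registers a
# GlossaryDefinition at import time; the Dataset model and DatasetIndexer names below are assumed,
# used only to sketch the call.
from dataall.modules.catalog.api.registry import GlossaryDefinition, GlossaryRegistry

GlossaryRegistry.register(GlossaryDefinition(
    target_type='Dataset',       # value stored in TermLink.targetType
    object_type='Dataset',       # GraphQL type name returned by the union resolver
    model=Dataset,               # SQLAlchemy model of the linkable object (assumed import)
    reindexer=DatasetIndexer,    # optional callback to re-index the target in the catalog
))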
GlossaryNode.label.ilike(term), + GlossaryNode.readme.ilike(term), + ) + ) + if nodeType: + q = q.filter(GlossaryNode.nodeType == nodeType) + + return paginate( + q, page_size=filter.get('pageSize', 10), page=filter.get('page', 1) + ).to_dict() + + +def list_node_children( + context: Context, source: GlossaryNode, filter: dict = None +): + if not filter: + filter = {} + with context.engine.scoped_session() as session: + return Glossary.list_node_children(session, source, filter) + + +def create_category( + context: Context, source, parentUri: str = None, input: dict = None +): + with context.engine.scoped_session() as session: + return Glossary.create_category( + session=session, + uri=parentUri, + data=input, + ) + + +def create_term(context: Context, source, parentUri: str = None, input: dict = None): + with context.engine.scoped_session() as session: + return Glossary.create_term( + session=session, + uri=parentUri, + data=input, + ) + + +def list_glossaries(context: Context, source, filter: dict = None): + if filter is None: + filter = {} + with context.engine.scoped_session() as session: + return Glossary.list_glossaries( + session=session, + data=filter, + ) + + +def resolve_categories( + context: Context, source: GlossaryNode, filter: dict = None +): + if not source: + return None + if not filter: + filter = {} + with context.engine.scoped_session() as session: + return Glossary.list_categories( + session=session, + uri=source.nodeUri, + data=filter, + ) + + +def resolve_terms(context: Context, source: GlossaryNode, filter: dict = None): + if not source: + return None + if not filter: + filter = {} + with context.engine.scoped_session() as session: + return Glossary.list_terms( + session=session, + uri=source.nodeUri, + data=filter, + ) + + +def update_node( + context: Context, source, nodeUri: str = None, input: dict = None +) -> GlossaryNode: + with context.engine.scoped_session() as session: + return Glossary.update_node( + session, + uri=nodeUri, + data=input, + ) + + +def get_node(context: Context, source, nodeUri: str = None): + with context.engine.scoped_session() as session: + node: GlossaryNode = session.query(GlossaryNode).get(nodeUri) + if not node: + raise exceptions.ObjectNotFound('Node', nodeUri) + return node + + +def resolve_user_role(context: Context, source: GlossaryNode, **kwargs): + if not source: + return None + if source.admin in context.groups: + return GlossaryRole.Admin.value + return GlossaryRole.NoPermission.value + + +def delete_node(context: Context, source, nodeUri: str = None) -> bool: + with context.engine.scoped_session() as session: + return Glossary.delete_node(session, nodeUri) + + +def hierarchical_search(context: Context, source, filter: dict = None): + if not filter: + filter = {} + + with context.engine.scoped_session() as session: + return Glossary.hierarchical_search( + session=session, + data=filter, + ) + + +def resolve_link(context, source, targetUri: str = None): + if not source: + return None + with context.engine.scoped_session() as session: + link = ( + session.query(TermLink) + .filter( + and_( + TermLink.nodeUri == source.nodeUri, + TermLink.targetUri == targetUri, + ) + ) + .first() + ) + if not link: + link = { + 'nodeUri': source.nodeUri, + 'targetUri': targetUri, + 'created': datetime.now().isoformat(), + 'owner': context.username, + 'approvedByOwner': False, + 'approvedBySteward': False, + } + + return link + + +def search_terms(context: Context, source, filter: dict = None): + if not filter: + filter = {} + with 
context.engine.scoped_session() as session: + return Glossary.search_terms( + session=session, + data=filter, + ) + + +def request_link( + context: Context, + source, + nodeUri: str = None, + targetUri: str = None, + targetType: str = None, +): + with context.engine.scoped_session() as session: + return Glossary.link_term( + session=session, + uri=nodeUri, + data={ + 'targetUri': targetUri, + 'targetType': targetType, + 'approvedByOwner': True, + 'approvedBySteward': False, + }, + target_model=_target_model(targetType), + ) + + +def link_term( + context: Context, + source, + nodeUri: str = None, + targetUri: str = None, + targetType: str = None, +): + with context.engine.scoped_session() as session: + return Glossary.link_term( + session=session, + uri=nodeUri, + data={ + 'targetUri': targetUri, + 'targetType': targetType, + 'approvedByOwner': True, + 'approvedBySteward': True, + }, + target_model=_target_model(targetType), + ) + + +def resolve_term_glossary(context, source: GlossaryNode, **kwargs): + with context.engine.scoped_session() as session: + parentUri = source.path.split('/')[1] + return session.query(GlossaryNode).get(parentUri) + + +def get_link(context: Context, source, linkUri: str = None): + with context.engine.scoped_session() as session: + link = session.query(TermLink).get(linkUri) + if not link: + raise exceptions.ObjectNotFound('Link', linkUri) + return link + + +def target_union_resolver(obj, *_): + return GlossaryRegistry.find_object_type(obj) + + +def resolve_link_target(context, source, **kwargs): + with context.engine.scoped_session() as session: + model = GlossaryRegistry.find_model(source.targetType) + target = session.query(model).get(source.targetUri) + return target + + +def resolve_term_associations( + context, source: GlossaryNode, filter: dict = None +): + if not filter: + filter = {} + with context.engine.scoped_session() as session: + return Glossary.list_term_associations( + session=session, + data={'source': source, 'filter': filter}, + target_model_definitions=GlossaryRegistry.definitions() + ) + + +def resolve_stats(context, source: GlossaryNode, **kwargs): + + with context.engine.scoped_session() as session: + categories = ( + session.query(GlossaryNode) + .filter( + and_( + GlossaryNode.path.startswith(source.path), + GlossaryNode.nodeType == 'C', + GlossaryNode.deleted.is_(None), + ) + ) + .count() + ) + terms = ( + session.query(GlossaryNode) + .filter( + and_( + GlossaryNode.path.startswith(source.path), + GlossaryNode.nodeType == 'T', + GlossaryNode.deleted.is_(None), + ) + ) + .count() + ) + + associations = ( + session.query(TermLink) + .join( + GlossaryNode, + GlossaryNode.nodeType == TermLink.nodeUri, + ) + .filter(GlossaryNode.path.startswith(source.path)) + .count() + ) + + return {'categories': categories, 'terms': terms, 'associations': associations} + + +def list_asset_linked_terms( + context: Context, source, uri: str = None, filter: dict = None +): + if not filter: + filter = {} + with context.engine.scoped_session() as session: + q = ( + session.query(TermLink) + .join( + GlossaryNode, + GlossaryNode.nodeUri == TermLink.nodeUri, + ) + .filter(TermLink.targetUri == uri) + ) + term = filter.get('term') + if term: + q = q.filter( + or_( + GlossaryNode.label.ilike(term), + GlossaryNode.readme.ilike(term), + ) + ) + return paginate( + q, page=filter.get('page', 1), page_size=filter.get('pageSize', 10) + ).to_dict() + + +def resolve_link_node(context: Context, source: TermLink, **kwargs): + with context.engine.scoped_session() as session: 
+ term = session.query(GlossaryNode).get(source.nodeUri) + return term + + +def approve_term_association(context: Context, source, linkUri: str = None): + updated = False + with context.engine.scoped_session() as session: + link: TermLink = session.query(TermLink).get(linkUri) + if not link: + raise exceptions.ObjectNotFound('Link', linkUri) + verify_term_association_approver_role( + session, context.username, context.groups, link + ) + if not link.approvedBySteward: + link.approvedBySteward = True + updated = True + reindex(context, linkUri=linkUri) + return updated + + +def dismiss_term_association(context: Context, source, linkUri: str = None): + updated = False + with context.engine.scoped_session() as session: + link: TermLink = session.query(TermLink).get(linkUri) + if not link: + raise exceptions.ObjectNotFound('Link', linkUri) + verify_term_association_approver_role( + session, context.username, context.groups, link + ) + if link.approvedBySteward: + link.approvedBySteward = False + updated = True + reindex(context, linkUri=linkUri) + return updated + + +def verify_term_association_approver_role(session, username, groups, link): + glossary_node = session.query(GlossaryNode).get(link.nodeUri) + if glossary_node.owner != username and glossary_node.admin not in groups: + raise exceptions.UnauthorizedOperation( + 'ASSOCIATE_GLOSSARY_TERM', + f'User: {username} is not allowed to manage glossary term associations', + ) + + +def reindex(context, linkUri): + with context.engine.scoped_session() as session: + link: TermLink = session.query(TermLink).get(linkUri) + if not link: + return + + GlossaryRegistry.reindex(session, link.targetType, link.targetUri) + + +def _target_model(target_type: str): + target_model = GlossaryRegistry.find_model(target_type) + if not target_model: + raise exceptions.InvalidInput( + 'NodeType', 'term.nodeType', 'association target type is invalid' + ) + return target_model diff --git a/backend/dataall/modules/catalog/api/types.py b/backend/dataall/modules/catalog/api/types.py new file mode 100644 index 000000000..7179ceba9 --- /dev/null +++ b/backend/dataall/modules/catalog/api/types.py @@ -0,0 +1,289 @@ +from dataall.base.api import gql +from dataall.modules.catalog.api.enums import GlossaryRole +from dataall.modules.catalog.api.resolvers import ( + resolve_glossary_node, resolve_user_role, resolve_link, resolve_term_glossary, resolve_stats, + node_tree, list_node_children, resolve_categories, resolve_term_associations, resolve_terms, target_union_resolver, + resolve_link_node, resolve_link_target, + +) +from dataall.modules.catalog.api.registry import GlossaryRegistry + +GlossaryNode = gql.Union( + name='GlossaryNode', + types=[ + gql.Ref('Glossary'), + gql.Ref('Category'), + gql.Ref('Term'), + ], + resolver=resolve_glossary_node, +) + +GlossaryChildrenSearchResult = gql.ObjectType( + name='GlossaryChildrenSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('GlossaryNode'))), + ], +) + +Glossary = gql.ObjectType( + name='Glossary', + fields=[ + gql.Field(name='nodeUri', type=gql.ID), + gql.Field(name='parentUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='status', type=gql.String), + gql.Field(name='owner', type=gql.NonNullableType(gql.String)), + gql.Field(name='path', 
type=gql.NonNullableType(gql.String)), + gql.Field(name='label', type=gql.NonNullableType(gql.String)), + gql.Field(name='name', type=gql.NonNullableType(gql.String)), + gql.Field(name='admin', type=gql.String), + gql.Field( + name='userRoleForGlossary', + type=GlossaryRole.toGraphQLEnum(), + resolver=resolve_user_role, + ), + gql.Field(name='readme', type=gql.String), + gql.Field(name='created', type=gql.NonNullableType(gql.String)), + gql.Field(name='updated', type=gql.String), + gql.Field(name='deleted', type=gql.String), + gql.Field(name='isMatch', type=gql.Boolean), + gql.Field( + name='assetLink', + args=[gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String))], + resolver=resolve_link, + type=gql.Ref('GlossaryTermLink'), + ), + gql.Field( + name='stats', resolver=resolve_stats, type=gql.Ref('GlossaryNodeStatistics') + ), + gql.Field( + resolver=node_tree, + args=[ + gql.Argument(name='filter', type=gql.Ref('GlossaryNodeSearchFilter')) + ], + name='tree', + type=gql.Ref('GlossaryChildrenSearchResult'), + ), + gql.Field( + resolver=list_node_children, + args=[ + gql.Argument(name='filter', type=gql.Ref('GlossaryNodeSearchFilter')) + ], + name='children', + type=gql.Ref('GlossaryChildrenSearchResult'), + ), + gql.Field( + name='categories', + args=[gql.Argument(name='filter', type=gql.Ref('CategoryFilter'))], + resolver=resolve_categories, + type=gql.Ref('CategorySearchResult'), + ), + gql.Field( + name='associations', + args=[ + gql.Argument(name='filter', type=gql.Ref('GlossaryTermTargetFilter')) + ], + resolver=resolve_term_associations, + type=gql.Ref('TermLinkSearchResults'), + ), + ], +) + + +Category = gql.ObjectType( + name='Category', + fields=[ + gql.Field(name='nodeUri', type=gql.ID), + gql.Field(name='parentUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='owner', type=gql.NonNullableType(gql.String)), + gql.Field(name='path', type=gql.NonNullableType(gql.String)), + gql.Field(name='label', type=gql.NonNullableType(gql.String)), + gql.Field(name='status', type=gql.NonNullableType(gql.String)), + gql.Field(name='name', type=gql.NonNullableType(gql.String)), + gql.Field(name='readme', type=gql.String), + gql.Field(name='created', type=gql.NonNullableType(gql.String)), + gql.Field(name='updated', type=gql.String), + gql.Field(name='deleted', type=gql.String), + gql.Field(name='isMatch', type=gql.Boolean), + gql.Field( + name='assetLink', + args=[gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String))], + resolver=resolve_link, + type=gql.Ref('GlossaryTermLink'), + ), + gql.Field( + name='stats', resolver=resolve_stats, type=gql.Ref('GlossaryNodeStatistics') + ), + gql.Field( + resolver=list_node_children, + args=[ + gql.Argument(name='filter', type=gql.Ref('GlossaryNodeSearchFilter')) + ], + name='children', + type=gql.Ref('GlossaryChildrenSearchResult'), + ), + gql.Field( + name='categories', + resolver=resolve_categories, + args=[ + gql.Argument(name='filter', type=gql.Ref('CategoryFilter')), + ], + type=gql.Ref('CategorySearchResult'), + ), + gql.Field( + name='terms', + resolver=resolve_terms, + args=[ + gql.Argument(name='filter', type=gql.Ref('TermFilter')), + ], + type=gql.Ref('TermSearchResult'), + ), + gql.Field( + name='associations', + args=[ + gql.Argument(name='filter', type=gql.Ref('GlossaryTermTargetFilter')) + ], + resolver=resolve_term_associations, + type=gql.Ref('TermLinkSearchResults'), + ), + ], +) + +Term = gql.ObjectType( + name='Term', + fields=[ + gql.Field(name='nodeUri', type=gql.ID), + 
gql.Field(name='parentUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='owner', type=gql.NonNullableType(gql.String)), + gql.Field(name='path', type=gql.NonNullableType(gql.String)), + gql.Field(name='label', type=gql.NonNullableType(gql.String)), + gql.Field(name='name', type=gql.NonNullableType(gql.String)), + gql.Field(name='status', type=gql.NonNullableType(gql.String)), + gql.Field(name='readme', type=gql.String), + gql.Field(name='created', type=gql.NonNullableType(gql.String)), + gql.Field(name='updated', type=gql.String), + gql.Field(name='deleted', type=gql.String), + gql.Field(name='isMatch', type=gql.Boolean), + gql.Field( + name='assetLink', + args=[gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String))], + resolver=resolve_link, + type=gql.Ref('GlossaryTermLink'), + ), + gql.Field( + resolver=list_node_children, + args=[ + gql.Argument(name='filter', type=gql.Ref('GlossaryNodeSearchFilter')) + ], + name='children', + type=gql.Ref('GlossaryChildrenSearchResult'), + ), + gql.Field( + name='stats', resolver=resolve_stats, type=gql.Ref('GlossaryNodeStatistics') + ), + gql.Field( + name='glossary', type=gql.Ref('Glossary'), resolver=resolve_term_glossary + ), + gql.Field( + name='associations', + args=[ + gql.Argument(name='filter', type=gql.Ref('GlossaryTermTargetFilter')) + ], + resolver=resolve_term_associations, + type=gql.Ref('TermLinkSearchResults'), + ), + ], +) + +TermLinkSearchResults = gql.ObjectType( + name='TermLinkSearchResults', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('GlossaryTermLink'))), + ], +) + + +TermSearchResult = gql.ObjectType( + name='TermSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('Term'))), + ], +) + + +CategorySearchResult = gql.ObjectType( + name='CategorySearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('Category'))), + ], +) + + +GlossarySearchResult = gql.ObjectType( + name='GlossarySearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('Glossary'))), + ], +) + +GlossaryTermLinkTarget = gql.Union( + name='GlossaryTermLinkTarget', + type_registry=GlossaryRegistry, + resolver=target_union_resolver, +) + +GlossaryTermLink = gql.ObjectType( + 'GlossaryTermLink', + fields=[ + gql.Field(name='linkUri', type=gql.ID), + gql.Field(name='created', type=gql.NonNullableType(gql.String)), + gql.Field(name='updated', type=gql.String), + gql.Field(name='deleted', type=gql.String), + gql.Field(name='owner', type=gql.String), + gql.Field(name='nodeUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='targetUri', 
type=gql.NonNullableType(gql.String)), + gql.Field(name='targetType', type=gql.NonNullableType(gql.String)), + gql.Field(name='approvedByOwner', type=gql.NonNullableType(gql.Boolean)), + gql.Field(name='approvedBySteward', type=gql.NonNullableType(gql.Boolean)), + gql.Field(name='term', resolver=resolve_link_node, type=gql.Ref('Term')), + gql.Field( + name='target', + resolver=resolve_link_target, + type=gql.Ref('GlossaryTermLinkTarget'), + ), + ], +) + + +GlossaryNodeStatistics = gql.ObjectType( + name='GlossaryNodeStatistics', + fields=[ + gql.Field(name='categories', type=gql.Integer), + gql.Field(name='terms', type=gql.Integer), + gql.Field(name='associations', type=gql.Integer), + ], +) diff --git a/frontend/src/api/Glossary/listGlossaryCategories.js b/backend/dataall/modules/catalog/db/__init__.py similarity index 100% rename from frontend/src/api/Glossary/listGlossaryCategories.js rename to backend/dataall/modules/catalog/db/__init__.py diff --git a/backend/dataall/modules/catalog/db/glossary_models.py b/backend/dataall/modules/catalog/db/glossary_models.py new file mode 100644 index 000000000..bd0ef0e1f --- /dev/null +++ b/backend/dataall/modules/catalog/db/glossary_models.py @@ -0,0 +1,53 @@ +import enum +from datetime import datetime + +from sqlalchemy import Boolean, Column, String, DateTime, Enum +from sqlalchemy.orm import query_expression + +from dataall.base.db import Base +from dataall.base.db import utils + + +class GlossaryNodeStatus(enum.Enum): + draft = 'draft' + approved = 'approved' + expired = 'expired' + alert = 'alert' + archived = 'archived' + + +class GlossaryNode(Base): + __tablename__ = 'glossary_node' + nodeUri = Column(String, primary_key=True, default=utils.uuid('glossary_node')) + parentUri = Column(String, nullable=True) + nodeType = Column(String, default='G') + status = Column( + String, Enum(GlossaryNodeStatus), default=GlossaryNodeStatus.draft.value + ) + path = Column(String, nullable=False) + label = Column(String, nullable=False) + readme = Column(String, nullable=False) + created = Column(DateTime, default=datetime.now) + updated = Column(DateTime, nullable=True, onupdate=datetime.now) + deleted = Column(DateTime, nullable=True) + owner = Column(String, nullable=False) + admin = Column(String, nullable=True) + isLinked = query_expression() + isMatch = query_expression() + + +class TermLink(Base): + __tablename__ = 'term_link' + linkUri = Column(String, primary_key=True, default=utils.uuid('term_link')) + nodeUri = Column(String, nullable=False) + targetUri = Column(String, nullable=False) + targetType = Column(String, nullable=False) + approvedBySteward = Column(Boolean, default=False) + approvedByOwner = Column(Boolean, default=False) + owner = Column(String, nullable=False) + created = Column(DateTime, default=datetime.now) + updated = Column(DateTime, nullable=True, onupdate=datetime.now) + deleted = Column(DateTime, nullable=True) + path = query_expression() + label = query_expression() + readme = query_expression() diff --git a/backend/dataall/modules/catalog/db/glossary_repositories.py b/backend/dataall/modules/catalog/db/glossary_repositories.py new file mode 100644 index 000000000..48c8a4b02 --- /dev/null +++ b/backend/dataall/modules/catalog/db/glossary_repositories.py @@ -0,0 +1,452 @@ +import logging +from datetime import datetime + +from sqlalchemy import asc, or_, and_, literal, case +from sqlalchemy.orm import with_expression, aliased + +from dataall.base.db import exceptions, paginate, Resource +from dataall.core.permissions import 
permissions +from dataall.modules.catalog.db.glossary_models import GlossaryNodeStatus, TermLink, GlossaryNode +from dataall.base.db.paginator import Page +from dataall.base.context import get_context +from dataall.core.permissions.permission_checker import has_tenant_permission + +logger = logging.getLogger(__name__) + + +class Glossary: + @staticmethod + @has_tenant_permission(permissions.MANAGE_GLOSSARIES) + def create_glossary(session, data=None): + Glossary.validate_params(data) + g: GlossaryNode = GlossaryNode( + label=data.get('label'), + nodeType='G', + parentUri='', + path='/', + readme=data.get('readme', 'no description available'), + owner=get_context().username, + admin=data.get('admin'), + status=GlossaryNodeStatus.approved.value, + ) + session.add(g) + session.commit() + g.path = f'/{g.nodeUri}' + return g + + @staticmethod + @has_tenant_permission(permissions.MANAGE_GLOSSARIES) + def create_category(session, uri, data=None): + Glossary.validate_params(data) + parent: GlossaryNode = session.query(GlossaryNode).get(uri) + if not parent: + raise exceptions.ObjectNotFound('Glossary', uri) + + cat = GlossaryNode( + path=parent.path, + parentUri=parent.nodeUri, + nodeType='C', + label=data.get('label'), + owner=get_context().username, + readme=data.get('readme'), + ) + session.add(cat) + session.commit() + cat.path = parent.path + '/' + cat.nodeUri + return cat + + @staticmethod + @has_tenant_permission(permissions.MANAGE_GLOSSARIES) + def create_term(session, uri, data=None): + Glossary.validate_params(data) + parent: GlossaryNode = session.query(GlossaryNode).get(uri) + if not parent: + raise exceptions.ObjectNotFound('Glossary or Category', uri) + if parent.nodeType not in ['G', 'C']: + raise exceptions.InvalidInput( + 'Term', uri, 'Category or Glossary are required to create a term' + ) + + term = GlossaryNode( + path=parent.path, + parentUri=parent.nodeUri, + nodeType='T', + label=data.get('label'), + readme=data.get('readme'), + owner=get_context().username, + ) + session.add(term) + session.commit() + term.path = parent.path + '/' + term.nodeUri + return term + + @staticmethod + @has_tenant_permission(permissions.MANAGE_GLOSSARIES) + def delete_node(session, uri): + count = 0 + node: GlossaryNode = session.query(GlossaryNode).get(uri) + if not node: + raise exceptions.ObjectNotFound('Node', uri) + node.deleted = datetime.now() + if node.nodeType in ['G', 'C']: + children = session.query(GlossaryNode).filter( + and_( + GlossaryNode.path.startswith(node.path), + GlossaryNode.deleted.is_(None), + ) + ) + count = children.count() + 1 + children.update({'deleted': datetime.now()}, synchronize_session=False) + else: + count = 1 + return count + + @staticmethod + @has_tenant_permission(permissions.MANAGE_GLOSSARIES) + def update_node(session, uri, data=None): + node: GlossaryNode = session.query(GlossaryNode).get(uri) + if not node: + raise exceptions.ObjectNotFound('Node', uri) + for k in data.keys(): + setattr(node, k, data.get(k)) + return node + + @staticmethod + @has_tenant_permission(permissions.MANAGE_GLOSSARIES) + def link_term(session, uri, target_model: Resource, data): + term: GlossaryNode = session.query(GlossaryNode).get(uri) + if not term: + raise exceptions.ObjectNotFound('Node', uri) + if term.nodeType != 'T': + raise exceptions.InvalidInput( + 'NodeType', + 'term.nodeType', + 'associations are allowed for Glossary terms only', + ) + + target_uri: str = data['targetUri'] + target_type: str = data['targetType'] + + target = 
session.query(target_model).get(target_uri) + if not target: + raise exceptions.ObjectNotFound('Association target', uri) + + link = TermLink( + owner=get_context().username, + approvedByOwner=data.get('approvedByOwner', True), + approvedBySteward=data.get('approvedBySteward', True), + nodeUri=uri, + targetUri=target_uri, + targetType=target_type, + ) + session.add(link) + return link + + @staticmethod + def list_glossaries(session, data=None): + q = session.query(GlossaryNode).filter( + GlossaryNode.nodeType == 'G', GlossaryNode.deleted.is_(None) + ) + term = data.get('term') + if term: + q = q.filter( + or_( + GlossaryNode.label.ilike('%' + term + '%'), + GlossaryNode.readme.ilike('%' + term + '%'), + ) + ) + return paginate( + q, page_size=data.get('pageSize', 10), page=data.get('page', 1) + ).to_dict() + + @staticmethod + def list_categories(session, uri, data=None): + q = session.query(GlossaryNode).filter( + and_( + GlossaryNode.parentUri == uri, + GlossaryNode.nodeType == 'C', + GlossaryNode.deleted.is_(None), + ) + ) + + term = data.get('term') + if term: + q = q.filter( + or_( + GlossaryNode.label.ilike(term), + GlossaryNode.readme.ilike(term), + ) + ) + return paginate( + q, page=data.get('page', 1), page_size=data.get('pageSize', 10) + ).to_dict() + + @staticmethod + def list_terms(session, uri, data=None): + q = session.query(GlossaryNode).filter( + and_( + GlossaryNode.parentUri == uri, + GlossaryNode.nodeType == 'T', + GlossaryNode.deleted.is_(None), + ) + ) + term = data.get('term') + if term: + q = q.filter( + or_( + GlossaryNode.label.ilike(term), + GlossaryNode.readme.ilike(term), + ) + ) + return paginate( + q, page=data.get('page', 1), page_size=data.get('pageSize', 10) + ).to_dict() + + @staticmethod + def hierarchical_search(session, data=None): + q = session.query(GlossaryNode).options( + with_expression(GlossaryNode.isMatch, literal(True)) + ) + q = q.filter(GlossaryNode.deleted.is_(None)) + term = data.get('term', None) + if term: + q = q.filter( + or_( + GlossaryNode.label.ilike('%' + term.upper() + '%'), + GlossaryNode.readme.ilike('%' + term.upper() + '%'), + ) + ) + matches = q.subquery('matches') + parents = aliased(GlossaryNode, name='parents') + children = aliased(GlossaryNode, name='children') + + if term: + parent_expr = case( + [ + ( + or_( + parents.label.ilike(f'%{term}%'), + parents.readme.ilike(f'%{term}%'), + ), + literal(True), + ) + ], + else_=literal(False), + ) + else: + parent_expr = literal(False) + + ascendants = ( + session.query(parents) + .options(with_expression(parents.isMatch, parent_expr)) + .join( + matches, + and_( + matches.c.path.startswith(parents.path), + matches.c.deleted.is_(None), + ), + ) + ) + + if term: + child_expr = case( + [ + ( + and_( + or_( + children.label.ilike(f'%{term}%'), + children.readme.ilike(f'%{term}%'), + ), + children.deleted.is_(None), + ), + literal(True), + ) + ], + else_=literal(False), + ) + else: + child_expr = literal(False) + + descendants = ( + session.query(children) + .options(with_expression(children.isMatch, child_expr)) + .join( + matches, + children.path.startswith(matches.c.path), + ) + ) + + all = ascendants.union(descendants) + q = all.order_by(GlossaryNode.path) + + return paginate( + q, page=data.get('page', 1), page_size=data.get('pageSize', 100) + ).to_dict() + + @staticmethod + def search_terms(session, data=None): + q = session.query(GlossaryNode).filter( + GlossaryNode.deleted.is_(None) + ) + term = data.get('term') + if term: + q = q.filter( + or_( + GlossaryNode.label.ilike(term), +
GlossaryNode.readme.ilike(term), + ) + ) + q = q.order_by(asc(GlossaryNode.path)) + return paginate( + q, page=data.get('page', 1), page_size=data.get('pageSize', 10) + ).to_dict() + + @staticmethod + def validate_params(data): + if not data: + raise exceptions.RequiredParameter('data') + if not data.get('label'): + raise exceptions.RequiredParameter('name') + + @staticmethod + def list_node_children(session, source, filter): + q = ( + session.query(GlossaryNode) + .filter(GlossaryNode.path.startswith(source.path + '/')) + .order_by(asc(GlossaryNode.path)) + ) + term = filter.get('term') + nodeType = filter.get('nodeType') + if term: + q = q.filter( + or_( + GlossaryNode.label.ilike(term), + GlossaryNode.readme.ilike(term), + ) + ) + if nodeType: + q = q.filter(GlossaryNode.nodeType == nodeType) + return paginate( + q, page_size=filter.get('pageSize', 10), page=filter.get('page', 1) + ).to_dict() + + @staticmethod + def list_term_associations(session, target_model_definitions, data=None): + source = data['source'] + filter = data['filter'] + + query = None + for definition in target_model_definitions: + model = definition.model + subquery = session.query( + definition.target_uri().label('targetUri'), + literal(definition.target_type.lower()).label('targetType'), + model.label.label('label'), + model.name.label('name'), + model.description.label('description'), + ) + if query: + query = query.union(subquery) + else: + query = subquery + + if query is None: + return Page([], 1, 1, 0) # empty page. All modules are turned off + + linked_objects = query.subquery('linked_objects') + + path = GlossaryNode.path + q = ( + session.query(TermLink) + .options(with_expression(TermLink.path, path)) + .join( + GlossaryNode, + GlossaryNode.nodeUri == TermLink.nodeUri, + ) + .join( + linked_objects, TermLink.targetUri == linked_objects.c.targetUri + ) + ) + if source.nodeType == 'T': + q = q.filter(TermLink.nodeUri == source.nodeUri) + elif source.nodeType in ['C', 'G']: + q = q.filter(GlossaryNode.path.startswith(source.path)) + else: + raise Exception(f'InvalidNodeType ({source.nodeUri}/{source.nodeType})') + + term = filter.get('term') + if term: + q = q.filter( + or_( + linked_objects.c.label.ilike('%' + term + '%'), + linked_objects.c.description.ilike(f'%{term}'), + linked_objects.c.targetType.ilike(f'%{term}'), + ) + ) + q = q.order_by(asc(path)) + + return paginate( + q, page=filter.get('page', 1), page_size=filter.get('pageSize', 25) + ).to_dict() + + @staticmethod + def set_glossary_terms_links( + session, username, target_uri, target_type, glossary_terms + ): + current_links = session.query(TermLink).filter( + TermLink.targetUri == target_uri + ) + for current_link in current_links: + if current_link.nodeUri not in glossary_terms: + session.delete(current_link) + for nodeUri in glossary_terms: + + term = session.query(GlossaryNode).get(nodeUri) + if term: + link = ( + session.query(TermLink) + .filter( + TermLink.targetUri == target_uri, + TermLink.nodeUri == nodeUri, + ) + .first() + ) + if not link: + new_link = TermLink( + targetUri=target_uri, + nodeUri=nodeUri, + targetType=target_type, + owner=username, + approvedByOwner=True, + ) + session.add(new_link) + session.commit() + + @staticmethod + def get_glossary_terms_links(session, target_uri, target_type): + terms = ( + session.query(GlossaryNode) + .join( + TermLink, TermLink.nodeUri == GlossaryNode.nodeUri + ) + .filter( + and_( + TermLink.targetUri == target_uri, + TermLink.targetType == target_type, + ) + ) + ) + + return paginate(terms, page_size=10000,
page=1).to_dict() + + @staticmethod + def delete_glossary_terms_links(session, target_uri, target_type): + term_links = ( + session.query(TermLink) + .filter( + and_( + TermLink.targetUri == target_uri, + TermLink.targetType == target_type, + ) + ) + .all() + ) + for link in term_links: + session.delete(link) diff --git a/frontend/src/api/Glossary/listSubCategories.js b/backend/dataall/modules/catalog/indexers/__init__.py similarity index 100% rename from frontend/src/api/Glossary/listSubCategories.js rename to backend/dataall/modules/catalog/indexers/__init__.py diff --git a/backend/dataall/modules/catalog/indexers/base_indexer.py b/backend/dataall/modules/catalog/indexers/base_indexer.py new file mode 100644 index 000000000..65091d626 --- /dev/null +++ b/backend/dataall/modules/catalog/indexers/base_indexer.py @@ -0,0 +1,73 @@ +import logging +import os +from abc import ABC, abstractmethod +from datetime import datetime +from operator import and_ + +from sqlalchemy.orm import with_expression + +from dataall.modules.catalog.db.glossary_models import GlossaryNode, TermLink +from dataall.base.searchproxy import connect + +log = logging.getLogger(__name__) + + +class BaseIndexer(ABC): + """API to work with OpenSearch""" + _INDEX = 'dataall-index' + _es = None + + @classmethod + def es(cls): + """Lazy creation of the OpenSearch connection""" + if cls._es is None: + es = connect(envname=os.getenv('envname', 'local')) + if not es: + raise Exception('Failed to create ES connection') + cls._es = es + + return cls._es + + @staticmethod + @abstractmethod + def upsert(session, target_id): + raise NotImplementedError("Method upsert is not implemented") + + @classmethod + def delete_doc(cls, doc_id): + es = cls.es() + es.delete(index=cls._INDEX, id=doc_id, ignore=[400, 404]) + return True + + @classmethod + def _index(cls, doc_id, doc): + es = cls.es() + doc['_indexed'] = datetime.now() + if es: + res = es.index(index=cls._INDEX, id=doc_id, body=doc) + log.info(f'doc {doc} for id {doc_id} indexed with response {res}') + return True + else: + log.error(f'ES config is missing doc {doc} for id {doc_id} was not indexed') + return False + + @staticmethod + def _get_target_glossary_terms(session, target_uri): + q = ( + session.query(TermLink) + .options( + with_expression(TermLink.path, GlossaryNode.path), + with_expression(TermLink.label, GlossaryNode.label), + with_expression(TermLink.readme, GlossaryNode.readme), + ) + .join( + GlossaryNode, GlossaryNode.nodeUri == TermLink.nodeUri + ) + .filter( + and_( + TermLink.targetUri == target_uri, + TermLink.approvedBySteward.is_(True), + ) + ) + ) + return [t.path for t in q] diff --git a/backend/dataall/modules/catalog/indexers/catalog_indexer.py b/backend/dataall/modules/catalog/indexers/catalog_indexer.py new file mode 100644 index 000000000..e2e7b4496 --- /dev/null +++ b/backend/dataall/modules/catalog/indexers/catalog_indexer.py @@ -0,0 +1,16 @@ +from abc import ABC +from typing import List + + +class CatalogIndexer(ABC): + _INDEXERS: List['CatalogIndexer'] = [] + + def __init__(self): + CatalogIndexer._INDEXERS.append(self) + + @staticmethod + def all(): + return CatalogIndexer._INDEXERS + + def index(self, session) -> int: + raise NotImplementedError("index is not implemented") diff --git a/backend/dataall/modules/catalog/tasks/__init__.py b/backend/dataall/modules/catalog/tasks/__init__.py new file mode 100644 index 000000000..68ded09e2 --- /dev/null +++ b/backend/dataall/modules/catalog/tasks/__init__.py @@ -0,0 +1 @@ +from . 
import catalog_indexer_task diff --git a/backend/dataall/modules/catalog/tasks/catalog_indexer_task.py b/backend/dataall/modules/catalog/tasks/catalog_indexer_task.py new file mode 100644 index 000000000..570c8b6c3 --- /dev/null +++ b/backend/dataall/modules/catalog/tasks/catalog_indexer_task.py @@ -0,0 +1,36 @@ +import logging +import os +import sys + +from dataall.modules.catalog.indexers.catalog_indexer import CatalogIndexer +from dataall.base.db import get_engine +from dataall.base.loader import load_modules, ImportMode +from dataall.base.utils.alarm_service import AlarmService + +root = logging.getLogger() +root.setLevel(logging.INFO) +if not root.hasHandlers(): + root.addHandler(logging.StreamHandler(sys.stdout)) +log = logging.getLogger(__name__) + + +def index_objects(engine): + try: + indexed_objects_counter = 0 + with engine.scoped_session() as session: + for indexer in CatalogIndexer.all(): + indexed_objects_counter += indexer.index(session) + + log.info(f'Successfully indexed {indexed_objects_counter} objects') + return indexed_objects_counter + except Exception as e: + AlarmService().trigger_catalog_indexing_failure_alarm(error=str(e)) + raise e + + +if __name__ == '__main__': + ENVNAME = os.environ.get('envname', 'local') + ENGINE = get_engine(envname=ENVNAME) + + load_modules({ImportMode.CATALOG_INDEXER_TASK}) + index_objects(engine=ENGINE) diff --git a/backend/dataall/modules/dashboards/__init__.py b/backend/dataall/modules/dashboards/__init__.py new file mode 100644 index 000000000..75e7f4091 --- /dev/null +++ b/backend/dataall/modules/dashboards/__init__.py @@ -0,0 +1,77 @@ +"""Contains the code related to dashboards""" +import logging +from typing import Set, List, Type + +from dataall.core.environment.services.environment_resource_manager import EnvironmentResourceManager +from dataall.modules.dashboards.db.dashboard_repositories import DashboardRepository +from dataall.modules.dashboards.db.dashboard_models import Dashboard +from dataall.base.loader import ImportMode, ModuleInterface + + +log = logging.getLogger(__name__) + + +class DashboardApiModuleInterface(ModuleInterface): + """Implements ModuleInterface for dashboard GraphQl lambda""" + + @staticmethod + def is_supported(modes: Set[ImportMode]) -> bool: + return ImportMode.API in modes + + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + from dataall.modules.feed import FeedApiModuleInterface + from dataall.modules.vote import VoteApiModuleInterface + from dataall.modules.catalog import CatalogApiModuleInterface + + return [FeedApiModuleInterface, CatalogApiModuleInterface, VoteApiModuleInterface] + + def __init__(self): + import dataall.modules.dashboards.api + from dataall.modules.feed.api.registry import FeedRegistry, FeedDefinition + from dataall.modules.catalog.api.registry import GlossaryRegistry, GlossaryDefinition + from dataall.modules.vote.api.resolvers import add_vote_type + from dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer + + FeedRegistry.register(FeedDefinition("Dashboard", Dashboard)) + + GlossaryRegistry.register(GlossaryDefinition( + target_type="Dashboard", + object_type="Dashboard", + model=Dashboard, + reindexer=DashboardIndexer + )) + + add_vote_type("dashboard", DashboardIndexer) + + EnvironmentResourceManager.register(DashboardRepository()) + log.info("Dashboard API has been loaded") + + +class DashboardCdkModuleInterface(ModuleInterface): + + @staticmethod + def is_supported(modes: Set[ImportMode]) -> bool: + return ImportMode.CDK in 
modes + + def __init__(self): + import dataall.modules.dashboards.cdk + log.info("Dashboard CDK code has been loaded") + + +class DashboardCatalogIndexerModuleInterface(ModuleInterface): + + @staticmethod + def is_supported(modes: Set[ImportMode]) -> bool: + return ImportMode.CATALOG_INDEXER_TASK in modes + + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + from dataall.modules.catalog import CatalogIndexerModuleInterface + return [CatalogIndexerModuleInterface] + + def __init__(self): + from dataall.modules.dashboards.indexers.dashboard_catalog_indexer import DashboardCatalogIndexer + + DashboardCatalogIndexer() + log.info("Dashboard catalog indexer task has been loaded") diff --git a/backend/dataall/modules/dashboards/api/__init__.py b/backend/dataall/modules/dashboards/api/__init__.py new file mode 100644 index 000000000..51577f7f8 --- /dev/null +++ b/backend/dataall/modules/dashboards/api/__init__.py @@ -0,0 +1,9 @@ +from dataall.modules.dashboards.api import ( + input_types, + mutations, + queries, + resolvers, + types, +) + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/modules/dashboards/api/enums.py b/backend/dataall/modules/dashboards/api/enums.py new file mode 100644 index 000000000..19160375d --- /dev/null +++ b/backend/dataall/modules/dashboards/api/enums.py @@ -0,0 +1,8 @@ +from dataall.base.api.constants import GraphQLEnumMapper + + +class DashboardRole(GraphQLEnumMapper): + Creator = '999' + Admin = '900' + Shared = '800' + NoPermission = '000' diff --git a/backend/dataall/modules/dashboards/api/input_types.py b/backend/dataall/modules/dashboards/api/input_types.py new file mode 100644 index 000000000..12e33ca97 --- /dev/null +++ b/backend/dataall/modules/dashboards/api/input_types.py @@ -0,0 +1,43 @@ +from dataall.base.api import gql + +ImportDashboardInput = gql.InputType( + name='ImportDashboardInput', + arguments=[ + gql.Argument(name='label', type=gql.NonNullableType(gql.String)), + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='description', type=gql.String), + gql.Argument(name='SamlGroupName', type=gql.String), + gql.Argument(name='tags', type=gql.ArrayType(gql.String)), + gql.Argument(name='dashboardId', type=gql.NonNullableType(gql.String)), + gql.Argument(name='terms', type=gql.ArrayType(gql.String)), + ], +) + +UpdateDashboardInput = gql.InputType( + name='UpdateDashboardInput', + arguments=[ + gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='label', type=gql.String), + gql.Argument(name='description', type=gql.String), + gql.Argument(name='tags', type=gql.ArrayType(gql.String)), + gql.Argument(name='terms', type=gql.ArrayType(gql.String)), + ], +) + +DashboardFilter = gql.InputType( + name='DashboardFilter', + arguments=[ + gql.Argument(name='term', type=gql.String), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + ], +) + +DashboardShareFilter = gql.InputType( + name='DashboardShareFilter', + arguments=[ + gql.Argument(name='term', type=gql.String), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + ], +) diff --git a/backend/dataall/modules/dashboards/api/mutations.py b/backend/dataall/modules/dashboards/api/mutations.py new file mode 100644 index 000000000..cf30ee486 --- /dev/null +++ b/backend/dataall/modules/dashboards/api/mutations.py @@ -0,0 +1,81 @@ +from dataall.base.api import gql +from 
dataall.modules.dashboards.api.resolvers import * + + +importDashboard = gql.MutationField( + name='importDashboard', + type=gql.Ref('Dashboard'), + args=[ + gql.Argument( + name='input', type=gql.NonNullableType(gql.Ref('ImportDashboardInput')) + ) + ], + resolver=import_dashboard, +) + +updateDashboard = gql.MutationField( + name='updateDashboard', + args=[ + gql.Argument( + name='input', type=gql.NonNullableType(gql.Ref('UpdateDashboardInput')) + ), + ], + type=gql.Ref('Dashboard'), + resolver=update_dashboard, +) + + +deleteDashboard = gql.MutationField( + name='deleteDashboard', + type=gql.Boolean, + args=[gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String))], + resolver=delete_dashboard, +) + + +shareDashboard = gql.MutationField( + name='shareDashboard', + type=gql.Ref('DashboardShare'), + args=[ + gql.Argument(name='principalId', type=gql.NonNullableType(gql.String)), + gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String)), + ], + resolver=share_dashboard, +) + +requestDashboardShare = gql.MutationField( + name='requestDashboardShare', + type=gql.Ref('DashboardShare'), + args=[ + gql.Argument(name='principalId', type=gql.NonNullableType(gql.String)), + gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String)), + ], + resolver=request_dashboard_share, +) + +approveDashboardShare = gql.MutationField( + name='approveDashboardShare', + type=gql.Ref('DashboardShare'), + args=[ + gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String)), + ], + resolver=approve_dashboard_share, +) + +rejectDashboardShare = gql.MutationField( + name='rejectDashboardShare', + type=gql.Ref('DashboardShare'), + args=[ + gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String)), + ], + resolver=reject_dashboard_share, +) + +createQuicksightDataSourceSet = gql.MutationField( + name='createQuicksightDataSourceSet', + args=[ + gql.Argument(name='vpcConnectionId', type=gql.NonNullableType(gql.String)) + ], + type=gql.String, + resolver=create_quicksight_data_source_set, +) diff --git a/backend/dataall/modules/dashboards/api/queries.py b/backend/dataall/modules/dashboards/api/queries.py new file mode 100644 index 000000000..72155e303 --- /dev/null +++ b/backend/dataall/modules/dashboards/api/queries.py @@ -0,0 +1,74 @@ +from dataall.base.api import gql +from dataall.modules.dashboards.api.resolvers import * + +searchDashboards = gql.QueryField( + name='searchDashboards', + args=[gql.Argument(name='filter', type=gql.Ref('DashboardFilter'))], + resolver=list_dashboards, + type=gql.Ref('DashboardSearchResults'), +) + +getDashboard = gql.QueryField( + name='getDashboard', + args=[gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String))], + type=gql.Ref('Dashboard'), + resolver=get_dashboard, +) + +getMonitoringDashboardId = gql.QueryField( + name='getMonitoringDashboardId', + type=gql.String, + resolver=get_monitoring_dashboard_id, +) + +getMonitoringVpcConnectionId = gql.QueryField( + name='getMonitoringVPCConnectionId', + type=gql.String, + resolver=get_monitoring_vpc_connection_id, +) + +getPlatformAuthorSession = gql.QueryField( + name='getPlatformAuthorSession', + args=[ + gql.Argument(name='awsAccount', type=gql.NonNullableType(gql.String)), + ], + type=gql.String, + resolver=get_quicksight_author_session, +) + +getPlatformReaderSession = gql.QueryField( + name='getPlatformReaderSession', + args=[ + gql.Argument(name='dashboardId', type=gql.NonNullableType(gql.String)), + ], + type=gql.String, + 
resolver=get_quicksight_reader_session, +) + +getAuthorSession = gql.QueryField( + name='getAuthorSession', + args=[ + gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + ], + type=gql.String, + resolver=get_quicksight_designer_url, +) + + +getReaderSession = gql.QueryField( + name='getReaderSession', + args=[gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String))], + type=gql.String, + resolver=get_quicksight_reader_url, +) + +listDashboardShares = gql.QueryField( + name='listDashboardShares', + args=[ + gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('DashboardShareFilter')), + ], + resolver=list_dashboard_shares, + type=gql.Ref('DashboardShareSearchResults'), +) diff --git a/backend/dataall/modules/dashboards/api/resolvers.py b/backend/dataall/modules/dashboards/api/resolvers.py new file mode 100644 index 000000000..7096b00e8 --- /dev/null +++ b/backend/dataall/modules/dashboards/api/resolvers.py @@ -0,0 +1,148 @@ +from dataall.base.api.context import Context +from dataall.modules.catalog.db.glossary_repositories import Glossary +from dataall.core.organizations.db.organization_repositories import Organization +from dataall.modules.vote.db.vote_repositories import Vote +from dataall.base.db.exceptions import RequiredParameter +from dataall.modules.dashboards.api.enums import DashboardRole +from dataall.modules.dashboards.db.dashboard_repositories import DashboardRepository +from dataall.modules.dashboards.db.dashboard_models import Dashboard +from dataall.modules.dashboards.services.dashboard_quicksight_service import DashboardQuicksightService +from dataall.modules.dashboards.services.dashboard_service import DashboardService +from dataall.modules.dashboards.services.dashboard_share_service import DashboardShareService + + +def import_dashboard(context: Context, source, input: dict = None): + if not input: + raise RequiredParameter(input) + if not input.get('environmentUri'): + raise RequiredParameter('environmentUri') + if not input.get('SamlGroupName'): + raise RequiredParameter('group') + if not input.get('dashboardId'): + raise RequiredParameter('dashboardId') + if not input.get('label'): + raise RequiredParameter('label') + + return DashboardService.import_dashboard( + uri=input['environmentUri'], + admin_group=input['SamlGroupName'], + data=input + ) + + +def update_dashboard(context, source, input: dict = None): + return DashboardService.update_dashboard(uri=input['dashboardUri'], data=input) + + +def list_dashboards(context: Context, source, filter: dict = None): + if not filter: + filter = {} + with context.engine.scoped_session() as session: + return DashboardRepository.paginated_user_dashboards( + session=session, + username=context.username, + groups=context.groups, + data=filter, + ) + + +def get_dashboard(context: Context, source, dashboardUri: str = None): + return DashboardService.get_dashboard(uri=dashboardUri) + + +def resolve_user_role(context: Context, source: Dashboard): + if context.username and source.owner == context.username: + return DashboardRole.Creator.value + elif context.groups and source.SamlGroupName in context.groups: + return DashboardRole.Admin.value + return DashboardRole.Shared.value + + +def get_dashboard_organization(context: Context, source: Dashboard, **kwargs): + with context.engine.scoped_session() as session: + return 
Organization.get_organization_by_uri(session, source.organizationUri) + + +def request_dashboard_share( + context: Context, + source: Dashboard, + principalId: str = None, + dashboardUri: str = None, +): + return DashboardShareService.request_dashboard_share(uri=dashboardUri, principal_id=principalId) + + +def approve_dashboard_share(context: Context, source: Dashboard, shareUri: str = None): + return DashboardShareService.approve_dashboard_share(uri=shareUri) + + +def reject_dashboard_share(context: Context, source: Dashboard, shareUri: str = None): + return DashboardShareService.reject_dashboard_share(uri=shareUri) + + +def list_dashboard_shares( + context: Context, + source: Dashboard, + dashboardUri: str = None, + filter: dict = None, +): + if not filter: + filter = {} + return DashboardShareService.list_dashboard_shares(uri=dashboardUri, data=filter) + + +def share_dashboard( + context: Context, + source: Dashboard, + principalId: str = None, + dashboardUri: str = None, +): + return DashboardShareService.share_dashboard(uri=dashboardUri, principal_id=principalId) + + +def delete_dashboard(context: Context, source, dashboardUri: str = None): + return DashboardService.delete_dashboard(uri=dashboardUri) + + +def resolve_glossary_terms(context: Context, source: Dashboard, **kwargs): + with context.engine.scoped_session() as session: + return Glossary.get_glossary_terms_links( + session, source.dashboardUri, 'Dashboard' + ) + + +def resolve_upvotes(context: Context, source: Dashboard, **kwargs): + with context.engine.scoped_session() as session: + return Vote.count_upvotes( + session, source.dashboardUri, target_type='dashboard' + ) + + +def get_monitoring_dashboard_id(context, source): + return DashboardQuicksightService.get_monitoring_dashboard_id() + + +def get_monitoring_vpc_connection_id(context, source): + return DashboardQuicksightService.get_monitoring_vpc_connection_id() + + +def create_quicksight_data_source_set(context, source, vpcConnectionId: str = None): + return DashboardQuicksightService.create_quicksight_data_source_set(vpcConnectionId) + + +def get_quicksight_author_session(context, source, awsAccount: str = None): + return DashboardQuicksightService.get_quicksight_author_session(awsAccount) + + +def get_quicksight_reader_session(context, source, dashboardId: str = None): + return DashboardQuicksightService.get_quicksight_reader_session(dashboardId) + + +def get_quicksight_reader_url(context, source, dashboardUri: str = None): + return DashboardQuicksightService.get_quicksight_reader_url(uri=dashboardUri) + + +def get_quicksight_designer_url( + context, source, environmentUri: str = None, dashboardUri: str = None +): + return DashboardQuicksightService.get_quicksight_designer_url(uri=environmentUri) diff --git a/backend/dataall/modules/dashboards/api/types.py b/backend/dataall/modules/dashboards/api/types.py new file mode 100644 index 000000000..afa2df3af --- /dev/null +++ b/backend/dataall/modules/dashboards/api/types.py @@ -0,0 +1,85 @@ +from dataall.base.api import gql +from dataall.modules.dashboards.api.resolvers import * + +from dataall.core.environment.api.resolvers import resolve_environment + +Dashboard = gql.ObjectType( + name='Dashboard', + fields=[ + gql.Field('dashboardUri', type=gql.ID), + gql.Field('name', type=gql.String), + gql.Field('label', type=gql.String), + gql.Field('description', type=gql.String), + gql.Field('DashboardId', type=gql.String), + gql.Field('tags', type=gql.ArrayType(gql.String)), + gql.Field('created', type=gql.String), + 
gql.Field('updated', type=gql.String), + gql.Field('owner', type=gql.String), + gql.Field('SamlGroupName', type=gql.String), + gql.Field( + 'organization', + type=gql.Ref('Organization'), + resolver=get_dashboard_organization, + ), + gql.Field( + 'environment', + type=gql.Ref('Environment'), + resolver=resolve_environment, + ), + gql.Field( + 'userRoleForDashboard', + type=DashboardRole.toGraphQLEnum(), + resolver=resolve_user_role, + ), + gql.Field( + name='terms', + type=gql.Ref('TermSearchResult'), + resolver=resolve_glossary_terms, + ), + gql.Field( + 'upvotes', + type=gql.Integer, + resolver=resolve_upvotes, + ), + ], +) + +DashboardShare = gql.ObjectType( + name='DashboardShare', + fields=[ + gql.Field('shareUri', type=gql.ID), + gql.Field('dashboardUri', type=gql.ID), + gql.Field('name', type=gql.String), + gql.Field('label', type=gql.String), + gql.Field('SamlGroupName', type=gql.String), + gql.Field('status', type=gql.String), + gql.Field('owner', type=gql.String), + gql.Field('tags', type=gql.ArrayType(gql.String)), + gql.Field('created', type=gql.String), + gql.Field('updated', type=gql.String), + ], +) + +DashboardSearchResults = gql.ObjectType( + name='DashboardSearchResults', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(Dashboard)), + ], +) + +DashboardShareSearchResults = gql.ObjectType( + name='DashboardShareSearchResults', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(DashboardShare)), + ], +) diff --git a/frontend/src/api/Worksheet/deleteWorksheetShare.js b/backend/dataall/modules/dashboards/aws/__init__.py similarity index 100% rename from frontend/src/api/Worksheet/deleteWorksheetShare.js rename to backend/dataall/modules/dashboards/aws/__init__.py diff --git a/backend/dataall/modules/dashboards/aws/dashboard_quicksight_client.py b/backend/dataall/modules/dashboards/aws/dashboard_quicksight_client.py new file mode 100644 index 000000000..118da357e --- /dev/null +++ b/backend/dataall/modules/dashboards/aws/dashboard_quicksight_client.py @@ -0,0 +1,264 @@ +import logging +import os +import ast + +from botocore.exceptions import ClientError + +from dataall.base.aws.parameter_store import ParameterStoreManager +from dataall.base.aws.quicksight import QuicksightClient +from dataall.base.aws.secrets_manager import SecretsManager +from dataall.base.aws.sts import SessionHelper + +log = logging.getLogger(__name__) +log.setLevel(logging.DEBUG) + + +class DashboardQuicksightClient: + _DEFAULT_GROUP_NAME = QuicksightClient.DEFAULT_GROUP_NAME + + def __init__(self, username, aws_account_id, region='eu-west-1'): + session = SessionHelper.remote_session(accountid=aws_account_id) + self._client = session.client('quicksight', region_name=region) + self._account_id = aws_account_id + self._region = region + self._username = username + + def register_user_in_group(self, group_name, user_role='READER'): + QuicksightClient.create_quicksight_group(self._account_id, group_name) + user = self._describe_user() + + if user is not None: + self._client.update_user( + UserName=self._username, + 
AwsAccountId=self._account_id, + Namespace='default', + Email=self._username, + Role=user_role, + ) + else: + self._client.register_user( + UserName=self._username, + Email=self._username, + AwsAccountId=self._account_id, + Namespace='default', + IdentityType='QUICKSIGHT', + UserRole=user_role, + ) + + response = self._client.list_user_groups( + UserName=self._username, AwsAccountId=self._account_id, Namespace='default' + ) + log.info(f'list_user_groups for {self._username}: {response})') + if group_name not in [g['GroupName'] for g in response['GroupList']]: + log.warning(f'Adding {self._username} to Quicksight group {group_name} on {self._account_id}') + self._client.create_group_membership( + MemberName=self._username, + GroupName=group_name, + AwsAccountId=self._account_id, + Namespace='default', + ) + return self._describe_user() + + def get_reader_session(self, user_role="READER", dashboard_id=None, domain_name: str = None): + user = self._describe_user() + if user is None: + user = self.register_user_in_group(self._DEFAULT_GROUP_NAME, user_role) + + response = self._client.generate_embed_url_for_registered_user( + AwsAccountId=self._account_id, + SessionLifetimeInMinutes=120, + UserArn=user.get("Arn"), + ExperienceConfiguration={ + "Dashboard": { + "InitialDashboardId": dashboard_id, + }, + }, + AllowedDomains=[domain_name], + ) + return response.get('EmbedUrl') + + def get_shared_reader_session(self, group_name, user_role='READER', dashboard_id=None): + aws_account_id = self._account_id + identity_region = QuicksightClient.get_identity_region(aws_account_id) + group_principal = f"arn:aws:quicksight:{identity_region}:{aws_account_id}:group/default/{group_name}" + + user = self.register_user_in_group(group_name, user_role) + + read_principals, write_principals = self._check_dashboard_permissions(dashboard_id) + + if group_principal not in read_principals: + permissions = self._client.update_dashboard_permissions( + AwsAccountId=aws_account_id, + DashboardId=dashboard_id, + GrantPermissions=[ + { + 'Principal': group_principal, + 'Actions': [ + "quicksight:DescribeDashboard", + "quicksight:ListDashboardVersions", + "quicksight:QueryDashboard", + ] + }, + ] + ) + log.info(f"Permissions granted: {permissions}") + + response = self._client.get_dashboard_embed_url( + AwsAccountId=aws_account_id, + DashboardId=dashboard_id, + IdentityType='QUICKSIGHT', + SessionLifetimeInMinutes=120, + UserArn=user.get('Arn'), + ) + return response.get('EmbedUrl') + + def get_anonymous_session(self, dashboard_id=None): + response = self._client.generate_embed_url_for_anonymous_user( + AwsAccountId=self._account_id, + SessionLifetimeInMinutes=120, + Namespace='default', + SessionTags=[{'Key': self._DEFAULT_GROUP_NAME, 'Value': self._username}], + AuthorizedResourceArns=[ + f'arn:aws:quicksight:{self._region}:{self._account_id}:dashboard/{dashboard_id}', + ], + ExperienceConfiguration={'Dashboard': {'InitialDashboardId': dashboard_id}}, + ) + return response.get('EmbedUrl') + + def get_author_session(self): + user = self._describe_user() + if user is None or user.get("Role", None) not in ["AUTHOR", "ADMIN"]: + user = self.register_user_in_group(self._DEFAULT_GROUP_NAME, "AUTHOR") + + response = self._client.get_session_embed_url( + AwsAccountId=self._account_id, + EntryPoint='/start/dashboards', + SessionLifetimeInMinutes=120, + UserArn=user['Arn'], + ) + return response['EmbedUrl'] + + def can_import_dashboard(self, dashboard_id): + user = self._describe_user() + if not user: + return False + + groups = 
self._list_user_groups() + grouparns = [g['Arn'] for g in groups] + try: + response = self._client.describe_dashboard_permissions( + AwsAccountId=self._account_id, DashboardId=dashboard_id + ) + except ClientError as e: + raise e + + permissions = response.get('Permissions', []) + for p in permissions: + if p['Principal'] == user.get('Arn') or p['Principal'] in grouparns: + for a in p['Actions']: + if a in [ + 'quicksight:UpdateDashboard', + 'UpdateDashboardPermissions', + ]: + return True + + return False + + def create_data_source_vpc(self, vpc_connection_id): + client = self._client + aws_account_id = self._account_id + region = self._region + + self.register_user_in_group(self._DEFAULT_GROUP_NAME, 'AUTHOR') + try: + client.describe_data_source( + AwsAccountId=aws_account_id, DataSourceId="dataall-metadata-db" + ) + + except client.exceptions.ResourceNotFoundException: + aurora_secret_arn = ParameterStoreManager.get_parameter_value( + AwsAccountId=aws_account_id, + region=region, + parameter_path=f'/dataall/{os.getenv("envname", "local")}/aurora/secret_arn' + ) + + aurora_params = SecretsManager(aws_account_id, region).get_secret_value( + secret_id=aurora_secret_arn + ) + aurora_params_dict = ast.literal_eval(aurora_params) + client.create_data_source( + AwsAccountId=aws_account_id, + DataSourceId="dataall-metadata-db", + Name="dataall-metadata-db", + Type="AURORA_POSTGRESQL", + DataSourceParameters={ + 'AuroraPostgreSqlParameters': { + 'Host': aurora_params_dict["host"], + 'Port': "5432", + 'Database': aurora_params_dict["dbname"] + } + }, + Credentials={ + "CredentialPair": { + "Username": aurora_params_dict["username"], + "Password": aurora_params_dict["password"], + } + }, + Permissions=[ + { + "Principal": f"arn:aws:quicksight:{region}:{aws_account_id}:group/default/dataall", + "Actions": [ + "quicksight:UpdateDataSourcePermissions", + "quicksight:DescribeDataSource", + "quicksight:DescribeDataSourcePermissions", + "quicksight:PassDataSource", + "quicksight:UpdateDataSource", + "quicksight:DeleteDataSource" + ] + } + ], + VpcConnectionProperties={ + 'VpcConnectionArn': f"arn:aws:quicksight:{region}:{aws_account_id}:vpcConnection/" + f"{vpc_connection_id}" + } + ) + + return "dataall-metadata-db" + + def _check_dashboard_permissions(self, dashboard_id): + response = self._client.describe_dashboard_permissions( + AwsAccountId=self._account_id, + DashboardId=dashboard_id + )['Permissions'] + log.info(f"Dashboard initial permissions: {response}") + read_principals = [] + write_principals = [] + + for a, p in zip([p["Actions"] for p in response], [p["Principal"] for p in response]): + write_principals.append(p) if "Update" in str(a) else read_principals.append(p) + + log.info(f"Dashboard updated permissions, Read principals: {read_principals}") + log.info(f"Dashboard updated permissions, Write principals: {write_principals}") + + return read_principals, write_principals + + def _list_user_groups(self): + client = QuicksightClient.get_quicksight_client_in_identity_region(self._account_id) + user = self._describe_user() + if not user: + return [] + response = client.list_user_groups( + UserName=self._username, AwsAccountId=self._account_id, Namespace='default' + ) + return response['GroupList'] + + def _describe_user(self): + """Describes a QS user, returns None if not found""" + client = QuicksightClient.get_quicksight_client_in_identity_region(self._account_id) + try: + response = client.describe_user( + UserName=self._username, AwsAccountId=self._account_id, Namespace='default' + ) + 
except ClientError: + return None + return response.get('User') diff --git a/backend/dataall/modules/dashboards/cdk/__init__.py b/backend/dataall/modules/dashboards/cdk/__init__.py new file mode 100644 index 000000000..be0865cd1 --- /dev/null +++ b/backend/dataall/modules/dashboards/cdk/__init__.py @@ -0,0 +1,4 @@ +from dataall.modules.dashboards.cdk import env_role_dashboards_policy +from dataall.modules.dashboards.cdk import pivot_role_dashboards_policy + +__all__ = ['env_role_dashboards_policy', 'pivot_role_dashboards_policy'] diff --git a/backend/dataall/modules/dashboards/cdk/env_role_dashboards_policy.py b/backend/dataall/modules/dashboards/cdk/env_role_dashboards_policy.py new file mode 100644 index 000000000..101c7dcd0 --- /dev/null +++ b/backend/dataall/modules/dashboards/cdk/env_role_dashboards_policy.py @@ -0,0 +1,32 @@ +from aws_cdk import aws_iam as iam + +from dataall.core.environment.cdk.env_role_core_policies.service_policy import ServicePolicy +from dataall.modules.dashboards.services.dashboard_permissions import CREATE_DASHBOARD + + +class QuickSightPolicy(ServicePolicy): + """ + Class including all permissions needed to work with Amazon Quicksight. + It allows data.all users to: + - + """ + def get_statements(self, group_permissions, **kwargs): + if CREATE_DASHBOARD not in group_permissions: + return [] + + return [ + iam.PolicyStatement( + # sid="QuicksightList", + effect=iam.Effect.ALLOW, + actions=['quicksight:List*'], + resources=['*'], + ), + iam.PolicyStatement( + # sid="QuicksightManageTeamResources", + effect=iam.Effect.ALLOW, + actions=['quicksight:*'], + resources=[ + f'arn:aws:quicksight:{self.region}:{self.account}:*/{self.resource_prefix}-{self.team.groupUri}*' + ], + ), + ] diff --git a/backend/dataall/modules/dashboards/cdk/pivot_role_dashboards_policy.py b/backend/dataall/modules/dashboards/cdk/pivot_role_dashboards_policy.py new file mode 100644 index 000000000..eb4fcbf98 --- /dev/null +++ b/backend/dataall/modules/dashboards/cdk/pivot_role_dashboards_policy.py @@ -0,0 +1,53 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class DashboardsPivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with AWS Quicksight. + It allows pivot role to: + - .... 
+ """ + def get_statements(self): + statements = [ + iam.PolicyStatement( + sid='QuickSight', + effect=iam.Effect.ALLOW, + actions=[ + 'quicksight:CreateGroup', + 'quicksight:DescribeGroup', + 'quicksight:ListDashboards', + 'quicksight:DescribeDataSource', + 'quicksight:DescribeDashboard', + 'quicksight:DescribeUser', + 'quicksight:SearchDashboards', + 'quicksight:GetDashboardEmbedUrl', + 'quicksight:GenerateEmbedUrlForAnonymousUser', + 'quicksight:UpdateUser', + 'quicksight:ListUserGroups', + 'quicksight:RegisterUser', + 'quicksight:DescribeDashboardPermissions', + 'quicksight:UpdateDashboardPermissions', + 'quicksight:GetAuthCode', + 'quicksight:CreateGroupMembership', + 'quicksight:DescribeAccountSubscription', + ], + resources=[ + f'arn:aws:quicksight:*:{self.account}:group/default/*', + f'arn:aws:quicksight:*:{self.account}:user/default/*', + f'arn:aws:quicksight:*:{self.account}:datasource/*', + f'arn:aws:quicksight:*:{self.account}:user/*', + f'arn:aws:quicksight:*:{self.account}:dashboard/*', + f'arn:aws:quicksight:*:{self.account}:namespace/default', + f'arn:aws:quicksight:*:{self.account}:account/*', + f'arn:aws:quicksight:*:{self.account}:*', + ], + ), + iam.PolicyStatement( + sid='QuickSightSession', + effect=iam.Effect.ALLOW, + actions=['quicksight:GetSessionEmbedUrl'], + resources=['*'], + ), + ] + return statements diff --git a/backend/dataall/modules/dashboards/db/__init__.py b/backend/dataall/modules/dashboards/db/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/dashboards/db/dashboard_models.py b/backend/dataall/modules/dashboards/db/dashboard_models.py new file mode 100644 index 000000000..436a9ba27 --- /dev/null +++ b/backend/dataall/modules/dashboards/db/dashboard_models.py @@ -0,0 +1,45 @@ +from enum import Enum + +from sqlalchemy import Column, String, ForeignKey +from sqlalchemy.orm import query_expression + +from dataall.base.db import Base, Resource, utils + + +class DashboardShareStatus(Enum): + REQUESTED = 'REQUESTED' + APPROVED = 'APPROVED' + REJECTED = 'REJECTED' + + +class DashboardShare(Base): + __tablename__ = 'dashboardshare' + shareUri = Column( + String, nullable=False, primary_key=True, default=utils.uuid('shareddashboard') + ) + dashboardUri = Column(String, nullable=False, default=utils.uuid('dashboard')) + SamlGroupName = Column(String, nullable=False) + owner = Column(String, nullable=True) + status = Column( + String, nullable=False, default=DashboardShareStatus.REQUESTED.value + ) + + +class Dashboard(Resource, Base): + __tablename__ = 'dashboard' + environmentUri = Column(String, ForeignKey("environment.environmentUri"), nullable=False) + organizationUri = Column(String, nullable=False) + dashboardUri = Column( + String, nullable=False, primary_key=True, default=utils.uuid('dashboard') + ) + region = Column(String, default='eu-west-1') + AwsAccountId = Column(String, nullable=False) + namespace = Column(String, nullable=False) + DashboardId = Column(String, nullable=False) + SamlGroupName = Column(String, nullable=False) + + userRoleForDashboard = query_expression() + + @classmethod + def uri(cls): + return cls.dashboardUri diff --git a/backend/dataall/modules/dashboards/db/dashboard_repositories.py b/backend/dataall/modules/dashboards/db/dashboard_repositories.py new file mode 100644 index 000000000..91916f8ff --- /dev/null +++ b/backend/dataall/modules/dashboards/db/dashboard_repositories.py @@ -0,0 +1,189 @@ +import logging + +from sqlalchemy import or_, and_ +from sqlalchemy.orm import Query + 
+from dataall.core.environment.services.environment_resource_manager import EnvironmentResource +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.base.db import exceptions, paginate +from dataall.modules.dashboards.db.dashboard_models import DashboardShare, DashboardShareStatus, Dashboard + +logger = logging.getLogger(__name__) + + +class DashboardRepository(EnvironmentResource): + + @staticmethod + def count_resources(session, environment, group_uri) -> int: + return ( + session.query(Dashboard) + .filter( + and_( + Dashboard.environmentUri == environment.environmentUri, + Dashboard.SamlGroupName == group_uri + )) + .count() + ) + + @staticmethod + def update_env(session, environment): + return EnvironmentService.get_boolean_env_param(session, environment, "dashboardsEnabled") + + @staticmethod + def create_dashboard(session, env, username: str, data: dict = None) -> Dashboard: + dashboard: Dashboard = Dashboard( + label=data.get('label', 'untitled'), + environmentUri=data.get('environmentUri'), + organizationUri=env.organizationUri, + region=env.region, + DashboardId=data.get('dashboardId'), + AwsAccountId=env.AwsAccountId, + owner=username, + namespace='test', + tags=data.get('tags', []), + SamlGroupName=data['SamlGroupName'], + ) + session.add(dashboard) + session.commit() + return dashboard + + @staticmethod + def get_dashboard_by_uri(session, uri) -> Dashboard: + dashboard: Dashboard = session.query(Dashboard).get(uri) + if not dashboard: + raise exceptions.ObjectNotFound('Dashboard', uri) + return dashboard + + @staticmethod + def _query_user_dashboards(session, username, groups, filter) -> Query: + query = ( + session.query(Dashboard) + .outerjoin( + DashboardShare, + Dashboard.dashboardUri == DashboardShare.dashboardUri, + ) + .filter( + or_( + Dashboard.owner == username, + Dashboard.SamlGroupName.in_(groups), + and_( + DashboardShare.SamlGroupName.in_(groups), + DashboardShare.status + == DashboardShareStatus.APPROVED.value, + ), + ) + ) + ) + if filter and filter.get('term'): + query = query.filter( + or_( + Dashboard.description.ilike(filter.get('term') + '%%'), + Dashboard.label.ilike(filter.get('term') + '%%'), + ) + ) + return query + + @staticmethod + def paginated_user_dashboards( + session, username, groups, data=None + ) -> dict: + return paginate( + query=DashboardRepository._query_user_dashboards(session, username, groups, data), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def _query_dashboard_shares(session, username, groups, uri, filter) -> Query: + query = ( + session.query(DashboardShare) + .join( + Dashboard, + Dashboard.dashboardUri == DashboardShare.dashboardUri, + ) + .filter( + and_( + DashboardShare.dashboardUri == uri, + or_( + Dashboard.owner == username, + Dashboard.SamlGroupName.in_(groups), + ), + ) + ) + ) + if filter and filter.get('term'): + query = query.filter( + or_( + DashboardShare.SamlGroupName.ilike( + filter.get('term') + '%%' + ), + Dashboard.label.ilike(filter.get('term') + '%%'), + ) + ) + return query + + @staticmethod + def query_all_user_groups_shareddashboard(session, groups, uri) -> [str]: + query = ( + session.query(DashboardShare) + .filter( + and_( + DashboardShare.dashboardUri == uri, + DashboardShare.SamlGroupName.in_(groups), + ) + ) + ) + + return [share.SamlGroupName for share in query.all()] + + @staticmethod + def paginated_dashboard_shares( + session, username, groups, uri, data=None + ) -> dict: + return paginate( + 
query=DashboardRepository._query_dashboard_shares( + session, username, groups, uri, data + ), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def delete_dashboard(session, dashboard) -> bool: + session.delete(dashboard) + return True + + @staticmethod + def create_share( + session, + username: str, + dashboard: Dashboard, + principal_id: str, + init_status: DashboardShareStatus = DashboardShareStatus.REQUESTED + ) -> DashboardShare: + share = DashboardShare( + owner=username, + dashboardUri=dashboard.dashboardUri, + SamlGroupName=principal_id, + status=init_status.value, + ) + session.add(share) + return share + + @staticmethod + def get_dashboard_share_by_uri(session, uri) -> DashboardShare: + share: DashboardShare = session.query(DashboardShare).get(uri) + if not share: + raise exceptions.ObjectNotFound('DashboardShare', uri) + return share + + @staticmethod + def find_share_for_group(session, dashboard_uri, group) -> DashboardShare: + return ( + session.query(DashboardShare) + .filter( + DashboardShare.dashboardUri == dashboard_uri, + DashboardShare.SamlGroupName == group, + ) + .first() + ) diff --git a/backend/dataall/modules/dashboards/indexers/__init__.py b/backend/dataall/modules/dashboards/indexers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/dashboards/indexers/dashboard_catalog_indexer.py b/backend/dataall/modules/dashboards/indexers/dashboard_catalog_indexer.py new file mode 100644 index 000000000..478ce08b5 --- /dev/null +++ b/backend/dataall/modules/dashboards/indexers/dashboard_catalog_indexer.py @@ -0,0 +1,19 @@ +import logging + +from dataall.modules.catalog.indexers.catalog_indexer import CatalogIndexer +from dataall.modules.dashboards import Dashboard +from dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer + +log = logging.getLogger(__name__) + + +class DashboardCatalogIndexer(CatalogIndexer): + + def index(self, session) -> int: + all_dashboards: [Dashboard] = session.query(Dashboard).all() + log.info(f'Found {len(all_dashboards)} dashboards') + dashboard: Dashboard + for dashboard in all_dashboards: + DashboardIndexer.upsert(session=session, dashboard_uri=dashboard.dashboardUri) + + return len(all_dashboards) diff --git a/backend/dataall/modules/dashboards/indexers/dashboard_indexer.py b/backend/dataall/modules/dashboards/indexers/dashboard_indexer.py new file mode 100644 index 000000000..74d91b591 --- /dev/null +++ b/backend/dataall/modules/dashboards/indexers/dashboard_indexer.py @@ -0,0 +1,49 @@ +import logging + +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.organizations.db.organization_repositories import Organization +from dataall.modules.vote.db.vote_repositories import Vote +from dataall.modules.dashboards import DashboardRepository +from dataall.modules.catalog.indexers.base_indexer import BaseIndexer +from dataall.modules.dashboards.db.dashboard_models import Dashboard + +log = logging.getLogger(__name__) + + +class DashboardIndexer(BaseIndexer): + @classmethod + def upsert(cls, session, dashboard_uri: str): + dashboard: Dashboard = DashboardRepository.get_dashboard_by_uri(session, dashboard_uri) + + if dashboard: + env = EnvironmentService.get_environment_by_uri(session, dashboard.environmentUri) + org = Organization.get_organization_by_uri(session, env.organizationUri) + + glossary = BaseIndexer._get_target_glossary_terms(session, dashboard_uri) + count_upvotes = 
Vote.count_upvotes( + session, dashboard_uri, target_type='dashboard' + ) + BaseIndexer._index( + doc_id=dashboard_uri, + doc={ + 'name': dashboard.name, + 'admins': dashboard.SamlGroupName, + 'owner': dashboard.owner, + 'label': dashboard.label, + 'resourceKind': 'dashboard', + 'description': dashboard.description, + 'tags': [f.replace('-', '') for f in dashboard.tags or []], + 'topics': [], + 'region': dashboard.region.replace('-', ''), + 'environmentUri': env.environmentUri, + 'environmentName': env.name, + 'organizationUri': org.organizationUri, + 'organizationName': org.name, + 'created': dashboard.created, + 'updated': dashboard.updated, + 'deleted': dashboard.deleted, + 'glossary': glossary, + 'upvotes': count_upvotes, + }, + ) + return dashboard diff --git a/backend/dataall/modules/dashboards/services/__init__.py b/backend/dataall/modules/dashboards/services/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/dashboards/services/dashboard_permissions.py b/backend/dataall/modules/dashboards/services/dashboard_permissions.py new file mode 100644 index 000000000..089f29457 --- /dev/null +++ b/backend/dataall/modules/dashboards/services/dashboard_permissions.py @@ -0,0 +1,41 @@ +from dataall.core.permissions.permissions import ENVIRONMENT_INVITED, ENVIRONMENT_INVITATION_REQUEST, ENVIRONMENT_ALL, TENANT_ALL, \ + TENANT_ALL_WITH_DESC, RESOURCES_ALL, RESOURCES_ALL_WITH_DESC + +""" +DASHBOARDS +""" +GET_DASHBOARD = 'GET_DASHBOARD' +UPDATE_DASHBOARD = 'UPDATE_DASHBOARD' +DELETE_DASHBOARD = 'DELETE_DASHBOARD' +SHARE_DASHBOARD = 'SHARE_DASHBOARD' +DASHBOARD_ALL = [ + GET_DASHBOARD, + UPDATE_DASHBOARD, + DELETE_DASHBOARD, + SHARE_DASHBOARD, +] + +RESOURCES_ALL.extend(DASHBOARD_ALL) +for perm in DASHBOARD_ALL: + RESOURCES_ALL_WITH_DESC[perm] = perm + +""" +TENANT PERMISSIONS +""" +MANAGE_DASHBOARDS = 'MANAGE_DASHBOARDS' + +TENANT_ALL.append(MANAGE_DASHBOARDS) +TENANT_ALL_WITH_DESC[MANAGE_DASHBOARDS] = 'Manage dashboards' + + +""" +ENVIRONMENT PERMISSIONS +""" +CREATE_DASHBOARD = 'CREATE_DASHBOARD' + + +ENVIRONMENT_INVITED.append(CREATE_DASHBOARD) +ENVIRONMENT_INVITATION_REQUEST.append(CREATE_DASHBOARD) +ENVIRONMENT_ALL.append(CREATE_DASHBOARD) +RESOURCES_ALL.append(CREATE_DASHBOARD) +RESOURCES_ALL_WITH_DESC[CREATE_DASHBOARD] = 'Create dashboards on this environment' diff --git a/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py b/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py new file mode 100644 index 000000000..56bffabc0 --- /dev/null +++ b/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py @@ -0,0 +1,170 @@ +import os + +from dataall.base.aws.parameter_store import ParameterStoreManager +from dataall.base.aws.sts import SessionHelper +from dataall.base.context import get_context +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.permissions.db.tenant_policy_repositories import TenantPolicy +from dataall.core.permissions.permission_checker import has_resource_permission +from dataall.base.db.exceptions import UnauthorizedOperation, TenantUnauthorized, AWSResourceNotFound +from dataall.core.permissions.permissions import TENANT_ALL +from dataall.modules.dashboards import DashboardRepository, Dashboard +from dataall.modules.dashboards.aws.dashboard_quicksight_client import DashboardQuicksightClient +from dataall.modules.dashboards.services.dashboard_permissions import GET_DASHBOARD, CREATE_DASHBOARD +from 
dataall.base.utils import Parameter + + +class DashboardQuicksightService: + _PARAM_STORE = Parameter() + _REGION = os.getenv('AWS_REGION', 'eu-west-1') + + @classmethod + @has_resource_permission(GET_DASHBOARD) + def get_quicksight_reader_url(cls, uri): + context = get_context() + with context.db_engine.scoped_session() as session: + dash: Dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) + env = EnvironmentService.get_environment_by_uri(session, dash.environmentUri) + cls._check_dashboards_enabled(session, env, GET_DASHBOARD) + client = cls._client(env.AwsAccountId, env.region) + + if dash.SamlGroupName in context.groups: + return client.get_reader_session( + dashboard_id=dash.DashboardId, + domain_name=DashboardQuicksightService._get_domain_url(), + ) + + else: + shared_groups = DashboardRepository.query_all_user_groups_shareddashboard( + session=session, + groups=context.groups, + uri=uri + ) + if not shared_groups: + raise UnauthorizedOperation( + action=GET_DASHBOARD, + message='Dashboard has not been shared with your Teams', + ) + + session_type = ParameterStoreManager.get_parameter_value( + parameter_path=f"/dataall/{os.getenv('envname', 'local')}/quicksight/sharedDashboardsSessions" + ) + + if session_type == 'reader': + return client.get_shared_reader_session( + group_name=shared_groups[0], + dashboard_id=dash.DashboardId, + ) + else: + return client.get_anonymous_session(dashboard_id=dash.DashboardId) + + @classmethod + @has_resource_permission(CREATE_DASHBOARD) + def get_quicksight_designer_url(cls, uri: str): + context = get_context() + with context.db_engine.scoped_session() as session: + env = EnvironmentService.get_environment_by_uri(session, uri) + cls._check_dashboards_enabled(session, env, CREATE_DASHBOARD) + + return cls._client(env.AwsAccountId, env.region).get_author_session() + + @staticmethod + def get_monitoring_dashboard_id(): + current_account = SessionHelper.get_account() + dashboard_id = ParameterStoreManager.get_parameter_value( + AwsAccountId=current_account, + region=DashboardQuicksightService._REGION, + parameter_path=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/DashboardId' + ) + + if not dashboard_id: + raise AWSResourceNotFound( + action='GET_DASHBOARD_ID', + message='Dashboard Id could not be found on AWS Parameter Store', + ) + return dashboard_id + + @staticmethod + def get_monitoring_vpc_connection_id(): + current_account = SessionHelper.get_account() + vpc_connection_id = ParameterStoreManager.get_parameter_value( + AwsAccountId=current_account, + region=DashboardQuicksightService._REGION, + parameter_path=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/VPCConnectionId' + ) + + if not vpc_connection_id: + raise AWSResourceNotFound( + action='GET_VPC_CONNECTION_ID', + message='VPC Connection Id could not be found on AWS Parameter Store', + ) + return vpc_connection_id + + @classmethod + def create_quicksight_data_source_set(cls, vpc_connection_id): + client = cls._client() + client.register_user_in_group(group_name='dataall', user_role='AUTHOR') + + datasource_id = client.create_data_source_vpc(vpc_connection_id=vpc_connection_id) + # Data sets are not created programmatically. Too much overhead for the value added. 
+ # However, an example API is provided: datasets = Quicksight.create_data_set_from_source( + # AwsAccountId=current_account, region=region, UserName='dataallTenantUser', + # dataSourceId=datasourceId, tablesToImport=['organization', + # 'environment', 'dataset', 'datapipeline', 'dashboard', 'share_object'] + # ) + + return datasource_id + + @classmethod + def get_quicksight_author_session(cls, aws_account): + DashboardQuicksightService._check_user_must_be_admin() + return cls._client(aws_account).get_author_session() + + @classmethod + def get_quicksight_reader_session(cls, dashboard_uri): + cls._check_user_must_be_admin() + client = cls._client() + return client.get_reader_session(user_role='READER', dashboard_id=dashboard_uri) + + @staticmethod + def _check_user_must_be_admin(): + context = get_context() + admin = TenantPolicy.is_tenant_admin(context.groups) + + if not admin: + raise TenantUnauthorized( + username=context.username, + action=TENANT_ALL, + tenant_name=context.username, + ) + + @staticmethod + def _get_domain_url(): + envname = os.getenv("envname", "local") + if envname in ["local", "dkrcompose"]: + return "http://localhost:8080" + + domain_name = DashboardQuicksightService._PARAM_STORE.get_parameter( + env=envname, + path="frontend/custom_domain_name" + ) + + return f"https://{domain_name}" + + @staticmethod + def _check_dashboards_enabled(session, env, action): + enabled = EnvironmentService.get_boolean_env_param(session, env, "dashboardsEnabled") + if not enabled: + raise UnauthorizedOperation( + action=action, + message=f'Dashboards feature is disabled for the environment {env.label}', + ) + + @classmethod + def _client(cls, account_id: str = None, region: str = None): + if not account_id: + account_id = SessionHelper.get_account() + + if not region: + region = cls._REGION + return DashboardQuicksightClient(get_context().username, account_id, region) diff --git a/backend/dataall/modules/dashboards/services/dashboard_service.py b/backend/dataall/modules/dashboards/services/dashboard_service.py new file mode 100644 index 000000000..a78ee0ab1 --- /dev/null +++ b/backend/dataall/modules/dashboards/services/dashboard_service.py @@ -0,0 +1,138 @@ +from dataall.base.context import get_context +from dataall.core.activity.db.activity_models import Activity +from dataall.core.environment.env_permission_checker import has_group_permission +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.modules.catalog.db.glossary_repositories import Glossary +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.permissions.permission_checker import has_tenant_permission, has_resource_permission +from dataall.modules.vote.db.vote_repositories import Vote +from dataall.base.db.exceptions import UnauthorizedOperation +from dataall.modules.dashboards import DashboardRepository, Dashboard +from dataall.modules.dashboards.aws.dashboard_quicksight_client import DashboardQuicksightClient +from dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer +from dataall.modules.dashboards.services.dashboard_permissions import MANAGE_DASHBOARDS, GET_DASHBOARD, \ + UPDATE_DASHBOARD, CREATE_DASHBOARD, DASHBOARD_ALL, DELETE_DASHBOARD + + +class DashboardService: + """Service that serves request related to dashboard""" + @staticmethod + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(GET_DASHBOARD) + def get_dashboard(uri: str) -> Dashboard: + with 
get_context().db_engine.scoped_session() as session: + return DashboardRepository.get_dashboard_by_uri(session, uri) + + @staticmethod + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(CREATE_DASHBOARD) + @has_group_permission(CREATE_DASHBOARD) + def import_dashboard(uri: str, admin_group: str, data: dict = None) -> Dashboard: + context = get_context() + with context.db_engine.scoped_session() as session: + env = EnvironmentService.get_environment_by_uri(session, data['environmentUri']) + enabled = EnvironmentService.get_boolean_env_param(session, env, "dashboardsEnabled") + + if not enabled: + raise UnauthorizedOperation( + action=CREATE_DASHBOARD, + message=f'Dashboards feature is disabled for the environment {env.label}', + ) + + aws_client = DashboardQuicksightClient(context.username, env.AwsAccountId, env.region) + can_import = aws_client.can_import_dashboard(data.get('dashboardId')) + + if not can_import: + raise UnauthorizedOperation( + action=CREATE_DASHBOARD, + message=f'User: {context.username} has not AUTHOR rights on quicksight for the environment {env.label}', + ) + + env = data.get( + 'environment', EnvironmentService.get_environment_by_uri(session, uri) + ) + + dashboard = DashboardRepository.create_dashboard(session, env, context.username, data) + + activity = Activity( + action='DASHBOARD:CREATE', + label='DASHBOARD:CREATE', + owner=context.username, + summary=f'{context.username} created dashboard {dashboard.label} in {env.label}', + targetUri=dashboard.dashboardUri, + targetType='dashboard', + ) + session.add(activity) + + DashboardService._set_dashboard_resource_policy( + session, env, dashboard, data['SamlGroupName'] + ) + + DashboardService._update_glossary(session, dashboard, data) + DashboardIndexer.upsert(session, dashboard_uri=dashboard.dashboardUri) + return dashboard + + @staticmethod + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(UPDATE_DASHBOARD) + def update_dashboard(uri: str, data: dict = None) -> Dashboard: + with get_context().db_engine.scoped_session() as session: + dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) + for k in data.keys(): + setattr(dashboard, k, data.get(k)) + + DashboardService._update_glossary(session, dashboard, data) + environment = EnvironmentService.get_environment_by_uri(session, dashboard.environmentUri) + DashboardService._set_dashboard_resource_policy( + session, environment, dashboard, dashboard.SamlGroupName + ) + + DashboardIndexer.upsert(session, dashboard_uri=dashboard.dashboardUri) + return dashboard + + @staticmethod + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(DELETE_DASHBOARD) + def delete_dashboard(uri) -> bool: + with get_context().db_engine.scoped_session() as session: + dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) + DashboardRepository.delete_dashboard(session, dashboard) + + ResourcePolicy.delete_resource_policy( + session=session, resource_uri=uri, group=dashboard.SamlGroupName + ) + Glossary.delete_glossary_terms_links( + session, target_uri=dashboard.dashboardUri, target_type='Dashboard' + ) + Vote.delete_votes(session, dashboard.dashboardUri, 'dashboard') + + DashboardIndexer.delete_doc(doc_id=uri) + return True + + @staticmethod + def _set_dashboard_resource_policy(session, environment, dashboard, group): + DashboardService._attach_dashboard_policy(session, group, dashboard) + if environment.SamlGroupName != dashboard.SamlGroupName: + DashboardService._attach_dashboard_policy(session, 
environment.SamlGroupName, dashboard) + + @staticmethod + def _attach_dashboard_policy(session, group: str, dashboard: Dashboard): + ResourcePolicy.attach_resource_policy( + session=session, + group=group, + permissions=DASHBOARD_ALL, + resource_uri=dashboard.dashboardUri, + resource_type=Dashboard.__name__, + ) + + @staticmethod + def _update_glossary(session, dashboard, data): + context = get_context() + if 'terms' in data: + Glossary.set_glossary_terms_links( + session, + context.username, + dashboard.dashboardUri, + 'Dashboard', + data['terms'], + ) diff --git a/backend/dataall/modules/dashboards/services/dashboard_share_service.py b/backend/dataall/modules/dashboards/services/dashboard_share_service.py new file mode 100644 index 000000000..3e89ddfbe --- /dev/null +++ b/backend/dataall/modules/dashboards/services/dashboard_share_service.py @@ -0,0 +1,123 @@ +from dataall.base.context import get_context +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.permissions.permission_checker import has_tenant_permission, has_resource_permission +from dataall.base.db.exceptions import InvalidInput, UnauthorizedOperation +from dataall.modules.dashboards import DashboardRepository +from dataall.modules.dashboards.db.dashboard_models import DashboardShareStatus, Dashboard +from dataall.modules.dashboards.services.dashboard_permissions import SHARE_DASHBOARD, MANAGE_DASHBOARDS, GET_DASHBOARD, \ + CREATE_DASHBOARD + + +class DashboardShareService: + @staticmethod + def _get_dashboard_uri_by_share_uri(session, uri): + share = DashboardRepository.get_dashboard_share_by_uri(session, uri) + dashboard = DashboardRepository.get_dashboard_by_uri(session, share.dashboardUri) + return dashboard.dashboardUri + + @staticmethod + @has_tenant_permission(MANAGE_DASHBOARDS) + def request_dashboard_share(uri: str, principal_id: str): + context = get_context() + with context.db_engine.scoped_session() as session: + dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) + if dashboard.SamlGroupName == principal_id: + raise UnauthorizedOperation( + action=CREATE_DASHBOARD, + message=f'Team {dashboard.SamlGroupName} is the owner of the dashboard {dashboard.label}', + ) + + share = DashboardRepository.find_share_for_group(session, dashboard.dashboardUri, principal_id) + if not share: + share = DashboardRepository.create_share(session, context.username, dashboard, principal_id) + else: + DashboardShareService._check_share_status(share) + + if share.status == DashboardShareStatus.REJECTED.value: + share.status = DashboardShareStatus.REQUESTED.value + + return share + + @staticmethod + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(SHARE_DASHBOARD, parent_resource=_get_dashboard_uri_by_share_uri) + def approve_dashboard_share(uri: str): + with get_context().db_engine.scoped_session() as session: + share = DashboardRepository.get_dashboard_share_by_uri(session, uri) + DashboardShareService._change_share_status(share, DashboardShareStatus.APPROVED) + DashboardShareService._create_share_policy(session, share.SamlGroupName, share.dashboardUri) + return share + + @staticmethod + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(SHARE_DASHBOARD, parent_resource=_get_dashboard_uri_by_share_uri) + def reject_dashboard_share(uri: str): + with get_context().db_engine.scoped_session() as session: + share = DashboardRepository.get_dashboard_share_by_uri(session, uri) + DashboardShareService._change_share_status(share, 
DashboardShareStatus.REJECTED) + + ResourcePolicy.delete_resource_policy( + session=session, + group=share.SamlGroupName, + resource_uri=share.dashboardUri, + resource_type=Dashboard.__name__, + ) + + return share + + @staticmethod + def list_dashboard_shares(uri: str, data: dict): + context = get_context() + with context.db_engine.scoped_session() as session: + return DashboardRepository.paginated_dashboard_shares( + session=session, + username=context.username, + groups=context.groups, + uri=uri, + data=data, + ) + + @staticmethod + @has_tenant_permission(MANAGE_DASHBOARDS) + @has_resource_permission(SHARE_DASHBOARD) + def share_dashboard(uri: str, principal_id: str): + context = get_context() + with context.db_engine.scoped_session() as session: + dashboard = DashboardRepository.get_dashboard_by_uri(session, uri) + share = DashboardRepository.create_share( + session=session, + username=context.username, + dashboard=dashboard, + principal_id=principal_id, + init_status=DashboardShareStatus.APPROVED + ) + + DashboardShareService._create_share_policy(session, principal_id, dashboard.dashboardUri) + return share + + @staticmethod + def _change_share_status(share, status): + DashboardShareService._check_share_status(share) + if share.status == status.value: + return share + + share.status = status.value + + @staticmethod + def _check_share_status(share): + if share.status not in DashboardShareStatus.__members__: + raise InvalidInput( + 'Share status', + share.status, + str(DashboardShareStatus.__members__), + ) + + @staticmethod + def _create_share_policy(session, principal_id, dashboard_uri): + ResourcePolicy.attach_resource_policy( + session=session, + group=principal_id, + permissions=[GET_DASHBOARD], + resource_uri=dashboard_uri, + resource_type=Dashboard.__name__, + ) diff --git a/backend/dataall/modules/datapipelines/__init__.py b/backend/dataall/modules/datapipelines/__init__.py new file mode 100644 index 000000000..1596f2df8 --- /dev/null +++ b/backend/dataall/modules/datapipelines/__init__.py @@ -0,0 +1,70 @@ +"""Contains the code related to datapipelines""" +import logging +from typing import List, Type + +from dataall.base.loader import ModuleInterface, ImportMode +from dataall.core.environment.services.environment_resource_manager import EnvironmentResourceManager +from dataall.modules.datapipelines.db.datapipelines_models import DataPipeline +from dataall.modules.datapipelines.db.datapipelines_repositories import DatapipelinesRepository +from dataall.modules.datapipelines.services.datapipelines_permissions import \ + GET_PIPELINE, UPDATE_PIPELINE + +log = logging.getLogger(__name__) + + +class DatapipelinesApiModuleInterface(ModuleInterface): + """Implements ModuleInterface for datapipelines GraphQl lambda""" + + @staticmethod + def is_supported(modes): + return ImportMode.API in modes + + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + from dataall.modules.feed import FeedApiModuleInterface + + return [FeedApiModuleInterface] + + def __init__(self): + # these imports are placed inside the method because they are only related to GraphQL api. 
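+        # (this interface is only instantiated when the module loader runs with
+        # ImportMode.API, so the handlers and CDK entry points never import the
+        # GraphQL registrations performed below)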
+ from dataall.core.stacks.db.target_type_repositories import TargetType + from dataall.modules.feed.api.registry import FeedRegistry, FeedDefinition + + import dataall.modules.datapipelines.api + FeedRegistry.register(FeedDefinition("DataPipeline", DataPipeline)) + + TargetType("pipeline", GET_PIPELINE, UPDATE_PIPELINE) + + EnvironmentResourceManager.register(DatapipelinesRepository()) + + log.info("API of datapipelines has been imported") + + +class DatapipelinesAsyncHandlersModuleInterface(ModuleInterface): + """Implements ModuleInterface for datapipelines async lambda""" + + @staticmethod + def is_supported(modes: List[ImportMode]): + return ImportMode.HANDLERS in modes + + def __init__(self): + import dataall.modules.datapipelines.handlers + log.info("Datapipelines handlers have been imported") + + +class DatapipelinesCdkModuleInterface(ModuleInterface): + """Loads datapipelines cdk stacks """ + + @staticmethod + def is_supported(modes: List[ImportMode]): + return ImportMode.CDK in modes + + def __init__(self): + import dataall.modules.datapipelines.cdk + from dataall.base.cdkproxy.cdk_cli_wrapper import _CDK_CLI_WRAPPER_EXTENSIONS + from dataall.modules.datapipelines.cdk.datapipelines_cdk_cli_wrapper_extension import \ + DatapipelinesCDKCliWrapperExtension + + _CDK_CLI_WRAPPER_EXTENSIONS['cdkpipeline'] = DatapipelinesCDKCliWrapperExtension() + + log.info("Datapipelines stacks have been imported") diff --git a/backend/dataall/modules/datapipelines/api/__init__.py b/backend/dataall/modules/datapipelines/api/__init__.py new file mode 100644 index 000000000..c54c7e8d3 --- /dev/null +++ b/backend/dataall/modules/datapipelines/api/__init__.py @@ -0,0 +1,9 @@ +from dataall.modules.datapipelines.api import ( + input_types, + mutations, + queries, + resolvers, + types, +) + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/modules/datapipelines/api/enums.py b/backend/dataall/modules/datapipelines/api/enums.py new file mode 100644 index 000000000..2d1710607 --- /dev/null +++ b/backend/dataall/modules/datapipelines/api/enums.py @@ -0,0 +1,7 @@ +from dataall.base.api.constants import GraphQLEnumMapper + + +class DataPipelineRole(GraphQLEnumMapper): + Creator = '999' + Admin = '900' + NoPermission = '000' diff --git a/backend/dataall/modules/datapipelines/api/input_types.py b/backend/dataall/modules/datapipelines/api/input_types.py new file mode 100644 index 000000000..3d6b4556a --- /dev/null +++ b/backend/dataall/modules/datapipelines/api/input_types.py @@ -0,0 +1,75 @@ +from dataall.base.api import gql + +NewDataPipelineInput = gql.InputType( + name='NewDataPipelineInput', + arguments=[ + gql.Argument(name='label', type=gql.NonNullableType(gql.String)), + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='description', type=gql.String), + gql.Argument(name='SamlGroupName', type=gql.NonNullableType(gql.String)), + gql.Argument(name='tags', type=gql.ArrayType(gql.String)), + gql.Argument(name='devStrategy', type=gql.NonNullableType(gql.String)), + ], +) + +NewDataPipelineEnvironmentInput = gql.InputType( + name='NewDataPipelineEnvironmentInput', + arguments=[ + gql.Argument(name='stage', type=gql.NonNullableType(gql.String)), + gql.Argument(name='order', type=gql.NonNullableType(gql.Integer)), + gql.Argument(name='pipelineUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='environmentLabel', type=gql.NonNullableType(gql.String)), + gql.Argument(name='environmentUri', 
type=gql.NonNullableType(gql.String)), + gql.Argument(name='samlGroupName', type=gql.NonNullableType(gql.String)), + ], +) + +UpdateDataPipelineInput = gql.InputType( + name='UpdateDataPipelineInput', + arguments=[ + gql.Argument(name='label', type=gql.String), + gql.Argument(name='description', type=gql.String), + gql.Argument(name='tags', type=gql.ArrayType(gql.String)), + ], +) + +DataPipelineFilter = gql.InputType( + name='DataPipelineFilter', + arguments=[ + gql.Argument(name='term', type=gql.String), + gql.Argument(name='region', type=gql.ArrayType(gql.String)), + gql.Argument(name='tags', type=gql.ArrayType(gql.String)), + gql.Argument(name='type', type=gql.ArrayType(gql.String)), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + ], +) + +DataPipelineEnvironmentFilter = gql.InputType( + name='DataPipelineEnvironmentFilter', + arguments=[ + gql.Argument(name='term', type=gql.String), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + gql.Argument(name='pipelineUri', type=gql.String), + ], +) + +DataPipelineBrowseInput = gql.InputType( + name='DataPipelineBrowseInput', + arguments=[ + gql.Argument(name='DataPipelineUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='branch', type=gql.NonNullableType(gql.String)), + gql.Argument(name='folderPath', type=gql.String), + ], +) + + +DataPipelineFileContentInput = gql.InputType( + name='DataPipelineFileContentInput', + arguments=[ + gql.Argument(name='DataPipelineUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='branch', type=gql.NonNullableType(gql.String)), + gql.Argument(name='absolutePath', type=gql.NonNullableType(gql.String)), + ], +) diff --git a/backend/dataall/modules/datapipelines/api/mutations.py b/backend/dataall/modules/datapipelines/api/mutations.py new file mode 100644 index 000000000..487adc8b3 --- /dev/null +++ b/backend/dataall/modules/datapipelines/api/mutations.py @@ -0,0 +1,65 @@ +from dataall.base.api import gql +from dataall.modules.datapipelines.api.resolvers import create_pipeline, update_pipeline, delete_pipeline, \ + create_pipeline_environment, delete_pipeline_environment, update_pipeline_environment + +createDataPipeline = gql.MutationField( + name='createDataPipeline', + type=gql.Ref('DataPipeline'), + args=[ + gql.Argument( + name='input', type=gql.NonNullableType(gql.Ref('NewDataPipelineInput')) + ) + ], + resolver=create_pipeline, +) + +updateDataPipeline = gql.MutationField( + name='updateDataPipeline', + type=gql.Ref('DataPipeline'), + args=[ + gql.Argument(name='DataPipelineUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='input', type=gql.Ref('UpdateDataPipelineInput')), + ], + resolver=update_pipeline, +) + +deleteDataPipeline = gql.MutationField( + name='deleteDataPipeline', + type=gql.Boolean, + args=[ + gql.Argument(name='DataPipelineUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='deleteFromAWS', type=gql.Boolean), + ], + resolver=delete_pipeline, +) + +createDataPipelineEnvironment = gql.MutationField( + name='createDataPipelineEnvironment', + type=gql.Ref('DataPipelineEnvironment'), + args=[ + gql.Argument( + name='input', type=gql.NonNullableType(gql.Ref('NewDataPipelineEnvironmentInput')) + ) + ], + resolver=create_pipeline_environment, +) + +deleteDataPipelineEnvironment = gql.MutationField( + name='deleteDataPipelineEnvironment', + type=gql.Boolean, + args=[ + gql.Argument(name='envPipelineUri', type=gql.NonNullableType(gql.String)) + 
], + resolver=delete_pipeline_environment, +) + +updateDataPipelineEnvironment = gql.MutationField( + name='updateDataPipelineEnvironment', + type=gql.Ref('DataPipelineEnvironment'), + args=[ + gql.Argument( + name='input', type=gql.NonNullableType(gql.Ref('NewDataPipelineEnvironmentInput')) + ) + ], + resolver=update_pipeline_environment, +) diff --git a/backend/dataall/modules/datapipelines/api/queries.py b/backend/dataall/modules/datapipelines/api/queries.py new file mode 100644 index 000000000..3b964169c --- /dev/null +++ b/backend/dataall/modules/datapipelines/api/queries.py @@ -0,0 +1,65 @@ +from dataall.base.api import gql +from dataall.modules.datapipelines.api.resolvers import list_pipelines, get_pipeline, ls, list_branches, cat, \ + get_creds, list_pipeline_environments, get_pipeline_environment + +listDataPipelines = gql.QueryField( + name='listDataPipelines', + args=[gql.Argument(name='filter', type=gql.Ref('DataPipelineFilter'))], + resolver=list_pipelines, + type=gql.Ref('DataPipelineSearchResults'), +) + +getDataPipeline = gql.QueryField( + name='getDataPipeline', + args=[gql.Argument(name='DataPipelineUri', type=gql.NonNullableType(gql.String))], + type=gql.Ref('DataPipeline'), + resolver=get_pipeline, +) + + +browseDataPipelineRepository = gql.QueryField( + name='browseDataPipelineRepository', + args=[ + gql.Argument( + name='input', type=gql.NonNullableType(gql.Ref('DataPipelineBrowseInput')) + ) + ], + resolver=ls, + type=gql.String, +) + +listDataPipelineBranches = gql.QueryField( + name='listDataPipelineBranches', + args=[gql.Argument(name='DataPipelineUri', type=gql.NonNullableType(gql.String))], + resolver=list_branches, + type=gql.ArrayType(gql.String), +) + + +getDataPipelineFileContent = gql.QueryField( + name='getDataPipelineFileContent', + args=[gql.Argument(name='input', type=gql.Ref('DataPipelineFileContentInput'))], + resolver=cat, + type=gql.String, +) + +getDataPipelineCredsLinux = gql.QueryField( + name='getDataPipelineCredsLinux', + args=[gql.Argument(name='DataPipelineUri', type=gql.NonNullableType(gql.String))], + type=gql.String, + resolver=get_creds, +) + +listDataPipelineEnvironments = gql.QueryField( + name='listDataPipelineEnvironments', + args=[gql.Argument(name='filter', type=gql.Ref('DataPipelineEnvironmentFilter'))], + resolver=list_pipeline_environments, + type=gql.Ref('DataPipelineEnvironmentSearchResults'), +) + +getDataPipelineEnvironment = gql.QueryField( + name='getDataPipelineEnvironment', + args=[gql.Argument(name='envPipelineUri', type=gql.NonNullableType(gql.String))], + type=gql.Ref('DataPipelineEnvironment'), + resolver=get_pipeline_environment, +) diff --git a/backend/dataall/modules/datapipelines/api/resolvers.py b/backend/dataall/modules/datapipelines/api/resolvers.py new file mode 100644 index 000000000..1c734a4ac --- /dev/null +++ b/backend/dataall/modules/datapipelines/api/resolvers.py @@ -0,0 +1,310 @@ +import json +import logging + +from dataall.base.api.context import Context +from dataall.core.tasks.service_handlers import Worker +from dataall.base.context import get_context +from dataall.core.environment.db.environment_models import Environment +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.stacks.api import stack_helper +from dataall.core.stacks.db.stack_repositories import Stack +from dataall.core.tasks.db.task_models import Task +from dataall.base.db import exceptions +from dataall.modules.datapipelines.api.enums import DataPipelineRole +from 
dataall.modules.datapipelines.db.datapipelines_models import DataPipeline, DataPipelineEnvironment +from dataall.modules.datapipelines.db.datapipelines_repositories import DatapipelinesRepository +from dataall.modules.datapipelines.services.datapipelines_service import DataPipelineService + +log = logging.getLogger(__name__) + + +def create_pipeline(context: Context, source, input=None): + _validate_input(input) + + with context.engine.scoped_session() as session: + pipeline = DataPipelineService.create_pipeline( + session=session, + admin_group=input['SamlGroupName'], + username=context.username, + uri=input['environmentUri'], + data=input, + ) + if input['devStrategy'] == 'cdk-trunk': + Stack.create_stack( + session=session, + environment_uri=pipeline.environmentUri, + target_type='cdkpipeline', + target_uri=pipeline.DataPipelineUri, + target_label=pipeline.label, + payload={'account': pipeline.AwsAccountId, 'region': pipeline.region}, + ) + else: + Stack.create_stack( + session=session, + environment_uri=pipeline.environmentUri, + target_type='pipeline', + target_uri=pipeline.DataPipelineUri, + target_label=pipeline.label, + payload={'account': pipeline.AwsAccountId, 'region': pipeline.region}, + ) + + stack_helper.deploy_stack(pipeline.DataPipelineUri) + + return pipeline + + +def create_pipeline_environment(context: Context, source, input=None): + with context.engine.scoped_session() as session: + pipeline_env = DataPipelineService.create_pipeline_environment( + session=session, + admin_group=input['samlGroupName'], + uri=input['environmentUri'], + username=context.username, + data=input, + ) + return pipeline_env + + +def update_pipeline(context: Context, source, DataPipelineUri: str, input: dict = None): + with context.engine.scoped_session() as session: + pipeline = DataPipelineService.update_pipeline( + session=session, + uri=DataPipelineUri, + data=input, + ) + if (pipeline.template == ""): + stack_helper.deploy_stack(pipeline.DataPipelineUri) + + return pipeline + + +def list_pipelines(context: Context, source, filter: dict = None): + if not filter: + filter = {} + with context.engine.scoped_session() as session: + return DatapipelinesRepository.paginated_user_pipelines( + session=session, + username=context.username, + groups=context.groups, + data=filter, + ) + + +def get_pipeline(context: Context, source, DataPipelineUri: str = None): + with context.engine.scoped_session() as session: + return DataPipelineService.get_pipeline( + session=session, + uri=DataPipelineUri, + ) + + +def resolve_user_role(context: Context, source: DataPipeline): + if not source: + return None + if context.username and source.owner == context.username: + return DataPipelineRole.Creator.value + elif context.groups and source.SamlGroupName in context.groups: + return DataPipelineRole.Admin.value + return DataPipelineRole.NoPermission.value + + +def get_pipeline_environment(context: Context, source: DataPipelineEnvironment, **kwargs): + with context.engine.scoped_session() as session: + return DataPipelineService.get_pipeline_environment( + session=session, + uri=source.envPipelineUri, + ) + + +def list_pipeline_environments(context: Context, source: DataPipeline, filter: dict = None): + if not filter: + filter = {} + with context.engine.scoped_session() as session: + return DatapipelinesRepository.paginated_pipeline_environments( + session=session, + uri=source.DataPipelineUri, + data=filter + ) + + +def get_clone_url_http(context: Context, source: DataPipeline, **kwargs): + if not source: + return 
None + with context.engine.scoped_session() as session: + return DatapipelinesRepository.get_clone_url_http( + session=session, + environmentUri=source.environmentUri, + repo=source.repo + ) + + +def cat(context: Context, source, input: dict = None): + with context.engine.scoped_session() as session: + try: + response = DataPipelineService.cat( + session=session, + input=input + ) + except Exception as e: + log.error(f"Failed to execute task due to: {e}") + + return response[0]['response'].decode('ascii') + + +def ls(context: Context, source, input: dict = None): + with context.engine.scoped_session() as session: + try: + response = DataPipelineService.ls( + session=session, + input=input + ) + except Exception as e: + log.error(f"Failed to execute task due to: {e}") + + return json.dumps(response[0]['response']) + + +def list_branches(context: Context, source, DataPipelineUri: str = None): + with context.engine.scoped_session() as session: + try: + response = DataPipelineService.list_branches( + session=session, + datapipeline_uri=DataPipelineUri + ) + except Exception as e: + log.error(f"Failed to execute task due to: {e}") + + return response[0]['response'] + + +def get_stack(context, source: DataPipeline, **kwargs): + if not source: + return None + return stack_helper.get_stack_with_cfn_resources( + targetUri=source.DataPipelineUri, + environmentUri=source.environmentUri, + ) + + +def get_job_runs(context, source: DataPipeline, **kwargs): + if not source: + return None + with context.engine.scoped_session() as session: + try: + response = DataPipelineService.get_job_runs( + session=session, + datapipeline_uri=source.DataPipelineUri + ) + except Exception as e: + log.error(f"Failed to execute task due to: {e}") + + return response[0]['response'] + + +def get_pipeline_executions(context: Context, source: DataPipeline, **kwargs): + if not source: + return None + with context.engine.scoped_session() as session: + try: + response = DataPipelineService.get_pipeline_execution( + session=session, + datapipeline_uri=source.DataPipelineUri + ) + except Exception as e: + log.error(f"Failed to find pipeline execution for {source.DataPipelineUri}. 
Error {e}") + + return response[0]['response'] + + +def get_creds(context: Context, source, DataPipelineUri: str = None): + with context.engine.scoped_session() as session: + return DataPipelineService.get_credentials( + session=session, + uri=DataPipelineUri + ) + + +def _delete_repository( + target_uri, accountid, cdk_role_arn, region, repo_name +): + context = get_context() + with context.db_engine.scoped_session() as session: + task = Task( + targetUri=target_uri, + action='repo.datapipeline.delete', + payload={ + 'accountid': accountid, + 'region': region, + 'cdk_role_arn': cdk_role_arn, + 'repo_name': repo_name, + }, + ) + session.add(task) + Worker.queue(context.db_engine, [task.taskUri]) + + return True + + +def delete_pipeline( + context: Context, source, DataPipelineUri: str = None, deleteFromAWS: bool = None +): + with context.engine.scoped_session() as session: + pipeline: DataPipeline = DatapipelinesRepository.get_pipeline_by_uri( + session, DataPipelineUri + ) + env: Environment = EnvironmentService.get_environment_by_uri( + session, pipeline.environmentUri + ) + + DataPipelineService.delete_pipeline( + session=session, + uri=DataPipelineUri, + pipeline=pipeline + ) + + if deleteFromAWS: + _delete_repository( + target_uri=DataPipelineUri, + accountid=env.AwsAccountId, + cdk_role_arn=env.CDKRoleArn, + region=env.region, + repo_name=pipeline.repo, + ) + stack_helper.delete_stack( + target_uri=DataPipelineUri, + accountid=env.AwsAccountId, + cdk_role_arn=env.CDKRoleArn, + region=env.region, + ) + + return True + + +def delete_pipeline_environment(context: Context, source, envPipelineUri: str = None): + with context.engine.scoped_session() as session: + DatapipelinesRepository.delete_pipeline_environment( + session=session, + envPipelineUri=envPipelineUri + ) + return True + + +def update_pipeline_environment(context: Context, source, input=None): + with context.engine.scoped_session() as session: + pipeline_env = DataPipelineService.update_pipeline_environment( + session=session, + data=input, + uri=input['pipelineUri'], + ) + return pipeline_env + + +def _validate_input(data): + if not data: + raise exceptions.RequiredParameter(data) + if not data.get('environmentUri'): + raise exceptions.RequiredParameter('environmentUri') + if not data.get('SamlGroupName'): + raise exceptions.RequiredParameter('group') + if not data.get('label'): + raise exceptions.RequiredParameter('label') diff --git a/backend/dataall/modules/datapipelines/api/types.py b/backend/dataall/modules/datapipelines/api/types.py new file mode 100644 index 000000000..424eafccd --- /dev/null +++ b/backend/dataall/modules/datapipelines/api/types.py @@ -0,0 +1,85 @@ +from dataall.base.api import gql +from dataall.modules.datapipelines.api.resolvers import list_pipeline_environments, \ + get_clone_url_http, get_stack, resolve_user_role +from dataall.modules.datapipelines.api.enums import DataPipelineRole +from dataall.core.environment.api.resolvers import resolve_environment +from dataall.core.organizations.api.resolvers import resolve_organization_by_env + +DataPipeline = gql.ObjectType( + name='DataPipeline', + fields=[ + gql.Field('DataPipelineUri', type=gql.ID), + gql.Field('name', type=gql.String), + gql.Field('label', type=gql.String), + gql.Field('description', type=gql.String), + gql.Field('tags', type=gql.ArrayType(gql.String)), + gql.Field('created', type=gql.String), + gql.Field('updated', type=gql.String), + gql.Field('owner', type=gql.String), + gql.Field('repo', type=gql.String), + 
gql.Field('SamlGroupName', type=gql.String), + gql.Field( + 'organization', type=gql.Ref('Organization'), resolver=resolve_organization_by_env + ), + gql.Field( + 'environment', type=gql.Ref('Environment'), resolver=resolve_environment + ), + gql.Field( + 'developmentEnvironments', + type=gql.Ref('DataPipelineEnvironmentSearchResults'), + resolver=list_pipeline_environments, + ), + gql.Field('template', type=gql.String), + gql.Field('devStrategy', type=gql.String), + gql.Field('cloneUrlHttp', gql.String, resolver=get_clone_url_http), + gql.Field('stack', gql.Ref('Stack'), resolver=get_stack), + # gql.Field('cicdStack', gql.Ref('Stack'), resolver=get_cicd_stack), + gql.Field( + 'userRoleForPipeline', + type=DataPipelineRole.toGraphQLEnum(), + resolver=resolve_user_role, + ), + ], +) + +DataPipelineSearchResults = gql.ObjectType( + name='DataPipelineSearchResults', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(DataPipeline)), + ], +) + + +DataPipelineEnvironment = gql.ObjectType( + name='DataPipelineEnvironment', + fields=[ + gql.Field(name='envPipelineUri', type=gql.String), + gql.Field(name='environmentUri', type=gql.String), + gql.Field(name='environmentLabel', type=gql.String), + gql.Field(name='pipelineUri', type=gql.String), + gql.Field(name='pipelineLabel', type=gql.String), + gql.Field(name='stage', type=gql.String), + gql.Field(name='order', type=gql.Integer), + gql.Field(name='region', type=gql.String), + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='samlGroupName', type=gql.String), + ], +) + + +DataPipelineEnvironmentSearchResults = gql.ObjectType( + name='DataPipelineEnvironmentSearchResults', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(DataPipelineEnvironment)), + ], +) diff --git a/backend/dataall/modules/datapipelines/aws/__init__.py b/backend/dataall/modules/datapipelines/aws/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/datapipelines/aws/codecommit_datapipeline_client.py b/backend/dataall/modules/datapipelines/aws/codecommit_datapipeline_client.py new file mode 100644 index 000000000..330f17e9b --- /dev/null +++ b/backend/dataall/modules/datapipelines/aws/codecommit_datapipeline_client.py @@ -0,0 +1,68 @@ +from dataall.base.aws.sts import SessionHelper + + +class DatapipelineCodecommitClient: + def __init__(self, aws_account_id, region) -> None: + self._session = SessionHelper.remote_session(aws_account_id) + self._client = self._session.client('codecommit', region_name=region) + + def get_file_content(self, repository, commit_specifier, file_path): + response = self._client.get_file( + repositoryName=repository, + commitSpecifier=commit_specifier, + filePath=file_path, + ) + return response['fileContent'] + + def get_folder_content(self, repository, commit_specifier, folder_path): + response = self._client.get_folder( + repositoryName=repository, + commitSpecifier=commit_specifier, + folderPath=folder_path, + ) + nodes = [] + for sub_folder in response['subFolders']: + get_folder_response = self._client.get_folder( + 
repositoryName=repository, + commitSpecifier=commit_specifier, + folderPath=sub_folder['absolutePath'], + ) + get_commit = self._client.get_commit( + repositoryName=repository, commitId=get_folder_response['commitId'] + ) + commit = get_commit['commit'] + nodes.append( + { + 'type': 'folder', + 'author': commit['author'], + 'relativePath': sub_folder['relativePath'], + 'absolutePath': sub_folder['absolutePath'], + } + ) + for file in response['files']: + get_file_response = self._client.get_file( + repositoryName=repository, + commitSpecifier=commit_specifier, + filePath=file['absolutePath'], + ) + get_commit = self._client.get_commit( + repositoryName=repository, commitId=get_file_response['commitId'] + ) + commit = get_commit['commit'] + nodes.append( + { + 'type': 'file', + 'author': commit['author'], + 'relativePath': file['relativePath'], + 'absolutePath': file['absolutePath'], + } + ) + return nodes + + def list_branches(self, repository): + response = self._client.list_branches(repositoryName=repository) + return response['branches'] + + def delete_repository(self, repository): + _ = self._client.delete_repository(repositoryName=repository) + return True diff --git a/backend/dataall/modules/datapipelines/aws/codepipeline_datapipeline_client.py b/backend/dataall/modules/datapipelines/aws/codepipeline_datapipeline_client.py new file mode 100644 index 000000000..8660bb7aa --- /dev/null +++ b/backend/dataall/modules/datapipelines/aws/codepipeline_datapipeline_client.py @@ -0,0 +1,29 @@ +import logging + +from botocore.exceptions import ClientError + +from dataall.base.aws.sts import SessionHelper + +log = logging.getLogger('aws:codepipeline') + + +class CodepipelineDatapipelineClient: + def __init__(self, aws_account_id, region) -> None: + self._aws_account_id = aws_account_id + self._region = region + self._session = SessionHelper.remote_session(aws_account_id) + self._client = self._session.client('codepipeline', region_name=region) + + def get_pipeline_execution_summaries(self, codepipeline_name): + executions = [] + try: + response = self._client.list_pipeline_executions( + pipelineName=codepipeline_name + ) + executions = response['pipelineExecutionSummaries'] + except ClientError as e: + log.warning( + f'Could not retrieve pipeline executions for {codepipeline_name} aws://{self._aws_account_id}:{self._region}' + ) + + return executions diff --git a/backend/dataall/modules/datapipelines/aws/glue_datapipeline_client.py b/backend/dataall/modules/datapipelines/aws/glue_datapipeline_client.py new file mode 100644 index 000000000..a24792ec6 --- /dev/null +++ b/backend/dataall/modules/datapipelines/aws/glue_datapipeline_client.py @@ -0,0 +1,21 @@ +import logging + +from botocore.exceptions import ClientError + +from dataall.base.aws.sts import SessionHelper + +log = logging.getLogger('aws:glue') + + +class GlueDatapipelineClient: + def __init__(self, aws_account_id, region) -> None: + self._session = SessionHelper.remote_session(aws_account_id) + self._client = self._session.client('glue', region_name=region) + + def get_job_runs(self, datapipeline_job_name): + try: + response = self._client.get_job_runs(JobName=datapipeline_job_name) + except ClientError as e: + log.warning(f'Could not retrieve pipeline runs , {str(e)}') + return [] + return response['JobRuns'] diff --git a/backend/dataall/cdkproxy/blueprints/cookiecutter_config.yaml b/backend/dataall/modules/datapipelines/blueprints/cookiecutter_config.yaml similarity index 100% rename from 
backend/dataall/cdkproxy/blueprints/cookiecutter_config.yaml rename to backend/dataall/modules/datapipelines/blueprints/cookiecutter_config.yaml diff --git a/backend/dataall/cdkproxy/blueprints/data_pipeline_blueprint/app_multiaccount.py b/backend/dataall/modules/datapipelines/blueprints/data_pipeline_blueprint/app_multiaccount.py similarity index 100% rename from backend/dataall/cdkproxy/blueprints/data_pipeline_blueprint/app_multiaccount.py rename to backend/dataall/modules/datapipelines/blueprints/data_pipeline_blueprint/app_multiaccount.py diff --git a/backend/dataall/modules/datapipelines/blueprints/data_pipeline_blueprint/ddk_app/__init__.py b/backend/dataall/modules/datapipelines/blueprints/data_pipeline_blueprint/ddk_app/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/cdkproxy/blueprints/data_pipeline_blueprint/ddk_app/ddk_app_stack_multiaccount.py b/backend/dataall/modules/datapipelines/blueprints/data_pipeline_blueprint/ddk_app/ddk_app_stack_multiaccount.py similarity index 100% rename from backend/dataall/cdkproxy/blueprints/data_pipeline_blueprint/ddk_app/ddk_app_stack_multiaccount.py rename to backend/dataall/modules/datapipelines/blueprints/data_pipeline_blueprint/ddk_app/ddk_app_stack_multiaccount.py diff --git a/backend/dataall/cdkproxy/blueprints/data_pipeline_blueprint/utils/config.py b/backend/dataall/modules/datapipelines/blueprints/data_pipeline_blueprint/utils/config.py similarity index 100% rename from backend/dataall/cdkproxy/blueprints/data_pipeline_blueprint/utils/config.py rename to backend/dataall/modules/datapipelines/blueprints/data_pipeline_blueprint/utils/config.py diff --git a/backend/dataall/modules/datapipelines/cdk/__init__.py b/backend/dataall/modules/datapipelines/cdk/__init__.py new file mode 100644 index 000000000..041290c87 --- /dev/null +++ b/backend/dataall/modules/datapipelines/cdk/__init__.py @@ -0,0 +1,13 @@ +from dataall.modules.datapipelines.cdk import datapipelines_cdk_cli_wrapper_extension, datapipelines_cdk_pipeline, \ + env_role_datapipelines_stepfunctions_policy, env_role_datapipelines_lambda_policy, env_role_datapipelines_cicd_policy, datapipelines_pipeline, \ + pivot_role_datapipelines_policy + +__all__ = [ + 'datapipelines_cdk_cli_wrapper_extension', + 'datapipelines_cdk_pipeline', + 'env_role_datapipelines_stepfunctions_policy', + 'env_role_datapipelines_lambda_policy', + 'env_role_datapipelines_cicd_policy', + 'datapipelines_pipeline', + 'pivot_role_datapipelines_policy' +] diff --git a/backend/dataall/modules/datapipelines/cdk/datapipelines_cdk_cli_wrapper_extension.py b/backend/dataall/modules/datapipelines/cdk/datapipelines_cdk_cli_wrapper_extension.py new file mode 100644 index 000000000..153e4e37e --- /dev/null +++ b/backend/dataall/modules/datapipelines/cdk/datapipelines_cdk_cli_wrapper_extension.py @@ -0,0 +1,47 @@ +import logging + +from dataall.base.aws.sts import SessionHelper +from dataall.base.cdkproxy.cdk_cli_wrapper import CDKCliWrapperExtension, \ + describe_stack, update_stack_output +from dataall.modules.datapipelines.cdk.datapipelines_cdk_pipeline import CDKPipelineStack +from dataall.modules.datapipelines.db.datapipelines_repositories import DatapipelinesRepository + + +logger = logging.getLogger('cdksass') + + +class DatapipelinesCDKCliWrapperExtension(CDKCliWrapperExtension): + def __init__(self): + pass + + def extend_deployment(self, stack, session, env): + cdkpipeline = CDKPipelineStack(stack.targetUri) + venv_name = cdkpipeline.venv_name if cdkpipeline.venv_name else 
None + self.pipeline = DatapipelinesRepository.get_pipeline_by_uri(session, stack.targetUri) + path = f'./cdkpipeline/{self.pipeline.repo}/' + if not venv_name: + logger.info('Successfully Updated CDK Pipeline') + meta = describe_stack(stack) + stack.stackid = meta['StackId'] + stack.status = meta['StackStatus'] + update_stack_output(session, stack) + return True, path + + aws = SessionHelper.remote_session(stack.accountid) + creds = aws.get_credentials() + env.update( + { + 'CDK_DEFAULT_REGION': stack.region, + 'AWS_REGION': stack.region, + 'AWS_DEFAULT_REGION': stack.region, + 'CDK_DEFAULT_ACCOUNT': stack.accountid, + 'AWS_ACCESS_KEY_ID': creds.access_key, + 'AWS_SECRET_ACCESS_KEY': creds.secret_key, + 'AWS_SESSION_TOKEN': creds.token, + } + ) + + return False, path + + def cleanup(self): + CDKPipelineStack.clean_up_repo(path=f'./{self.pipeline.repo}') diff --git a/backend/dataall/modules/datapipelines/cdk/datapipelines_cdk_pipeline.py b/backend/dataall/modules/datapipelines/cdk/datapipelines_cdk_pipeline.py new file mode 100644 index 000000000..b5570d8fc --- /dev/null +++ b/backend/dataall/modules/datapipelines/cdk/datapipelines_cdk_pipeline.py @@ -0,0 +1,284 @@ +import logging +import os +import sys +import subprocess + +from botocore.exceptions import ClientError + +from dataall.base import db +from dataall.base.aws.sts import SessionHelper +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.modules.datapipelines.db.datapipelines_repositories import DatapipelinesRepository + + +logger = logging.getLogger(__name__) + + +class CDKPipelineStack: + """ + Create a stack that contains CDK Continuous Integration and Delivery (CI/CD) pipeline. + + The pipeline is based on AWS DDK CICD CodePipeline pipelines + + - Defaults for source/synth - CodeCommit & cdk synth + - blueprint with DDK application code added in the CodeCommit repository + - ability to define development stages: dev, test, prod + - Ability to connect to private artifactory to pull artifacts from at synth + - Security best practices - ensures pipeline buckets block non-SSL, and are KMS-encrypted with rotated keys + - data.all metadata as environment variables accesible at synth + + """ + def get_engine(self): + envname = os.environ.get("envname", "local") + engine = db.get_engine(envname=envname) + return engine + + module_name = __file__ + + def __init__(self, target_uri): + engine = self.get_engine() + with engine.scoped_session() as session: + + self.pipeline = DatapipelinesRepository.get_pipeline_by_uri(session, target_uri) + self.pipeline_environment = EnvironmentService.get_environment_by_uri(session, self.pipeline.environmentUri) + # Development environments + self.development_environments = DatapipelinesRepository.query_pipeline_environments(session, target_uri) + + self.env, aws = CDKPipelineStack._set_env_vars(self.pipeline_environment) + + self.code_dir_path = os.path.dirname(os.path.abspath(__file__)) + + try: + codecommit_client = aws.client('codecommit', region_name=self.pipeline.region) + repository = CDKPipelineStack._check_repository(codecommit_client, self.pipeline.repo) + if repository: + self.venv_name = None + self.code_dir_path = os.path.realpath( + os.path.abspath( + os.path.join( + __file__, "..", "..", "blueprints", "data_pipeline_blueprint" + ) + ) + ) + CDKPipelineStack.write_ddk_json_multienvironment(path=self.code_dir_path, output_file="ddk.json", pipeline_environment=self.pipeline_environment, development_environments=self.development_environments) + 
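+ # keep app.py in sync with ddk.json so the CodeCommit put-file commands below push both regenerated files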
CDKPipelineStack.write_ddk_app_multienvironment(path=self.code_dir_path, output_file="app.py", pipeline=self.pipeline, development_environments=self.development_environments) + + logger.info(f"Pipeline Repo {self.pipeline.repo} Exists...Handling Update") + update_cmds = [ + f'REPO_NAME={self.pipeline.repo}', + 'COMMITID=$(aws codecommit get-branch --repository-name ${REPO_NAME} --branch-name main --query branch.commitId --output text)', + 'aws codecommit put-file --repository-name ${REPO_NAME} --branch-name main --file-content file://ddk.json --file-path ddk.json --parent-commit-id ${COMMITID} --cli-binary-format raw-in-base64-out', + 'COMMITID=$(aws codecommit get-branch --repository-name ${REPO_NAME} --branch-name main --query branch.commitId --output text)', + 'aws codecommit put-file --repository-name ${REPO_NAME} --branch-name main --file-content file://app.py --file-path app.py --parent-commit-id ${COMMITID} --cli-binary-format raw-in-base64-out', + ] + + process = subprocess.run( + "; ".join(update_cmds), + text=True, + shell=True, # nosec + encoding='utf-8', + cwd=self.code_dir_path, + env=self.env + ) + else: + raise Exception + except Exception as e: + self.venv_name = self.initialize_repo() + CDKPipelineStack.write_ddk_app_multienvironment(path=os.path.join(self.code_dir_path, self.pipeline.repo), output_file="app.py", pipeline=self.pipeline, development_environments=self.development_environments) + CDKPipelineStack.write_ddk_json_multienvironment(path=os.path.join(self.code_dir_path, self.pipeline.repo), output_file="ddk.json", pipeline_environment=self.pipeline_environment, development_environments=self.development_environments) + self.git_push_repo() + + def initialize_repo(self): + venv_name = ".venv" + cmd_init = [ + f"ddk init {self.pipeline.repo} --generate-only", + f"cd {self.pipeline.repo}", + "git init --initial-branch main", + f"ddk create-repository {self.pipeline.repo} -t application dataall -t team {self.pipeline.SamlGroupName}" + ] + + logger.info(f"Running Commands: {'; '.join(cmd_init)}") + + process = subprocess.run( + '; '.join(cmd_init), + text=True, + shell=True, # nosec + encoding='utf-8', + cwd=self.code_dir_path, + env=self.env + ) + if process.returncode == 0: + logger.info("Successfully Initialized New CDK/DDK App") + + return venv_name + + @staticmethod + def write_ddk_json_multienvironment(path, output_file, pipeline_environment, development_environments): + json_envs = "" + for env in development_environments: + json_env = f""", + "{env.stage}": {{ + "account": "{env.AwsAccountId}", + "region": "{env.region}", + "resources": {{ + "ddk-bucket": {{"versioned": false, "removal_policy": "destroy"}} + }} + }}""" + json_envs = json_envs + json_env + + json = f"""{{ + "environments": {{ + "cicd": {{ + "account": "{pipeline_environment.AwsAccountId}", + "region": "{pipeline_environment.region}" + }}{json_envs} + }} +}}""" + + with open(f'{path}/{output_file}', 'w') as text_file: + print(json, file=text_file) + + @staticmethod + def write_ddk_app_multienvironment(path, output_file, pipeline, development_environments): + header = f""" +# !/usr/bin/env python3 + +import aws_cdk as cdk +from aws_ddk_core.cicd import CICDPipelineStack +from ddk_app.ddk_app_stack import DdkApplicationStack +from aws_ddk_core.config import Config + +app = cdk.App() + +class ApplicationStage(cdk.Stage): + def __init__( + self, + scope, + environment_id: str, + **kwargs, + ) -> None: + super().__init__(scope, f"dataall-{{environment_id.title()}}", **kwargs) + 
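+         # deploy the DDK application stack into this stage's target account and region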
DdkApplicationStack(self, "DataPipeline-{pipeline.label}-{pipeline.DataPipelineUri}", environment_id) + +id = f"dataall-cdkpipeline-{pipeline.DataPipelineUri}" +config = Config() +( + CICDPipelineStack( + app, + id=id, + environment_id="cicd", + pipeline_name="{pipeline.label}", + ) + .add_source_action(repository_name="{pipeline.repo}") + .add_synth_action() + .build()""" + + stages = "" + for env in sorted(development_environments, key=lambda env: env.order): + stage = f""".add_stage("{env.stage}", ApplicationStage(app, "{env.stage}", env=config.get_env("{env.stage}")))""" + stages = stages + stage + footer = """ + .synth() +) + +app.synth() +""" + app = header + stages + footer + + with open(f'{path}/{output_file}', 'w') as text_file: + print(app, file=text_file) + + def git_push_repo(self): + git_cmds = [ + 'git config user.email "codebuild@example.com"', + 'git config user.name "CodeBuild"', + 'git config --local credential.helper "!aws codecommit credential-helper $@"', + "git config --local credential.UseHttpPath true", + "git add .", + "git commit -a -m 'Initial Commit' ", + "git push -u origin main" + ] + + logger.info(f"Running Commands: {'; '.join(git_cmds)}") + + process = subprocess.run( + '; '.join(git_cmds), + text=True, + shell=True, # nosec + encoding='utf-8', + cwd=os.path.join(self.code_dir_path, self.pipeline.repo), + env=self.env + ) + if process.returncode == 0: + logger.info("Successfully Pushed DDK App Code") + + @staticmethod + def clean_up_repo(path): + if path: + precmd = [ + 'deactivate;', + 'rm', + '-rf', + f"{path}" + ] + + cwd = os.path.dirname(os.path.abspath(__file__)) + logger.info(f"Running command : \n {' '.join(precmd)}") + + process = subprocess.run( + ' '.join(precmd), + text=True, + shell=True, # nosec + encoding='utf-8', + capture_output=True, + cwd=cwd + ) + + if process.returncode == 0: + print(f"Successfully cleaned cloned repo: {path}. 
{str(process.stdout)}") + else: + logger.error( + f'Failed clean cloned repo: {path} due to {str(process.stderr)}' + ) + else: + logger.info(f"Info:Path {path} not found") + return + + @staticmethod + def _check_repository(codecommit_client, repo_name): + repository = None + logger.info(f"Checking Repository Exists: {repo_name}") + try: + repository = codecommit_client.get_repository(repositoryName=repo_name) + except ClientError as e: + if e.response['Error']['Code'] == 'RepositoryDoesNotExistException': + logger.debug(f'Repository does not exists {repo_name} %s', e) + else: + raise e + return repository if repository else None + + @staticmethod + def _set_env_vars(pipeline_environment): + aws = SessionHelper.remote_session(pipeline_environment.AwsAccountId) + env_creds = aws.get_credentials() + + python_path = '/:'.join(sys.path)[1:] + ':/code' + os.getenv('PATH') + + env = { + 'AWS_REGION': pipeline_environment.region, + 'AWS_DEFAULT_REGION': pipeline_environment.region, + 'CURRENT_AWS_ACCOUNT': pipeline_environment.AwsAccountId, + 'PYTHONPATH': python_path, + 'PATH': python_path, + 'envname': os.environ.get('envname', 'local'), + 'COOKIECUTTER_CONFIG': "/dataall/modules/datapipelines/blueprints/cookiecutter_config.yaml", + } + if env_creds: + env.update( + { + 'AWS_ACCESS_KEY_ID': env_creds.access_key, + 'AWS_SECRET_ACCESS_KEY': env_creds.secret_key, + 'AWS_SESSION_TOKEN': env_creds.token + } + ) + return env, aws diff --git a/backend/dataall/modules/datapipelines/cdk/datapipelines_pipeline.py b/backend/dataall/modules/datapipelines/cdk/datapipelines_pipeline.py new file mode 100644 index 000000000..f58ac462e --- /dev/null +++ b/backend/dataall/modules/datapipelines/cdk/datapipelines_pipeline.py @@ -0,0 +1,570 @@ +import logging +import os +import shutil +import subprocess +from typing import List + +from aws_cdk import aws_codebuild as codebuild, Stack, RemovalPolicy, CfnOutput +from aws_cdk import aws_codecommit as codecommit +from aws_cdk import aws_codepipeline as codepipeline +from aws_cdk import aws_codepipeline_actions as codepipeline_actions +from aws_cdk import aws_iam as iam +from aws_cdk import aws_kms as kms +from aws_cdk.aws_s3_assets import Asset +from botocore.exceptions import ClientError + +from dataall.base import db +from dataall.base.aws.sts import SessionHelper +from dataall.base.cdkproxy.stacks.manager import stack +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.stacks.services.runtime_stacks_tagging import TagsUtil +from dataall.modules.datapipelines.db.datapipelines_models import DataPipeline, DataPipelineEnvironment +from dataall.modules.datapipelines.db.datapipelines_repositories import DatapipelinesRepository +from dataall.base.utils.cdk_nag_utils import CDKNagUtil + +logger = logging.getLogger(__name__) + + +@stack("pipeline") +class PipelineStack(Stack): + """ + Create a stack that contains CDK Continuous Integration and Delivery (CI/CD) pipeline. 
+ The pipeline is based on CodePipeline pipelines + - Defaults for source/synth - CodeCommit & cdk synth + - blueprint with DDK application code added in the CodeCommit repository + - ability to define development stages: dev, test, prod + - ability to select gitflow or trunk-based as development strategy + - Ability to connect to private artifactory to pull artifacts from at synth + - Security best practices - ensures pipeline buckets block non-SSL, and are KMS-encrypted with rotated keys + - data.all metadata as environment variables accesible at synth + """ + + module_name = __file__ + + def get_engine(self): + envname = os.environ.get("envname", "local") + engine = db.get_engine(envname=envname) + return engine + + def get_target(self, target_uri) -> DataPipeline: + engine = self.get_engine() + with engine.scoped_session() as session: + return DatapipelinesRepository.get_pipeline_by_uri(session, target_uri) + + def get_pipeline_environments(self, targer_uri) -> DataPipelineEnvironment: + engine = self.get_engine() + with engine.scoped_session() as session: + envs = DatapipelinesRepository.query_pipeline_environments( + session, targer_uri + ) + return envs + + def get_pipeline_cicd_environment( + self, pipeline: DataPipeline + ) -> Environment: + envname = os.environ.get("envname", "local") + engine = db.get_engine(envname=envname) + with engine.scoped_session() as session: + return EnvironmentService.get_environment_by_uri(session, pipeline.environmentUri) + + def get_env_team(self, pipeline: DataPipeline) -> EnvironmentGroup: + engine = self.get_engine() + with engine.scoped_session() as session: + env = EnvironmentService.get_environment_group( + session, pipeline.SamlGroupName, pipeline.environmentUri + ) + return env + + def __init__(self, scope, id, target_uri: str = None, **kwargs): + kwargs.setdefault("tags", {}).update({"utility": "dataall-data-pipeline"}) + super().__init__( + scope, + id, + env=kwargs.get("env"), + stack_name=kwargs.get("stack_name"), + tags=kwargs.get("tags"), + description="Cloud formation stack of PIPELINE: {}; URI: {}; DESCRIPTION: {}".format( + self.get_target(target_uri=target_uri).label, + target_uri, + self.get_target(target_uri=target_uri).description, + )[ + :1024 + ], + ) + + # Configuration + self.target_uri = target_uri + + pipeline = self.get_target(target_uri=target_uri) + pipeline_environment = self.get_pipeline_cicd_environment(pipeline=pipeline) + pipeline_env_team = self.get_env_team(pipeline=pipeline) + # Development environments + development_environments = self.get_pipeline_environments(targer_uri=target_uri) + self.devStages = [env.stage for env in development_environments] + + # Support resources + build_role_policy = iam.Policy( + self, + f"{pipeline.name}-policy", + policy_name=f"{pipeline.name}-policy", + statements=self.make_codebuild_policy_statements( + pipeline_environment=pipeline_environment, + pipeline_env_team=pipeline_env_team, + pipeline=pipeline + ), + ) + + build_project_role = iam.Role( + self, + "PipelineRole", + role_name=pipeline.name, + inline_policies={f"Inline{pipeline.name}": build_role_policy.document}, + assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), + ) + + self.codebuild_key = kms.Key( + self, + f"{pipeline.name}-codebuild-key", + removal_policy=RemovalPolicy.DESTROY, + alias=f"{pipeline.name}-codebuild-key", + enable_key_rotation=True, + admins=[ + iam.ArnPrincipal(pipeline_environment.CDKRoleArn), + ], + policy=iam.PolicyDocument( + statements=[ + iam.PolicyStatement( + resources=["*"], + 
effect=iam.Effect.ALLOW, + principals=[ + build_project_role + ], + actions=[ + "kms:Encrypt", + "kms:Decrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*", + ], + ), + iam.PolicyStatement( + resources=["*"], + effect=iam.Effect.ALLOW, + principals=[ + iam.ArnPrincipal(pipeline_env_team.environmentIAMRoleArn), + build_project_role + ], + actions=[ + "kms:DescribeKey", + "kms:List*", + "kms:GetKeyPolicy", + ], + ) + ], + ), + ) + + # Create CodeCommit repository and mirror blueprint code + code_dir_path = os.path.realpath( + os.path.abspath( + os.path.join( + __file__, "..", "..", "blueprints", "data_pipeline_blueprint" + ) + ) + ) + logger.info(f"code directory path = {code_dir_path}") + env_vars, aws = PipelineStack._set_env_vars(pipeline_environment) + try: + repository = PipelineStack._check_repository(aws, pipeline_environment.region, pipeline.repo) + if repository: + PipelineStack.write_ddk_json_multienvironment(path=code_dir_path, output_file="ddk.json", pipeline_environment=pipeline_environment, development_environments=development_environments) + + logger.info(f"Pipeline Repo {pipeline.repo} Exists...Handling Update") + update_cmds = [ + f'REPO_NAME={pipeline.repo}', + 'COMMITID=$(aws codecommit get-branch --repository-name ${REPO_NAME} --branch-name main --query branch.commitId --output text)', + 'aws codecommit put-file --repository-name ${REPO_NAME} --branch-name main --file-content file://ddk.json --file-path ddk.json --parent-commit-id ${COMMITID} --cli-binary-format raw-in-base64-out', + ] + + process = subprocess.run( + "; ".join(update_cmds), + text=True, + shell=True, # nosec + encoding='utf-8', + cwd=code_dir_path, + env=env_vars + ) + else: + raise Exception + except Exception as e: + PipelineStack.initialize_repo(pipeline, code_dir_path, env_vars) + + PipelineStack.write_deploy_buildspec(path=code_dir_path, output_file=f"{pipeline.repo}/deploy_buildspec.yaml") + + PipelineStack.write_ddk_json_multienvironment(path=code_dir_path, output_file=f"{pipeline.repo}/ddk.json", pipeline_environment=pipeline_environment, development_environments=development_environments) + + logger.info(f"Pipeline Repo {pipeline.repo} Does Not Exists... 
Creating Repository") + + PipelineStack.cleanup_zip_directory(code_dir_path) + + PipelineStack.zip_directory(os.path.join(code_dir_path, pipeline.repo)) + code_asset = Asset( + scope=self, id=f"{pipeline.name}-asset", path=f"{code_dir_path}/{pipeline.repo}/code.zip" + ) + + code = codecommit.CfnRepository.CodeProperty( + s3=codecommit.CfnRepository.S3Property( + bucket=code_asset.s3_bucket_name, + key=code_asset.s3_object_key, + ) + ) + + repository = codecommit.CfnRepository( + scope=self, + code=code, + id="CodecommitRepository", + repository_name=pipeline.repo, + ) + repository.apply_removal_policy(RemovalPolicy.RETAIN) + + if pipeline.devStrategy == "trunk": + codepipeline_pipeline = codepipeline.Pipeline( + scope=self, + id=pipeline.name, + pipeline_name=pipeline.name, + restart_execution_on_update=True, + ) + self.codepipeline_pipeline = codepipeline_pipeline + self.source_artifact = codepipeline.Artifact() + + codepipeline_pipeline.add_stage( + stage_name='Source', + actions=[ + codepipeline_actions.CodeCommitSourceAction( + action_name='CodeCommit', + branch='main', + output=self.source_artifact, + trigger=codepipeline_actions.CodeCommitTrigger.POLL, + repository=codecommit.Repository.from_repository_name( + self, 'source_blueprint_repo', repository_name=pipeline.repo + ), + ) + ], + ) + + for env in sorted(development_environments, key=lambda env: env.order): + buildspec = "deploy_buildspec.yaml" + build_project = codebuild.PipelineProject( + scope=self, + id=f'{pipeline.name}-build-{env.stage}', + environment=codebuild.BuildEnvironment( + privileged=True, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + environment_variables=PipelineStack.make_environment_variables( + pipeline=pipeline, + pipeline_environment=env, + pipeline_env_team=env.samlGroupName, + stage=env.stage, + stages=self.devStages + ), + ), + role=build_project_role, + build_spec=codebuild.BuildSpec.from_source_filename(buildspec), + encryption_key=self.codebuild_key, + ) + + self.codepipeline_pipeline.add_stage( + stage_name=f'Deploy-Stage-{env.stage}', + actions=[ + codepipeline_actions.CodeBuildAction( + action_name=f'deploy-{env.stage}', + input=self.source_artifact, + project=build_project, + outputs=[codepipeline.Artifact()], + ) + ], + ) + + # Skip manual approval for one stage pipelines and for last stage + if env.order < development_environments.count(): + self.codepipeline_pipeline.add_stage( + stage_name=f'ManualApproval-{env.stage}', + actions=[ + codepipeline_actions.ManualApprovalAction( + action_name=f'ManualApproval-{env.stage}' + ) + ], + ) + + else: + for env in development_environments: + branch_name = 'main' if (env.stage == 'prod') else env.stage + buildspec = "deploy_buildspec.yaml" + + codepipeline_pipeline = codepipeline.Pipeline( + scope=self, + id=f"{pipeline.name}-{env.stage}", + pipeline_name=f"{pipeline.name}-{env.stage}", + restart_execution_on_update=True, + ) + self.codepipeline_pipeline = codepipeline_pipeline + self.source_artifact = codepipeline.Artifact() + + codepipeline_pipeline.add_stage( + stage_name=f'Source-{env.stage}', + actions=[ + codepipeline_actions.CodeCommitSourceAction( + action_name='CodeCommit', + branch=branch_name, + output=self.source_artifact, + trigger=codepipeline_actions.CodeCommitTrigger.POLL, + repository=codecommit.Repository.from_repository_name( + self, f'source_blueprint_repo_{env.stage}', repository_name=pipeline.repo + ), + ) + ], + ) + + build_project = codebuild.PipelineProject( + scope=self, + id=f'{pipeline.name}-build-{env.stage}', + 
environment=codebuild.BuildEnvironment( + privileged=True, + build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3, + environment_variables=PipelineStack.make_environment_variables( + pipeline=pipeline, + pipeline_environment=env, + pipeline_env_team=env.samlGroupName, + stage=env.stage, + stages=self.devStages + ), + ), + role=build_project_role, + build_spec=codebuild.BuildSpec.from_source_filename(buildspec), + encryption_key=self.codebuild_key, + ) + + self.codepipeline_pipeline.add_stage( + stage_name=f'Deploy-Stage-{env.stage}', + actions=[ + codepipeline_actions.CodeBuildAction( + action_name=f'deploy-{env.stage}', + input=self.source_artifact, + project=build_project, + outputs=[codepipeline.Artifact()], + ) + ], + ) + + # CloudFormation output + CfnOutput( + self, + "RepoNameOutput", + export_name=f"{pipeline.DataPipelineUri}-RepositoryName", + value=pipeline.repo, + ) + CfnOutput( + self, + "PipelineNameOutput", + export_name=f"{pipeline.DataPipelineUri}-PipelineName", + value=codepipeline_pipeline.pipeline_name, + ) + + TagsUtil.add_tags(stack=self, model=DataPipeline, target_type="pipeline") + + CDKNagUtil.check_rules(self) + + PipelineStack.cleanup_zip_directory(code_dir_path) + PipelineStack.cleanup_pipeline_directory(os.path.join(code_dir_path, pipeline.repo)) + + @staticmethod + def zip_directory(path): + try: + shutil.make_archive("code", "zip", path) + shutil.move("code.zip", f"{path}/code.zip") + except Exception as e: + logger.error(f"Failed to zip repository due to: {e}") + + @staticmethod + def cleanup_zip_directory(path): + if os.path.isfile(f"{path}/code.zip"): + os.remove(f"{path}/code.zip") + else: + logger.info("Info: %s Zip not found" % f"{path}/code.zip") + + @staticmethod + def cleanup_pipeline_directory(path): + if os.path.isdir(path): + shutil.rmtree(path) + else: + logger.info("Info: %s Directory not found" % f"{path}") + + @staticmethod + def make_environment_variables( + pipeline, + pipeline_environment, + pipeline_env_team, + stage, + stages + ): + + env_vars_1 = { + "PIPELINE_URI": codebuild.BuildEnvironmentVariable(value=pipeline.DataPipelineUri), + "PIPELINE_NAME": codebuild.BuildEnvironmentVariable(value=pipeline.name), + "STAGE": codebuild.BuildEnvironmentVariable(value=stage), + "DEV_STAGES": codebuild.BuildEnvironmentVariable(value=stages), + "DEV_STRATEGY": codebuild.BuildEnvironmentVariable(value=pipeline.devStrategy), + "TEMPLATE": codebuild.BuildEnvironmentVariable(value=pipeline.template), + "ENVIRONMENT_URI": codebuild.BuildEnvironmentVariable(value=pipeline_environment.environmentUri), + "AWSACCOUNTID": codebuild.BuildEnvironmentVariable(value=pipeline_environment.AwsAccountId), + "AWSREGION": codebuild.BuildEnvironmentVariable(value=pipeline_environment.region), + "ENVTEAM_ROLENAME": codebuild.BuildEnvironmentVariable(value=pipeline_env_team), + } + env_vars = dict(env_vars_1) + return env_vars + + @staticmethod + def write_deploy_buildspec(path, output_file): + yaml = """ + version: '0.2' + env: + git-credential-helper: yes + phases: + pre_build: + commands: + - n 16.15.1 + - npm install -g aws-cdk + - pip install aws-ddk + - pip install -r requirements.txt + build: + commands: + - aws sts get-caller-identity + - ddk deploy + """ + with open(f'{path}/{output_file}', 'x') as text_file: + print(yaml, file=text_file) + + @staticmethod + def make_codebuild_policy_statements( + pipeline_environment, + pipeline_env_team, + pipeline + ) -> List[iam.PolicyStatement]: + return [ + iam.PolicyStatement( + actions=[ + 
"ec2:DescribeAvailabilityZones", + "secretsmanager:GetSecretValue", + "secretsmanager:DescribeSecret", + "ssm:GetParametersByPath", + "ssm:GetParameters", + "ssm:GetParameter", + "codebuild:CreateReportGroup", + "codebuild:CreateReport", + "codebuild:UpdateReport", + "codebuild:BatchPutTestCases", + "codebuild:BatchPutCodeCoverages", + "codecommit:ListRepositories", + "sts:AssumeRole", + "cloudformation:DescribeStacks" + ], + resources=["*"], + ), + iam.PolicyStatement( + actions=[ + "codecommit:*" + ], + resources=[f"arn:aws:codecommit:{pipeline_environment.region}:{pipeline_environment.AwsAccountId}:{pipeline.repo}"], + ) + ] + + @staticmethod + def write_ddk_json_multienvironment(path, output_file, pipeline_environment, development_environments): + json_envs = "" + for env in development_environments: + json_env = f""", + "{env.stage}": {{ + "account": "{env.AwsAccountId}", + "region": "{env.region}", + "stage": "{env.stage}", + "env_vars": {{ + "database": "example_database", + "Team": "{env.samlGroupName}" + }} + }}""" + json_envs = json_envs + json_env + + json = f"""{{ + "environments": {{ + "cicd": {{ + "account": "{pipeline_environment.AwsAccountId}", + "region": "{pipeline_environment.region}", + "stage": "cicd" + }}{json_envs} + }} +}}""" + + with open(f'{path}/{output_file}', 'w') as text_file: + print(json, file=text_file) + + def initialize_repo(pipeline, code_dir_path, env_vars): + + venv_name = ".venv" + + cmd_init = [ + f"ddk init {pipeline.repo} --generate-only", + f"cp app_multiaccount.py ./{pipeline.repo}/app.py", + f"cp ddk_app/ddk_app_stack_multiaccount.py ./{pipeline.repo}/ddk_app/ddk_app_stack.py", + f"mkdir ./{pipeline.repo}/utils", + f"cp -R utils/* ./{pipeline.repo}/utils/" + ] + + logger.info(f"Running Commands: {'; '.join(cmd_init)}") + + process = subprocess.run( + '; '.join(cmd_init), + text=True, + shell=True, # nosec + encoding='utf-8', + cwd=code_dir_path, + env=env_vars + ) + if process.returncode == 0: + logger.info("Successfully Initialized New CDK/DDK App") + return + + @staticmethod + def _set_env_vars(pipeline_environment): + aws = SessionHelper.remote_session(pipeline_environment.AwsAccountId) + env_creds = aws.get_credentials() + + env = { + 'AWS_REGION': pipeline_environment.region, + 'AWS_DEFAULT_REGION': pipeline_environment.region, + 'CURRENT_AWS_ACCOUNT': pipeline_environment.AwsAccountId, + 'envname': os.environ.get('envname', 'local'), + 'COOKIECUTTER_CONFIG': "/dataall/modules/datapipelines/blueprints/cookiecutter_config.yaml", + } + if env_creds: + env.update( + { + 'AWS_ACCESS_KEY_ID': env_creds.access_key, + 'AWS_SECRET_ACCESS_KEY': env_creds.secret_key, + 'AWS_SESSION_TOKEN': env_creds.token + } + ) + return env, aws + + @staticmethod + def _check_repository(aws, region, repo_name): + codecommit_client = aws.client('codecommit', region_name=region) + repository = None + logger.info(f"Checking Repository Exists: {repo_name}") + try: + repository = codecommit_client.get_repository(repositoryName=repo_name) + except ClientError as e: + if e.response['Error']['Code'] == 'RepositoryDoesNotExistException': + logger.debug(f'Repository does not exists {repo_name} %s', e) + else: + raise e + return repository if repository else None diff --git a/backend/dataall/modules/datapipelines/cdk/env_role_datapipelines_cicd_policy.py b/backend/dataall/modules/datapipelines/cdk/env_role_datapipelines_cicd_policy.py new file mode 100644 index 000000000..db80be6f0 --- /dev/null +++ 
b/backend/dataall/modules/datapipelines/cdk/env_role_datapipelines_cicd_policy.py @@ -0,0 +1,177 @@ +from dataall.core.environment.cdk.env_role_core_policies.service_policy import ServicePolicy +from dataall.modules.datapipelines.services.datapipelines_permissions import CREATE_PIPELINE +from aws_cdk import aws_iam as iam + + +class AwsCICD(ServicePolicy): + """ + Class including all permissions needed to work with AWS CICD services: CodeCommit, CodePipeline and CodeBuild. + It allows data.all users to: + - Create and manage CodeBuild, CodeCommit and CodePipeline resources for the team + - Create an S3 Bucket for codepipeline prefixed by "codepipeline-" + - Read/Write to and from S3 Buckets prefixed by "codepipeline-" + """ + def get_statements(self, group_permissions, **kwargs): + if CREATE_PIPELINE not in group_permissions: + return [] + statements = [ + iam.PolicyStatement( + # sid="GenericCodeCommit", + actions=[ + 'codecommit:List*', + 'codecommit:CreateApprovalRuleTemplate', + 'codecommit:UpdateApprovalRuleTemplateName', + 'codecommit:GetApprovalRuleTemplate', + 'codecommit:DeleteApprovalRuleTemplate', + 'codecommit:UpdateApprovalRuleTemplateContent', + 'codecommit:UpdateApprovalRuleTemplateDescription', + ], + resources=['*'], + ), + iam.PolicyStatement( + # sid="TagCICD", + actions=[ + "codecommit:TagResource", + "codepipeline:TagResource" + ], + resources=[ + f'arn:aws:codecommit:{self.region}:{self.account}:{self.resource_prefix}*', + f'arn:aws:codepipeline:{self.region}:{self.account}:{self.resource_prefix}*', + f'arn:aws:codepipeline:{self.region}:{self.account}:actiontype:/*/*/*', + f'arn:aws:codepipeline:{self.region}:{self.account}:webhook:{self.resource_prefix}', + ], + conditions={ + 'StringEquals': { + f'aws:RequestTag/{self.tag_key}': [self.tag_value], + }, + }, + ), + iam.PolicyStatement( + # sid="AllCodecommitTeamRepo", + not_actions=[ + "codecommit:TagResource", + "codecommit:UntagResource", + ], + resources=[ + f'arn:aws:codecommit:{self.region}:{self.account}:{self.resource_prefix}*' + ], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_value], + }, + }, + ), + iam.PolicyStatement( + # sid="GenericCodePipeline", + actions=[ + 'codepipeline:AcknowledgeJob', + 'codepipeline:AcknowledgeThirdPartyJob', + 'codepipeline:GetThirdPartyJobDetails', + 'codepipeline:GetJobDetails', + 'codepipeline:GetActionType', + 'codepipeline:ListActionTypes', + 'codepipeline:ListPipelines', + 'codepipeline:PollForThirdPartyJobs', + 'codepipeline:PutThirdPartyJobSuccessResult', + 'codepipeline:PutThirdPartyJobFailureResult', + 'codepipeline:PutJobFailureResult', + 'codepipeline:PutJobSuccessResult', + ], + resources=['*'], + ), + iam.PolicyStatement( + # sid="AllCodepipelineTeamRepo", + not_actions=[ + "codepipeline:TagResource", + "codepipeline:UntagResource", + ], + resources=[ + f'arn:aws:codepipeline:{self.region}:{self.account}:{self.resource_prefix}*/*/*', + f'arn:aws:codepipeline:{self.region}:{self.account}:actiontype:/*/*/*', + f'arn:aws:codepipeline:{self.region}:{self.account}:{self.resource_prefix}*', + f'arn:aws:codepipeline:{self.region}:{self.account}:{self.resource_prefix}*/*', + f'arn:aws:codepipeline:{self.region}:{self.account}:webhook:{self.resource_prefix}', + ], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_value] + } + }, + ), + iam.PolicyStatement( + # sid="CodePipelineCreateS3Bucket", + effect=iam.Effect.ALLOW, + actions=[ + 's3:CreateBucket', + 's3:ListBucket', + 's3:PutBucketPublicAccessBlock', + 
's3:GetObject', + 's3:PutObject', + 's3:DeleteObject' + ], + resources=[ + f"arn:aws:s3:::codepipeline-{self.region}-{self.account}", + f"arn:aws:s3:::codepipeline-{self.region}-{self.account}/{self.resource_prefix}*" + ], + ), + iam.PolicyStatement( + # sid="GenericCodeBuild", + actions=[ + 'codebuild:ListCuratedEnvironmentImages', + 'codebuild:ListReportGroups', + 'codebuild:ListSourceCredentials', + 'codebuild:ListRepositories', + 'codebuild:ListSharedProjects', + 'codebuild:ListBuildBatches', + 'codebuild:ListSharedReportGroups', + 'codebuild:ImportSourceCredentials', + 'codebuild:ListReports', + 'codebuild:ListBuilds', + 'codebuild:DeleteOAuthToken', + 'codebuild:ListProjects', + 'codebuild:DeleteSourceCredentials', + 'codebuild:PersistOAuthToken', + 'codebuild:ListConnectedOAuthAccounts', + ], + resources=['*'], + ), + iam.PolicyStatement( + # sid="TagCodebuildTeamRepo", + actions=[ + 'codebuild:CreateProject', + 'codebuild:UpdateProject', + 'codebuild:UpdateProjectVisibility', + 'codebuild:CreateReportGroup', + 'codebuild:UpdateReportGroup', + ], + resources=[ + f'arn:aws:codebuild:{self.region}:{self.account}:project/{self.resource_prefix}*', + f'arn:aws:codebuild:{self.region}:{self.account}:report-group/{self.resource_prefix}*', + ], + conditions={ + 'StringEquals': { + f'aws:RequestTag/{self.tag_key}': [self.tag_value] + } + }, + ), + iam.PolicyStatement( + # sid="AllCodebuildTeamRepo", + not_actions=[ + 'codebuild:CreateProject', + 'codebuild:UpdateProject', + 'codebuild:UpdateProjectVisibility', + 'codebuild:CreateReportGroup', + 'codebuild:UpdateReportGroup', + ], + resources=[ + f'arn:aws:codebuild:{self.region}:{self.account}:project/{self.resource_prefix}*', + f'arn:aws:codebuild:{self.region}:{self.account}:report-group/{self.resource_prefix}*', + ], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_value] + } + }, + ) + ] + return statements diff --git a/backend/dataall/modules/datapipelines/cdk/env_role_datapipelines_lambda_policy.py b/backend/dataall/modules/datapipelines/cdk/env_role_datapipelines_lambda_policy.py new file mode 100644 index 000000000..8273feec9 --- /dev/null +++ b/backend/dataall/modules/datapipelines/cdk/env_role_datapipelines_lambda_policy.py @@ -0,0 +1,102 @@ +from dataall.core.environment.cdk.env_role_core_policies.service_policy import ServicePolicy +from dataall.modules.datapipelines.services.datapipelines_permissions import CREATE_PIPELINE +from aws_cdk import aws_iam as iam + + +class Lambda(ServicePolicy): + """ + Class including all permissions needed to work with AWS Lambda. 
+ It allows data.all users to: + - List Lambda resources + - Create and manage team Lambda resources + - Log Lambda executions + """ + def get_statements(self, group_permissions, **kwargs): + if CREATE_PIPELINE not in group_permissions: + return [] + + statements = [ + iam.PolicyStatement( + # sid="ListLambda", + actions=[ + 'lambda:List*', + 'lambda:GetLayer*', + 'lambda:GetAccountSettings', + 'lambda:GetEventSourceMapping', + 'lambda:CreateEventSourceMapping', + 'lambda:CreateCodeSigningConfig', + ], + resources=['*'], + ), + iam.PolicyStatement( + # sid="GenericLambdaFunctions", + actions=[ + 'lambda:UpdateFunctionCodeSigningConfig', + 'lambda:UpdateEventSourceMapping', + ], + resources=[ + f'arn:aws:lambda:{self.region}:{self.account}:function:{self.resource_prefix}*', + f'arn:aws:lambda:{self.region}:{self.account}:function:{self.resource_prefix}*:*', + f'arn:aws:lambda:{self.region}:{self.account}:code-signing-config:*', + f'arn:aws:lambda:{self.region}:{self.account}:event-source-mapping:*', + ], + ), + iam.PolicyStatement( + # sid="CreateTeamLambda", + actions=[ + 'lambda:CreateFunction', + 'lambda:TagResource', + ], + resources=[ + f'arn:aws:lambda:{self.region}:{self.account}:function:{self.resource_prefix}*', + f'arn:aws:lambda:{self.region}:{self.account}:function:{self.resource_prefix}*:*', + ], + conditions={ + 'StringEquals': { + f'aws:RequestTag/{self.tag_key}': [self.tag_value] + } + }, + ), + iam.PolicyStatement( + # sid="ManageTeamLambda", + not_actions=[ + 'lambda:CreateFunction', + 'lambda:TagResource', + 'lambda:UntagResource', + ], + resources=[ + f'arn:aws:lambda:{self.region}:{self.account}:function:{self.resource_prefix}*', + f'arn:aws:lambda:{self.region}:{self.account}:function:{self.resource_prefix}*:*' + ], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_value] + } + }, + ), + iam.PolicyStatement( + # sid="ManageLambdaLayers", + actions=[ + 'lambda:PublishLayerVersion', + 'lambda:DeleteLayerVersion', + ], + resources=[ + f'arn:aws:lambda:{self.region}:{self.account}:layer:{self.resource_prefix}*', + f'arn:aws:lambda:{self.region}:{self.account}:layer:{self.resource_prefix}*:*', + ] + ), + iam.PolicyStatement( + # sid="LoggingLambda", + actions=[ + 'logs:CreateLogGroup', + 'logs:CreateLogStream', + 'logs:PutLogEvents', + ], + effect=iam.Effect.ALLOW, + resources=[ + f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws/lambda/*', + f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws/lambda/*:log-stream:*', + ], + ) + ] + return statements diff --git a/backend/dataall/modules/datapipelines/cdk/env_role_datapipelines_stepfunctions_policy.py b/backend/dataall/modules/datapipelines/cdk/env_role_datapipelines_stepfunctions_policy.py new file mode 100644 index 000000000..9e7865d56 --- /dev/null +++ b/backend/dataall/modules/datapipelines/cdk/env_role_datapipelines_stepfunctions_policy.py @@ -0,0 +1,69 @@ +from aws_cdk import aws_iam as aws_iam + +from dataall.core.environment.cdk.env_role_core_policies.service_policy import ServicePolicy +from dataall.modules.datapipelines.services.datapipelines_permissions import CREATE_PIPELINE + + +class StepFunctions(ServicePolicy): + """ + Class including all permissions needed to work with AWS Step Functions. 
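+ It allows data.all users to list state machines and activities, and to create, tag and manage team state machines, activities and executions prefixed by the environment resource prefix.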
+ """ + def get_statements(self, group_permissions, **kwargs): + if CREATE_PIPELINE not in group_permissions: + return [] + + return [ + aws_iam.PolicyStatement( + # sid='ListMonitorStepFunctions', + effect=aws_iam.Effect.ALLOW, + actions=[ + 'states:ListStateMachines', + 'states:ListActivities', + 'states:SendTaskFailure', + 'states:SendTaskSuccess', + 'states:SendTaskHeartbeat', + ], + resources=['*'], + ), + aws_iam.PolicyStatement( + # sid='CreateTeamStepFunctions', + effect=aws_iam.Effect.ALLOW, + actions=[ + 'states:CreateStateMachine', + 'states:UpdateStateMachine', + 'states:CreateActivity', + 'states:TagResource' + ], + resources=[ + f'arn:aws:states:{self.region}:{self.account}:stateMachine:{self.resource_prefix}*', + f'arn:aws:states:{self.region}:{self.account}:activity:{self.resource_prefix}*', + ], + conditions={ + 'StringEquals': { + f'aws:RequestTag/{self.tag_key}': [self.tag_value] + } + }, + ), + aws_iam.PolicyStatement( + # sid='ManageTeamStepFunctions', + effect=aws_iam.Effect.ALLOW, + actions=[ + 'states:Delete*', + 'states:Describe*', + 'states:Get*', + 'states:List*', + 'states:Start*', + 'states:StopExecution' + ], + resources=[ + f'arn:aws:states:{self.region}:{self.account}:execution:{self.resource_prefix}*:*', + f'arn:aws:states:{self.region}:{self.account}:activity:{self.resource_prefix}*', + f'arn:aws:states:{self.region}:{self.account}:stateMachine:{self.resource_prefix}*' + ], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_value] + } + }, + ), + ] diff --git a/backend/dataall/modules/datapipelines/cdk/pivot_role_datapipelines_policy.py b/backend/dataall/modules/datapipelines/cdk/pivot_role_datapipelines_policy.py new file mode 100644 index 000000000..0bed7e176 --- /dev/null +++ b/backend/dataall/modules/datapipelines/cdk/pivot_role_datapipelines_policy.py @@ -0,0 +1,65 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class PipelinesPivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with AWS CodeCommit and STS assume for DDK pipelines + It allows pivot role to: + - .... 
+ """ + def get_statements(self): + statements = [ + iam.PolicyStatement( + sid='CodeCommitPipelines', + effect=iam.Effect.ALLOW, + actions=[ + 'codecommit:GetFile', + 'codecommit:ListBranches', + 'codecommit:GetFolder', + 'codecommit:GetCommit', + 'codecommit:GitPull', + 'codecommit:GetRepository', + 'codecommit:TagResource', + 'codecommit:UntagResource', + 'codecommit:CreateBranch', + 'codecommit:CreateCommit', + 'codecommit:CreateRepository', + 'codecommit:DeleteRepository', + 'codecommit:GitPush', + 'codecommit:PutFile', + 'codecommit:GetBranch', + ], + resources=[f'arn:aws:codecommit:*:{self.account}:{self.env_resource_prefix}*'], + ), + iam.PolicyStatement( + sid='STSPipelines', + effect=iam.Effect.ALLOW, + actions=['sts:AssumeRole'], + resources=[ + f'arn:aws:iam::{self.account}:role/ddk-*', + ], + ), + iam.PolicyStatement( + sid='CloudFormationDataPipelines', + effect=iam.Effect.ALLOW, + actions=[ + "cloudformation:DeleteStack", + "cloudformation:DescribeStacks", + "cloudformation:DescribeStackEvents", + "cloudformation:DescribeStackResources" + ], + resources=[ + f'arn:aws:cloudformation:*:{self.account}:stack/*/*', + ], + ), + iam.PolicyStatement( + sid='ParameterStoreDDK', + effect=iam.Effect.ALLOW, + actions=['ssm:GetParameter'], + resources=[ + f'arn:aws:ssm:*:{self.account}:parameter/ddk/*', + ], + ), + ] + return statements diff --git a/backend/dataall/modules/datapipelines/db/__init__.py b/backend/dataall/modules/datapipelines/db/__init__.py new file mode 100644 index 000000000..86631d191 --- /dev/null +++ b/backend/dataall/modules/datapipelines/db/__init__.py @@ -0,0 +1 @@ +"""Contains a code to that interacts with the database""" diff --git a/backend/dataall/modules/datapipelines/db/datapipelines_models.py b/backend/dataall/modules/datapipelines/db/datapipelines_models.py new file mode 100644 index 000000000..0942e4ca1 --- /dev/null +++ b/backend/dataall/modules/datapipelines/db/datapipelines_models.py @@ -0,0 +1,34 @@ +from sqlalchemy import Column, String, ForeignKey, Integer +from sqlalchemy.orm import query_expression +from sqlalchemy.dialects import postgresql + +from dataall.base.db import Base, Resource, utils + + +class DataPipeline(Resource, Base): + __tablename__ = 'datapipeline' + environmentUri = Column(String, ForeignKey("environment.environmentUri"), nullable=False) + DataPipelineUri = Column( + String, nullable=False, primary_key=True, default=utils.uuid('DataPipelineUri') + ) + region = Column(String, default='eu-west-1') + AwsAccountId = Column(String, nullable=False) + SamlGroupName = Column(String, nullable=False) + repo = Column(String, nullable=False) + devStrategy = Column(String, nullable=False) + template = Column(String, nullable=True, default="") + userRoleForPipeline = query_expression() + + +class DataPipelineEnvironment(Base, Resource): + __tablename__ = 'datapipelineenvironments' + envPipelineUri = Column(String, nullable=False, primary_key=True) + environmentUri = Column(String, nullable=False) + environmentLabel = Column(String, nullable=False) + pipelineUri = Column(String, nullable=False) + pipelineLabel = Column(String, nullable=False) + stage = Column(String, nullable=False) + order = Column(Integer, nullable=False) + region = Column(String, default='eu-west-1') + AwsAccountId = Column(String, nullable=False) + samlGroupName = Column(String, nullable=False) diff --git a/backend/dataall/modules/datapipelines/db/datapipelines_repositories.py b/backend/dataall/modules/datapipelines/db/datapipelines_repositories.py new file mode 100644 
index 000000000..69426f349 --- /dev/null +++ b/backend/dataall/modules/datapipelines/db/datapipelines_repositories.py @@ -0,0 +1,157 @@ +from sqlalchemy import or_, and_ +from sqlalchemy.orm import Query + +from dataall.core.environment.db.environment_models import Environment +from dataall.core.environment.services.environment_resource_manager import EnvironmentResource +from dataall.core.stacks.db.stack_models import Stack +from dataall.base.db import exceptions, paginate +from dataall.modules.datapipelines.db.datapipelines_models import DataPipeline, DataPipelineEnvironment + + +class DatapipelinesRepository(EnvironmentResource): + """DAO layer for datapipelines""" + _DEFAULT_PAGE = 1 + _DEFAULT_PAGE_SIZE = 10 + + def count_resources(self, session, environment, group_uri) -> int: + return ( + session.query(DataPipeline) + .filter( + and_( + DataPipeline.environmentUri == environment.environmentUri, + DataPipeline.SamlGroupName == group_uri + )) + .count() + ) + + @staticmethod + def get_clone_url_http(session, environmentUri, repo): + env: Environment = session.query(Environment).get( + environmentUri + ) + return f'codecommit::{env.region}://{repo}' + + @staticmethod + def get_pipeline_by_uri(session, uri): + pipeline: DataPipeline = session.query(DataPipeline).get(uri) + if not pipeline: + raise exceptions.ObjectNotFound('DataPipeline', uri) + return pipeline + + @staticmethod + def get_pipeline_environment_by_uri(session, uri): + pipeline_env: DataPipelineEnvironment = session.query(DataPipelineEnvironment).get(uri) + if not pipeline_env: + raise exceptions.ObjectNotFound('PipelineEnvironment', uri) + return pipeline_env + + @staticmethod + def get_pipeline_and_environment_by_uri(session, uri): + pipeline: DataPipeline = session.query(DataPipeline).get(uri) + env: Environment = session.query(Environment).get(pipeline.environmentUri) + return (pipeline, env) + + @staticmethod + def get_pipeline_stack_by_uri(session, uri): + return ( + session.query(Stack) + .filter( + and_( + Stack.targetUri == uri, + Stack.stack == 'PipelineStack', + )) + .first() + ) + + @staticmethod + def query_user_pipelines(session, username, groups, filter) -> Query: + query = session.query(DataPipeline).filter( + or_( + DataPipeline.owner == username, + DataPipeline.SamlGroupName.in_(groups), + ) + ) + if filter and filter.get('term'): + query = query.filter( + or_( + DataPipeline.description.ilike(filter.get('term') + '%%'), + DataPipeline.label.ilike(filter.get('term') + '%%'), + ) + ) + if filter and filter.get('region'): + if len(filter.get('region')) > 0: + query = query.filter( + DataPipeline.region.in_(filter.get('region')) + ) + if filter and filter.get('tags'): + if len(filter.get('tags')) > 0: + query = query.filter( + or_( + *[DataPipeline.tags.any(tag) for tag in filter.get('tags')] + ) + ) + if filter and filter.get('type'): + if len(filter.get('type')) > 0: + query = query.filter( + DataPipeline.devStrategy.in_(filter.get('type')) + ) + return query + + @staticmethod + def paginated_user_pipelines( + session, username, groups, data=None + ) -> dict: + return paginate( + query=DatapipelinesRepository.query_user_pipelines(session, username, groups, data), + page=data.get('page', DatapipelinesRepository._DEFAULT_PAGE), + page_size=data.get('pageSize', DatapipelinesRepository._DEFAULT_PAGE_SIZE), + ).to_dict() + + @staticmethod + def query_pipeline_environments(session, uri) -> Query: + query = session.query(DataPipelineEnvironment).filter( + DataPipelineEnvironment.pipelineUri.ilike(uri + '%%'), + ) 
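+         # prefix ilike match on pipelineUri returns every environment row registered for this pipeline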
+ return query + + @staticmethod + def paginated_pipeline_environments( + session, uri, data=None + ) -> dict: + return paginate( + query=DatapipelinesRepository.query_pipeline_environments(session, uri), + page=data.get('page', DatapipelinesRepository._DEFAULT_PAGE), + page_size=data.get('pageSize', DatapipelinesRepository._DEFAULT_PAGE_SIZE), + ).to_dict() + + @staticmethod + def delete_pipeline_environments(session, uri) -> bool: + deletedItems = ( + session.query(DataPipelineEnvironment).filter( + DataPipelineEnvironment.pipelineUri == uri).delete() + ) + session.commit() + return True + + @staticmethod + def delete_pipeline_environment( + session, envPipelineUri + ) -> bool: + deletedItem = ( + session.query(DataPipelineEnvironment).filter( + DataPipelineEnvironment.envPipelineUri == envPipelineUri).delete() + ) + session.commit() + return True + + @staticmethod + def get_pipeline_environment( + session, pipelineUri, environmentUri, stage + ) -> DataPipelineEnvironment: + return session.query(DataPipelineEnvironment).filter( + and_( + DataPipelineEnvironment.pipelineUri == pipelineUri, + DataPipelineEnvironment.environmentUri == environmentUri, + DataPipelineEnvironment.stage == stage + ) + ).first() diff --git a/backend/dataall/modules/datapipelines/handlers/__init__.py b/backend/dataall/modules/datapipelines/handlers/__init__.py new file mode 100644 index 000000000..ac47d23ca --- /dev/null +++ b/backend/dataall/modules/datapipelines/handlers/__init__.py @@ -0,0 +1,9 @@ +""" +Contains code with the handlers that are need for async +processing in a separate lambda function +""" +from dataall.modules.datapipelines.handlers import ( + codecommit_datapipeline_handler, +) + +__all__ = ["codecommit_datapipeline_handler"] diff --git a/backend/dataall/modules/datapipelines/handlers/codecommit_datapipeline_handler.py b/backend/dataall/modules/datapipelines/handlers/codecommit_datapipeline_handler.py new file mode 100644 index 000000000..56c2f5bd8 --- /dev/null +++ b/backend/dataall/modules/datapipelines/handlers/codecommit_datapipeline_handler.py @@ -0,0 +1,19 @@ +from dataall.core.tasks.service_handlers import Worker +from dataall.core.tasks.db.task_models import Task +from dataall.base.db import Engine +from dataall.modules.datapipelines.aws.codecommit_datapipeline_client import DatapipelineCodecommitClient + + +class DatapipelineCodeCommitHandler: + def __init__(self): + pass + + @staticmethod + @Worker.handler(path='repo.datapipeline.delete') + def delete_repository(engine: Engine, task: Task): + with engine.scoped_session() as session: + aws_account_id = task.payload.get('accountid', '111111111111') + region = task.payload.get('region', 'eu-west-1') + return DatapipelineCodecommitClient(aws_account_id, region).delete_repository( + repository=task.payload.get("repo_name", "dataall-repo") + ) diff --git a/backend/dataall/modules/datapipelines/services/__init__.py b/backend/dataall/modules/datapipelines/services/__init__.py new file mode 100644 index 000000000..7235c482f --- /dev/null +++ b/backend/dataall/modules/datapipelines/services/__init__.py @@ -0,0 +1 @@ +"""Contains business logic for datapipelines""" diff --git a/backend/dataall/modules/datapipelines/services/datapipelines_permissions.py b/backend/dataall/modules/datapipelines/services/datapipelines_permissions.py new file mode 100644 index 000000000..1a14da0bb --- /dev/null +++ b/backend/dataall/modules/datapipelines/services/datapipelines_permissions.py @@ -0,0 +1,44 @@ +from dataall.core.permissions.permissions import 
ENVIRONMENT_INVITED, ENVIRONMENT_INVITATION_REQUEST, \ + ENVIRONMENT_ALL, TENANT_ALL, TENANT_ALL_WITH_DESC, RESOURCES_ALL, RESOURCES_ALL_WITH_DESC + + +""" +DATAPIPELINE PERMISSIONS FOR ENVIRONMENT +""" +CREATE_PIPELINE = 'CREATE_PIPELINE' + +ENVIRONMENT_INVITED.append(CREATE_PIPELINE) + +ENVIRONMENT_INVITATION_REQUEST.append(CREATE_PIPELINE) + +ENVIRONMENT_ALL.append(CREATE_PIPELINE) + +RESOURCES_ALL.append(CREATE_PIPELINE) + +RESOURCES_ALL_WITH_DESC[CREATE_PIPELINE] = CREATE_PIPELINE + + +""" +DATAPIPELINES +""" +MANAGE_PIPELINES = 'MANAGE_PIPELINES' + +TENANT_ALL.append(MANAGE_PIPELINES) +TENANT_ALL_WITH_DESC[MANAGE_PIPELINES] = 'Manage pipelines' + +GET_PIPELINE = 'GET_PIPELINE' +UPDATE_PIPELINE = 'UPDATE_PIPELINE' +DELETE_PIPELINE = 'DELETE_PIPELINE' +CREDENTIALS_PIPELINE = 'CREDENTIALS_PIPELINE' +PIPELINE_ALL = [ + CREATE_PIPELINE, + GET_PIPELINE, + UPDATE_PIPELINE, + DELETE_PIPELINE, + CREDENTIALS_PIPELINE, +] + +RESOURCES_ALL.extend(PIPELINE_ALL) +for perm in PIPELINE_ALL: + RESOURCES_ALL_WITH_DESC[perm] = perm +RESOURCES_ALL_WITH_DESC[CREATE_PIPELINE] = 'Create pipelines on this environment' diff --git a/backend/dataall/modules/datapipelines/services/datapipelines_service.py b/backend/dataall/modules/datapipelines/services/datapipelines_service.py new file mode 100644 index 000000000..45b4fbbf5 --- /dev/null +++ b/backend/dataall/modules/datapipelines/services/datapipelines_service.py @@ -0,0 +1,332 @@ +import json +import logging + +from dataall.base.aws.sts import SessionHelper +from dataall.core.activity.db.activity_models import Activity +from dataall.core.environment.env_permission_checker import has_group_permission +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.permissions.permission_checker import has_resource_permission, has_tenant_permission +from dataall.core.stacks.db.keyvaluetag_repositories import KeyValueTag +from dataall.base.db import exceptions +from dataall.modules.datapipelines.aws.codecommit_datapipeline_client import DatapipelineCodecommitClient +from dataall.modules.datapipelines.aws.codepipeline_datapipeline_client import CodepipelineDatapipelineClient +from dataall.modules.datapipelines.aws.glue_datapipeline_client import GlueDatapipelineClient +from dataall.modules.datapipelines.db.datapipelines_models import DataPipeline, DataPipelineEnvironment +from dataall.modules.datapipelines.db.datapipelines_repositories import DatapipelinesRepository +from dataall.modules.datapipelines.services.datapipelines_permissions import DELETE_PIPELINE, \ + CREDENTIALS_PIPELINE, MANAGE_PIPELINES, CREATE_PIPELINE, PIPELINE_ALL, GET_PIPELINE, UPDATE_PIPELINE +from dataall.base.utils.naming_convention import ( + NamingConventionService, + NamingConventionPattern, +) +from dataall.base.utils import slugify + +logger = logging.getLogger(__name__) + + +class DataPipelineService: + @staticmethod + @has_tenant_permission(MANAGE_PIPELINES) + @has_resource_permission(CREATE_PIPELINE) + @has_group_permission(CREATE_PIPELINE) + def create_pipeline( + session, + admin_group, + username: str, + uri: str, + data: dict = None, + ) -> DataPipeline: + + environment = EnvironmentService.get_environment_by_uri(session, uri) + enabled = EnvironmentService.get_boolean_env_param(session, environment, "pipelinesEnabled") + + if not enabled: + raise exceptions.UnauthorizedOperation( + action=CREATE_PIPELINE, + message=f'Pipelines feature is disabled for the 
environment {environment.label}', + ) + + pipeline: DataPipeline = DataPipeline( + owner=username, + environmentUri=environment.environmentUri, + SamlGroupName=admin_group, + label=data['label'], + description=data.get('description', 'No description provided'), + tags=data.get('tags', []), + AwsAccountId=environment.AwsAccountId, + region=environment.region, + repo=slugify(data['label']), + devStrategy=data['devStrategy'], + template="", + ) + + session.add(pipeline) + session.commit() + + aws_compliant_name = NamingConventionService( + target_uri=pipeline.DataPipelineUri, + target_label=pipeline.label, + pattern=NamingConventionPattern.DEFAULT, + resource_prefix=environment.resourcePrefix, + ).build_compliant_name() + + pipeline.repo = aws_compliant_name + pipeline.name = aws_compliant_name + + activity = Activity( + action='PIPELINE:CREATE', + label='PIPELINE:CREATE', + owner=username, + summary=f'{username} created dashboard {pipeline.label} in {environment.label}', + targetUri=pipeline.DataPipelineUri, + targetType='pipeline', + ) + session.add(activity) + + ResourcePolicy.attach_resource_policy( + session=session, + group=data['SamlGroupName'], + permissions=PIPELINE_ALL, + resource_uri=pipeline.DataPipelineUri, + resource_type=DataPipeline.__name__, + ) + + if environment.SamlGroupName != pipeline.SamlGroupName: + ResourcePolicy.attach_resource_policy( + session=session, + group=environment.SamlGroupName, + permissions=PIPELINE_ALL, + resource_uri=pipeline.DataPipelineUri, + resource_type=DataPipeline.__name__, + ) + + return pipeline + + @staticmethod + @has_group_permission(CREATE_PIPELINE) + def create_pipeline_environment( + session, + admin_group, + uri, + username: str, + data: dict = None, + ) -> DataPipelineEnvironment: + + environment = EnvironmentService.get_environment_by_uri(session, data['environmentUri']) + enabled = EnvironmentService.get_boolean_env_param(session, environment, "pipelinesEnabled") + + if not enabled: + raise exceptions.UnauthorizedOperation( + action=CREATE_PIPELINE, + message=f'Pipelines feature is disabled for the environment {environment.label}', + ) + + pipeline = DatapipelinesRepository.get_pipeline_by_uri(session, data['pipelineUri']) + + pipeline_env: DataPipelineEnvironment = DataPipelineEnvironment( + owner=username, + label=f"{pipeline.label}-{environment.label}", + environmentUri=environment.environmentUri, + environmentLabel=environment.label, + pipelineUri=pipeline.DataPipelineUri, + pipelineLabel=pipeline.label, + envPipelineUri=f"{pipeline.DataPipelineUri}{environment.environmentUri}{data['stage']}", + AwsAccountId=environment.AwsAccountId, + region=environment.region, + stage=data['stage'], + order=data['order'], + samlGroupName=data['samlGroupName'] + ) + + session.add(pipeline_env) + session.commit() + + return pipeline_env + + @staticmethod + def validate_group_membership( + session, environment_uri, pipeline_group, username, groups + ): + if pipeline_group and pipeline_group not in groups: + raise exceptions.UnauthorizedOperation( + action=CREATE_PIPELINE, + message=f'User: {username} is not a member of the team {pipeline_group}', + ) + if pipeline_group not in EnvironmentService.list_environment_groups( + session=session, + uri=environment_uri, + ): + raise exceptions.UnauthorizedOperation( + action=CREATE_PIPELINE, + message=f'Team: {pipeline_group} is not a member of the environment {environment_uri}', + ) + + @staticmethod + @has_tenant_permission(MANAGE_PIPELINES) + @has_resource_permission(GET_PIPELINE) + def get_pipeline( + 
session, + uri: str, + ) -> DataPipeline: + return DatapipelinesRepository.get_pipeline_by_uri(session, uri) + + @staticmethod + @has_tenant_permission(MANAGE_PIPELINES) + @has_resource_permission(UPDATE_PIPELINE) + def update_pipeline( + session, uri, data=None + ) -> DataPipeline: + pipeline: DataPipeline = DatapipelinesRepository.get_pipeline_by_uri(session, uri) + if data: + if isinstance(data, dict): + for k in data.keys(): + setattr(pipeline, k, data.get(k)) + return pipeline + + @staticmethod + def delete(session, username, groups, uri, data=None, check_perm=None) -> bool: + pipeline = DatapipelinesRepository.get_pipeline_by_uri(session, uri) + ResourcePolicy.delete_resource_policy( + session=session, resource_uri=uri, group=pipeline.SamlGroupName + ) + session.delete(pipeline) + session.commit() + return True + + @staticmethod + @has_tenant_permission(MANAGE_PIPELINES) + @has_resource_permission(GET_PIPELINE) + def get_pipeline_environment( + session, + uri: str, + ) -> DataPipeline: + return DatapipelinesRepository.get_pipeline_environment_by_uri(session, uri) + + @staticmethod + @has_tenant_permission(MANAGE_PIPELINES) + @has_resource_permission(UPDATE_PIPELINE) + def update_pipeline_environment( + session, uri, data=None + ) -> DataPipelineEnvironment: + pipeline_env = DatapipelinesRepository.get_pipeline_environment( + session=session, + pipelineUri=data['pipelineUri'], + environmentUri=data['environmentUri'], + stage=data['stage'] + ) + + if data: + if isinstance(data, dict): + for k in data.keys(): + print(f"KEY: {k}, VALUE: {data.get(k)}") + setattr(pipeline_env, k, data.get(k)) + return pipeline_env + + @staticmethod + @has_resource_permission(DELETE_PIPELINE) + def delete_pipeline(session, uri, pipeline): + + DatapipelinesRepository.delete_pipeline_environments(session, uri) + + KeyValueTag.delete_key_value_tags(session, pipeline.DataPipelineUri, 'pipeline') + + session.delete(pipeline) + + ResourcePolicy.delete_resource_policy( + session=session, + resource_uri=pipeline.DataPipelineUri, + group=pipeline.SamlGroupName, + ) + + @staticmethod + def _get_creds_from_aws(pipeline, env_role_arn): + aws_account_id = pipeline.AwsAccountId + aws_session = SessionHelper.remote_session(aws_account_id) + env_session = SessionHelper.get_session(aws_session, role_arn=env_role_arn) + c = env_session.get_credentials() + body = json.dumps( + { + 'AWS_ACCESS_KEY_ID': c.access_key, + 'AWS_SECRET_ACCESS_KEY': c.secret_key, + 'AWS_SESSION_TOKEN': c.token, + } + ) + return body + + @staticmethod + @has_resource_permission(CREDENTIALS_PIPELINE) + def get_credentials(session, uri): + pipeline = DatapipelinesRepository.get_pipeline_by_uri(session, uri) + env = EnvironmentService.get_environment_by_uri(session, pipeline.environmentUri) + + env_role_arn = env.EnvironmentDefaultIAMRoleArn + + return DataPipelineService._get_creds_from_aws(pipeline, env_role_arn) + + @staticmethod + @has_tenant_permission(MANAGE_PIPELINES) + @has_resource_permission(GET_PIPELINE) + def cat(session, input): + (pipeline, env) = DatapipelinesRepository.get_pipeline_and_environment_by_uri( + session=session, + uri=input.get('DataPipelineUri') + ) + + return DatapipelineCodecommitClient(env.AwsAccountId, env.region).get_file_content( + repository=pipeline.repo, + commit_specifier=input.get('branch', 'master'), + file_path=input.get('absolutePath', 'README.md') + ) + + @staticmethod + @has_tenant_permission(MANAGE_PIPELINES) + @has_resource_permission(GET_PIPELINE) + def ls(session, input): + (pipeline, env) = 
DatapipelinesRepository.get_pipeline_and_environment_by_uri( + session=session, + uri=input.get('DataPipelineUri') + ) + + return DatapipelineCodecommitClient(env.AwsAccountId, env.region).get_folder_content( + repository=pipeline.repo, + commit_specifier=input.get('branch', 'master'), + folder_path=input.get('folderPath', '/') + ) + + @staticmethod + @has_tenant_permission(MANAGE_PIPELINES) + @has_resource_permission(GET_PIPELINE) + def list_branches(session, datapipeline_uri): + (pipeline, env) = DatapipelinesRepository.get_pipeline_and_environment_by_uri( + session=session, + uri=datapipeline_uri + ) + + return DatapipelineCodecommitClient(env.AwsAccountId, env.region).list_branches( + repository=pipeline.repo + ) + + @staticmethod + def get_job_runs(session, datapipeline_uri): + data_pipeline: DataPipeline = DatapipelinesRepository.get_pipeline_by_uri( + session=session, + uri=datapipeline_uri + ) + + return GlueDatapipelineClient( + aws_account_id=data_pipeline.AwsAccountId, + region=data_pipeline.region + ).get_job_runs(datapipeline_job_name=data_pipeline.name) + + @staticmethod + def get_pipeline_execution(session, datapipeline_uri): + stack = DatapipelinesRepository.get_pipeline_stack_by_uri(session, datapipeline_uri) + datapipeline: DataPipeline = DatapipelinesRepository.get_pipeline_by_uri(session, datapipeline_uri) + outputs = stack.outputs + codepipeline_name = outputs['PipelineNameOutput'] + + return CodepipelineDatapipelineClient( + aws_account_id=datapipeline.AwsAccountId, + region=datapipeline.region + ).get_pipeline_execution_summaries(codepipeline_name=codepipeline_name) diff --git a/backend/dataall/modules/dataset_sharing/__init__.py b/backend/dataall/modules/dataset_sharing/__init__.py new file mode 100644 index 000000000..d98a13dbc --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/__init__.py @@ -0,0 +1,55 @@ +import logging +from typing import List, Type, Set + +from dataall.core.environment.services.environment_resource_manager import EnvironmentResourceManager +from dataall.modules.dataset_sharing.db.share_object_repositories import ShareEnvironmentResource +from dataall.modules.datasets_base import DatasetBaseModuleInterface +from dataall.base.loader import ModuleInterface, ImportMode + + +log = logging.getLogger(__name__) + + +class SharingApiModuleInterface(ModuleInterface): + @staticmethod + def is_supported(modes: Set[ImportMode]) -> bool: + return ImportMode.API in modes + + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [DatasetBaseModuleInterface] + + def __init__(self): + from dataall.modules.dataset_sharing import api + + EnvironmentResourceManager.register(ShareEnvironmentResource()) + log.info("API of dataset sharing has been imported") + + +class SharingAsyncHandlersModuleInterface(ModuleInterface): + """Implements ModuleInterface for dataset async lambda""" + + @staticmethod + def is_supported(modes: List[ImportMode]): + return ImportMode.HANDLERS in modes + + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [DatasetBaseModuleInterface] + + def __init__(self): + import dataall.modules.dataset_sharing.handlers + log.info("Sharing handlers have been imported") + + +class DataSharingCdkModuleInterface(ModuleInterface): + """Implements ModuleInterface for data sharing""" + + @staticmethod + def is_supported(modes): + return ImportMode.CDK in modes + + def __init__(self): + import dataall.modules.dataset_sharing.cdk + + log.info("CDK module data_sharing has been imported") diff --git 
a/backend/dataall/modules/dataset_sharing/api/__init__.py b/backend/dataall/modules/dataset_sharing/api/__init__.py new file mode 100644 index 000000000..1a101fb7e --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/api/__init__.py @@ -0,0 +1,9 @@ +from dataall.modules.dataset_sharing.api import ( + input_types, + mutations, + queries, + resolvers, + types, +) + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/modules/dataset_sharing/api/enums.py b/backend/dataall/modules/dataset_sharing/api/enums.py new file mode 100644 index 000000000..4a9f25075 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/api/enums.py @@ -0,0 +1,75 @@ +from dataall.base.api.constants import GraphQLEnumMapper + + +class ShareableType(GraphQLEnumMapper): + Table = 'DatasetTable' + StorageLocation = 'DatasetStorageLocation' + View = 'View' + + +class ShareObjectPermission(GraphQLEnumMapper): + Approvers = '999' + Requesters = '800' + DatasetAdmins = '700' + NoPermission = '000' + + +class ShareObjectStatus(GraphQLEnumMapper): + Deleted = 'Deleted' + Approved = 'Approved' + Rejected = 'Rejected' + Revoked = 'Revoked' + Draft = 'Draft' + Submitted = 'Submitted' + Revoke_In_Progress = 'Revoke_In_Progress' + Share_In_Progress = 'Share_In_Progress' + Processed = 'Processed' + + +class ShareItemStatus(GraphQLEnumMapper): + Deleted = 'Deleted' + PendingApproval = 'PendingApproval' + Share_Approved = 'Share_Approved' + Share_Rejected = 'Share_Rejected' + Share_In_Progress = 'Share_In_Progress' + Share_Succeeded = 'Share_Succeeded' + Share_Failed = 'Share_Failed' + Revoke_Approved = 'Revoke_Approved' + Revoke_In_Progress = 'Revoke_In_Progress' + Revoke_Failed = 'Revoke_Failed' + Revoke_Succeeded = 'Revoke_Succeeded' + + +class ShareObjectActions(GraphQLEnumMapper): + Submit = 'Submit' + Approve = 'Approve' + Reject = 'Reject' + RevokeItems = 'RevokeItems' + Start = 'Start' + Finish = 'Finish' + FinishPending = 'FinishPending' + Delete = 'Delete' + + +class ShareItemActions(GraphQLEnumMapper): + AddItem = 'AddItem' + RemoveItem = 'RemoveItem' + Failure = 'Failure' + Success = 'Success' + + +class PrincipalType(GraphQLEnumMapper): + Any = 'Any' + Organization = 'Organization' + Environment = 'Environment' + User = 'User' + Project = 'Project' + Public = 'Public' + Group = 'Group' + ConsumptionRole = 'ConsumptionRole' + + +class ShareSortField(GraphQLEnumMapper): + created = 'created' + updated = 'updated' + label = 'label' diff --git a/backend/dataall/modules/dataset_sharing/api/input_types.py b/backend/dataall/modules/dataset_sharing/api/input_types.py new file mode 100644 index 000000000..933613fe5 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/api/input_types.py @@ -0,0 +1,94 @@ +from dataall.base.api.constants import * +from dataall.core.organizations.api.enums import OrganisationUserRole +from dataall.modules.dataset_sharing.api.enums import ShareableType, ShareSortField + + +NewShareObjectInput = gql.InputType( + name='NewShareObjectInput', + arguments=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='groupUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='principalId', type=gql.NonNullableType(gql.String)), + gql.Argument(name='principalType', type=gql.NonNullableType(gql.String)), + gql.Argument(name='requestPurpose', type=gql.String), + ], +) + + +AddSharedItemInput = gql.InputType( + name='AddSharedItemInput', + arguments=[ + gql.Argument(name='itemUri', 
type=gql.NonNullableType(gql.String)), + gql.Argument( + name='itemType', type=gql.NonNullableType(ShareableType.toGraphQLEnum()) + ), + ], +) + + +RevokeItemsInput = gql.InputType( + name='RevokeItemsInput', + arguments=[ + gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='revokedItemUris', type=gql.NonNullableType(gql.ArrayType(gql.String))), + ], +) + + +ShareSortCriteria = gql.InputType( + name='ShareSortCriteria', + arguments=[ + gql.Argument( + name='field', type=gql.NonNullableType(ShareSortField.toGraphQLEnum()) + ), + gql.Argument( + name='direction', type=gql.NonNullableType(SortDirection.toGraphQLEnum()) + ), + ], +) + +ShareObjectFilter = gql.InputType( + name='ShareObjectFilter', + arguments=[ + gql.Argument('term', gql.String), + gql.Argument('sort', gql.ArrayType(ShareSortCriteria)), + gql.Argument('page', gql.Integer), + gql.Argument('pageSize', gql.Integer), + gql.Argument('roles', gql.ArrayType(OrganisationUserRole.toGraphQLEnum())), + gql.Argument('tags', gql.ArrayType(gql.String)), + ], +) + + +ShareableObjectFilter = gql.InputType( + name='ShareableObjectFilter', + arguments=[ + gql.Argument(name='term', type=gql.String), + gql.Argument('tags', gql.ArrayType(gql.String)), + gql.Argument(name='isShared', type=gql.Boolean), + gql.Argument(name='isRevokable', type=gql.Boolean), + gql.Argument('page', gql.Integer), + gql.Argument('pageSize', gql.Integer), + ], +) + +EnvironmentDataItemFilter = gql.InputType( + name='EnvironmentDataItemFilter', + arguments=[ + gql.Argument('itemTypes', gql.ArrayType(gql.String)), + gql.Argument('term', gql.String), + gql.Argument('page', gql.Integer), + gql.Argument('pageSize', gql.Integer), + gql.Argument('uniqueShares', gql.Boolean) + ], +) + +PrincipalFilter = gql.InputType( + name='PrincipalFilter', + arguments=[ + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + gql.Argument(name='principalType', type=gql.Ref('PrincipalType')), + gql.Argument(name='term', type=gql.String), + ], +) diff --git a/backend/dataall/modules/dataset_sharing/api/mutations.py b/backend/dataall/modules/dataset_sharing/api/mutations.py new file mode 100644 index 000000000..16c797c2d --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/api/mutations.py @@ -0,0 +1,94 @@ +from dataall.base.api import gql +from dataall.modules.dataset_sharing.api.resolvers import * + +createShareObject = gql.MutationField( + name='createShareObject', + args=[ + gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='itemUri', type=gql.String), + gql.Argument(name='itemType', type=gql.String), + gql.Argument( + name='input', type=gql.NonNullableType(gql.Ref('NewShareObjectInput')) + ), + ], + type=gql.Ref('ShareObject'), + resolver=create_share_object, +) + +deleteShareObject = gql.MutationField( + name='deleteShareObject', + args=[gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String))], + resolver=delete_share_object, + type=gql.Boolean +) + +addSharedItem = gql.MutationField( + name='addSharedItem', + args=[ + gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='input', type=gql.Ref('AddSharedItemInput')), + ], + type=gql.Ref('ShareItem'), + resolver=add_shared_item, +) + + +removeSharedItem = gql.MutationField( + name='removeSharedItem', + args=[gql.Argument(name='shareItemUri', type=gql.NonNullableType(gql.String))], + resolver=remove_shared_item, + type=gql.Boolean, +) + +submitShareObject = 
gql.MutationField( + name='submitShareObject', + args=[gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String))], + type=gql.Ref('ShareObject'), + resolver=submit_share_object, +) + +approveShareObject = gql.MutationField( + name='approveShareObject', + args=[gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String))], + type=gql.Ref('ShareObject'), + resolver=approve_share_object, +) + + +rejectShareObject = gql.MutationField( + name='rejectShareObject', + args=[ + gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='rejectPurpose', type=gql.String), + ], + type=gql.Ref('ShareObject'), + resolver=reject_share_object, +) + +revokeItemsShareObject = gql.MutationField( + name='revokeItemsShareObject', + args=[gql.Argument(name='input', type=gql.Ref('RevokeItemsInput'))], + type=gql.Ref('ShareObject'), + resolver=revoke_items_share_object, +) + +updateShareRejectReason = gql.MutationField( + name='updateShareRejectReason', + args=[ + gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='rejectPurpose', type=gql.String) + ], + type=gql.Boolean, + resolver=update_share_reject_purpose, +) + + +updateShareRequestReason = gql.MutationField( + name='updateShareRequestReason', + args=[ + gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='requestPurpose', type=gql.String) + ], + type=gql.Boolean, + resolver=update_share_request_purpose, +) diff --git a/backend/dataall/modules/dataset_sharing/api/queries.py b/backend/dataall/modules/dataset_sharing/api/queries.py new file mode 100644 index 000000000..d7e132a47 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/api/queries.py @@ -0,0 +1,35 @@ +from dataall.base.api import gql +from dataall.modules.dataset_sharing.api.resolvers import * + +getShareObject = gql.QueryField( + name='getShareObject', + args=[gql.Argument(name='shareUri', type=gql.NonNullableType(gql.String))], + type=gql.Ref('ShareObject'), + resolver=get_share_object, +) + + +getShareRequestsFromMe = gql.QueryField( + name='getShareRequestsFromMe', + args=[gql.Argument(name='filter', type=gql.Ref('ShareObjectFilter'))], + type=gql.Ref('ShareSearchResult'), + resolver=list_shares_in_my_outbox, +) + +getShareRequestsToMe = gql.QueryField( + name='getShareRequestsToMe', + args=[gql.Argument(name='filter', type=gql.Ref('ShareObjectFilter'))], + type=gql.Ref('ShareSearchResult'), + resolver=list_shares_in_my_inbox, +) + +searchEnvironmentDataItems = gql.QueryField( + name='searchEnvironmentDataItems', + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('EnvironmentDataItemFilter')), + ], + resolver=list_shared_with_environment_data_items, + type=gql.Ref('EnvironmentPublishedItemSearchResults'), + test_scope='Dataset', +) diff --git a/backend/dataall/modules/dataset_sharing/api/resolvers.py b/backend/dataall/modules/dataset_sharing/api/resolvers.py new file mode 100644 index 000000000..c3102df11 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/api/resolvers.py @@ -0,0 +1,257 @@ +import logging + +from dataall.base import utils +from dataall.base.api.context import Context +from dataall.core.environment.db.environment_models import Environment +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.organizations.db.organization_repositories import Organization +from dataall.base.db.exceptions import RequiredParameter +from 
dataall.modules.dataset_sharing.api.enums import ShareObjectPermission +from dataall.modules.dataset_sharing.db.share_object_models import ShareObjectItem, ShareObject +from dataall.modules.dataset_sharing.services.share_item_service import ShareItemService +from dataall.modules.dataset_sharing.services.share_object_service import ShareObjectService +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.db.dataset_models import DatasetStorageLocation, DatasetTable, Dataset + +log = logging.getLogger(__name__) + + +def create_share_object( + context: Context, + source, + datasetUri: str = None, + itemUri: str = None, + itemType: str = None, + input: dict = None, +): + if not input: + raise RequiredParameter(input) + if 'principalId' not in input: + raise RequiredParameter('principalId') + if 'principalType' not in input: + raise RequiredParameter('principalType') + if 'groupUri' not in input: + raise RequiredParameter('groupUri') + + return ShareObjectService.create_share_object( + uri=input['environmentUri'], + dataset_uri=datasetUri, + item_uri=itemUri, + item_type=itemType, + group_uri=input['groupUri'], + principal_id=input['principalId'], + principal_type=input['principalType'], + requestPurpose=input.get('requestPurpose') + ) + + +def submit_share_object(context: Context, source, shareUri: str = None): + return ShareObjectService.submit_share_object(uri=shareUri) + + +def approve_share_object(context: Context, source, shareUri: str = None): + return ShareObjectService.approve_share_object(uri=shareUri) + + +def reject_share_object(context: Context, source, shareUri: str = None, rejectPurpose: str = None,): + return ShareObjectService.reject_share_object(uri=shareUri, reject_purpose=rejectPurpose) + + +def revoke_items_share_object(context: Context, source, input): + share_uri = input.get("shareUri") + revoked_uris = input.get("revokedItemUris") + return ShareItemService.revoke_items_share_object(uri=share_uri, revoked_uris=revoked_uris) + + +def delete_share_object(context: Context, source, shareUri: str = None): + return ShareObjectService.delete_share_object(uri=shareUri) + + +def add_shared_item(context, source, shareUri: str = None, input: dict = None): + return ShareItemService.add_shared_item(uri=shareUri, data=input) + + +def remove_shared_item(context, source, shareItemUri: str = None): + return ShareItemService.remove_shared_item(uri=shareItemUri) + + +def resolve_shared_item(context, source: ShareObjectItem, **kwargs): + if not source: + return None + return ShareItemService.resolve_shared_item(uri=source.shareUri, item=source) + + +def get_share_object(context, source, shareUri: str = None): + return ShareObjectService.get_share_object(uri=shareUri) + + +def resolve_user_role(context: Context, source: ShareObject, **kwargs): + if not source: + return None + with context.engine.scoped_session() as session: + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, source.datasetUri) + if ( + dataset and ( + dataset.stewards in context.groups + or dataset.SamlAdminGroupName in context.groups + or dataset.owner == context.username + ) + ): + return ShareObjectPermission.Approvers.value + if ( + source.owner == context.username + or source.groupUri in context.groups + ): + return ShareObjectPermission.Requesters.value + else: + return ShareObjectPermission.NoPermission.value + + +def resolve_dataset(context: Context, source: ShareObject, **kwargs): + if not source: + return None + with 
context.engine.scoped_session() as session: + ds: Dataset = DatasetRepository.get_dataset_by_uri(session, source.datasetUri) + if ds: + env: Environment = EnvironmentService.get_environment_by_uri(session, ds.environmentUri) + return { + 'datasetUri': source.datasetUri, + 'datasetName': ds.name if ds else 'NotFound', + 'SamlAdminGroupName': ds.SamlAdminGroupName if ds else 'NotFound', + 'environmentName': env.label if env else 'NotFound', + 'AwsAccountId': env.AwsAccountId if env else 'NotFound', + 'region': env.region if env else 'NotFound', + 'exists': True if ds else False, + } + + +def union_resolver(object, *_): + if isinstance(object, DatasetTable): + return 'DatasetTable' + elif isinstance(object, DatasetStorageLocation): + return 'DatasetStorageLocation' + + +def resolve_principal(context: Context, source: ShareObject, **kwargs): + if not source: + return None + + with context.engine.scoped_session() as session: + if source.principalType in ['Group', 'ConsumptionRole']: + environment = EnvironmentService.get_environment_by_uri(session, source.environmentUri) + organization = Organization.get_organization_by_uri( + session, environment.organizationUri + ) + if source.principalType in ['ConsumptionRole']: + principal = EnvironmentService.get_environment_consumption_role( + session, + source.principalId, + source.environmentUri + ) + principalName = f"{principal.consumptionRoleName} [{principal.IAMRoleArn}]" + else: + principal = EnvironmentService.get_environment_group(session, source.groupUri, source.environmentUri) + principalName = f"{source.groupUri} [{principal.environmentIAMRoleArn}]" + + return { + 'principalId': source.principalId, + 'principalType': source.principalType, + 'principalName': principalName, + 'principalIAMRoleName': source.principalIAMRoleName, + 'SamlGroupName': source.groupUri, + 'environmentUri': environment.environmentUri, + 'environmentName': environment.label, + 'AwsAccountId': environment.AwsAccountId, + 'region': environment.region, + 'organizationUri': organization.organizationUri, + 'organizationName': organization.label, + } + + +def resolve_group(context: Context, source: ShareObject, **kwargs): + if not source: + return None + return source.groupUri + + +def resolve_consumption_data(context: Context, source: ShareObject, **kwargs): + if not source: + return None + with context.engine.scoped_session() as session: + ds: Dataset = DatasetRepository.get_dataset_by_uri(session, source.datasetUri) + if ds: + S3AccessPointName = utils.slugify( + source.datasetUri + '-' + source.principalId, + max_length=50, lowercase=True, regex_pattern='[^a-zA-Z0-9-]', separator='-' + ) + return { + 's3AccessPointName': S3AccessPointName, + 'sharedGlueDatabase': (ds.GlueDatabaseName + '_shared_' + source.shareUri)[:254] if ds else 'Not created', + } + + +def resolve_share_object_statistics(context: Context, source: ShareObject, **kwargs): + if not source: + return None + return ShareObjectService.resolve_share_object_statistics(uri=source.shareUri) + + +def resolve_existing_shared_items(context: Context, source: ShareObject, **kwargs): + if not source: + return None + return ShareItemService.check_existing_shared_items(source) + + +def list_shareable_objects( + context: Context, source: ShareObject, filter: dict = None +): + if not source: + return None + if not filter: + filter = {'page': 1, 'pageSize': 5} + + is_revokable = filter.get('isRevokable') + return ShareItemService.list_shareable_objects( + share=source, + is_revokable=is_revokable, + filter=filter + ) + + 
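+ # The inbox/outbox resolvers below forward the GraphQL filter unchanged; when the
+ # caller omits it, an empty dict is passed and pagination defaults are presumably
+ # resolved inside ShareObjectService / its repository layer. A minimal, purely
+ # illustrative query against the schema declared in queries.py and types.py
+ # (field selection is an example only, not a required shape) could look like:
+ #
+ #   query {
+ #     getShareRequestsToMe(filter: {page: 1, pageSize: 10}) {
+ #       count
+ #       nodes { shareUri status datasetUri }
+ #     }
+ #   }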
+def list_shares_in_my_inbox(context: Context, source, filter: dict = None): + if not filter: + filter = {} + return ShareObjectService.list_shares_in_my_inbox(filter) + + +def list_shares_in_my_outbox(context: Context, source, filter: dict = None): + if not filter: + filter = {} + return ShareObjectService.list_shares_in_my_outbox(filter) + + +def list_shared_with_environment_data_items( + context: Context, source, environmentUri: str = None, filter: dict = None +): + if not filter: + filter = {} + with context.engine.scoped_session() as session: + return ShareItemService.paginated_shared_with_environment_datasets( + session=session, + uri=environmentUri, + data=filter, + ) + + +def update_share_request_purpose(context: Context, source, shareUri: str = None, requestPurpose: str = None): + return ShareObjectService.update_share_request_purpose( + uri=shareUri, + request_purpose=requestPurpose, + ) + + +def update_share_reject_purpose(context: Context, source, shareUri: str = None, rejectPurpose: str = None): + with context.engine.scoped_session() as session: + return ShareObjectService.update_share_reject_purpose( + uri=shareUri, reject_purpose=rejectPurpose, + ) diff --git a/backend/dataall/modules/dataset_sharing/api/types.py b/backend/dataall/modules/dataset_sharing/api/types.py new file mode 100644 index 000000000..6e41512be --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/api/types.py @@ -0,0 +1,240 @@ +from dataall.base.api import gql +from dataall.modules.dataset_sharing.api.enums import ShareableType, PrincipalType +from dataall.modules.dataset_sharing.api.resolvers import union_resolver, resolve_shared_item, resolve_dataset, \ + resolve_consumption_data, resolve_existing_shared_items, resolve_share_object_statistics, resolve_principal, \ + resolve_group, list_shareable_objects, resolve_user_role +from dataall.core.environment.api.resolvers import resolve_environment + +ShareableObject = gql.Union( + name='ShareableObject', + types=[gql.Ref('DatasetTable'), gql.Ref('DatasetStorageLocation')], + resolver=union_resolver, +) + + +ShareItem = gql.ObjectType( + name='ShareItem', + fields=[ + gql.Field(name='shareUri', type=gql.String), + gql.Field(name='shareItemUri', type=gql.ID), + gql.Field('itemUri', gql.String), + gql.Field(name='status', type=gql.Ref('ShareItemStatus')), + gql.Field(name='action', type=gql.String), + gql.Field('itemType', ShareableType.toGraphQLEnum()), + gql.Field('itemName', gql.String), + gql.Field('description', gql.String), + gql.Field( + name='sharedObject', + type=gql.Ref('ShareableObject'), + resolver=resolve_shared_item, + ), + # gql.Field(name="permission", type=gql.String) + ], +) + +NotSharedItem = gql.ObjectType( + name='NotSharedItem', + fields=[ + gql.Field('itemUri', gql.String), + gql.Field('shareItemUri', gql.String), + gql.Field('itemType', ShareableType.toGraphQLEnum()), + gql.Field('label', gql.String), + # gql.Field("permission", DatasetRole.toGraphQLEnum()), + gql.Field('tags', gql.ArrayType(gql.String)), + gql.Field('created', gql.String), + ], +) + + +NotSharedItemsSearchResult = gql.ObjectType( + name='NotSharedItemsSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='pageSize', type=gql.Integer), + gql.Field(name='nextPage', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='previousPage', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + 
gql.Field(name='nodes', type=gql.ArrayType(NotSharedItem)), + ], +) + + +SharedItemSearchResult = gql.ObjectType( + name='SharedItemSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='pageSize', type=gql.Integer), + gql.Field(name='nextPage', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='previousPage', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('ShareItem'))), + ], +) + +ShareObjectStatistic = gql.ObjectType( + name='ShareObjectStatistic', + fields=[ + gql.Field(name='locations', type=gql.Integer), + gql.Field(name='tables', type=gql.Integer), + gql.Field(name='sharedItems', type=gql.Integer), + gql.Field(name='revokedItems', type=gql.Integer), + gql.Field(name='failedItems', type=gql.Integer), + gql.Field(name='pendingItems', type=gql.Integer), + ], +) + +DatasetLink = gql.ObjectType( + name='DatasetLink', + fields=[ + gql.Field(name='datasetUri', type=gql.String), + gql.Field(name='datasetName', type=gql.String), + gql.Field(name='SamlAdminGroupName', type=gql.String), + gql.Field(name='environmentName', type=gql.String), + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='region', type=gql.String), + gql.Field(name='exists', type=gql.Boolean), + ], +) + +ConsumptionData = gql.ObjectType( + name='ConsumptionData', + fields=[ + gql.Field(name='s3AccessPointName', type=gql.String), + gql.Field(name='sharedGlueDatabase', type=gql.String), + ], +) + +ShareObject = gql.ObjectType( + name='ShareObject', + fields=[ + gql.Field(name='shareUri', type=gql.ID), + gql.Field(name='status', type=gql.Ref('ShareObjectStatus')), + gql.Field(name='owner', type=gql.String), + gql.Field(name='created', type=gql.String), + gql.Field(name='deleted', type=gql.String), + gql.Field(name='updated', type=gql.String), + gql.Field(name='datasetUri', type=gql.String), + gql.Field(name='requestPurpose', type=gql.String), + gql.Field(name='rejectPurpose', type=gql.String), + gql.Field(name='dataset', type=DatasetLink, resolver=resolve_dataset), + gql.Field(name='consumptionData', type=gql.Ref('ConsumptionData'), resolver=resolve_consumption_data), + gql.Field(name='existingSharedItems', type=gql.Boolean, resolver=resolve_existing_shared_items), + gql.Field( + name='statistics', + type=gql.Ref('ShareObjectStatistic'), + resolver=resolve_share_object_statistics, + ), + gql.Field( + name='principal', resolver=resolve_principal, type=gql.Ref('Principal') + ), + gql.Field( + name='environment', + resolver=resolve_environment, + type=gql.Ref('Environment'), + ), + gql.Field( + name='group', + resolver=resolve_group, + type=gql.String, + ), + gql.Field( + 'items', + args=[gql.Argument(name='filter', type=gql.Ref('ShareableObjectFilter'))], + type=gql.Ref('SharedItemSearchResult'), + resolver=list_shareable_objects, + ), + gql.Field( + name='userRoleForShareObject', + type=gql.Ref('ShareObjectPermission'), + resolver=resolve_user_role, + ), + ], +) + + +ShareSearchResult = gql.ObjectType( + name='ShareSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='pageSize', type=gql.Integer), + gql.Field(name='nextPage', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='previousPage', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + 
gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('ShareObject'))), + ], +) + +EnvironmentPublishedItem = gql.ObjectType( + name='EnvironmentPublishedItem', + fields=[ + gql.Field(name='shareUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='datasetUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='datasetName', type=gql.NonNullableType(gql.String)), + gql.Field(name='itemAccess', type=gql.NonNullableType(gql.String)), + gql.Field(name='itemType', type=gql.NonNullableType(gql.String)), + gql.Field(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='principalId', type=gql.NonNullableType(gql.String)), + gql.Field(name='environmentName', type=gql.NonNullableType(gql.String)), + gql.Field(name='organizationUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='organizationName', type=gql.NonNullableType(gql.String)), + gql.Field(name='created', type=gql.NonNullableType(gql.String)), + gql.Field(name='GlueDatabaseName', type=gql.String), + gql.Field(name='GlueTableName', type=gql.String), + gql.Field(name='S3AccessPointName', type=gql.String), + ], +) + + +EnvironmentPublishedItemSearchResults = gql.ObjectType( + name='EnvironmentPublishedItemSearchResults', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(EnvironmentPublishedItem)), + ], +) + +Principal = gql.ObjectType( + name='Principal', + fields=[ + gql.Field(name='principalId', type=gql.ID), + gql.Field(name='principalType', type=PrincipalType.toGraphQLEnum()), + gql.Field(name='principalName', type=gql.String), + gql.Field(name='principalIAMRoleName', type=gql.String), + gql.Field(name='SamlGroupName', type=gql.String), + gql.Field(name='environmentName', type=gql.String), + gql.Field(name='environmentUri', type=gql.String), + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='region', type=gql.String), + gql.Field(name='organizationName', type=gql.String), + gql.Field(name='organizationUri', type=gql.String), + ], +) + + +PrincipalSearchResult = gql.ObjectType( + name='PrincipalSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='nodes', type=gql.ArrayType(Principal)), + gql.Field(name='pageSize', type=gql.Integer), + gql.Field(name='nextPage', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='previousPage', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + ], +) diff --git a/backend/dataall/modules/dataset_sharing/aws/__init__.py b/backend/dataall/modules/dataset_sharing/aws/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/dataset_sharing/aws/glue_client.py b/backend/dataall/modules/dataset_sharing/aws/glue_client.py new file mode 100644 index 000000000..f110d0f89 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/aws/glue_client.py @@ -0,0 +1,132 @@ +import logging + +from botocore.exceptions import ClientError + +from dataall.base.aws.sts import SessionHelper + +log = logging.getLogger(__name__) + + +class GlueClient: + def __init__(self, account_id, region, database): + aws_session = SessionHelper.remote_session(accountid=account_id) + 
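+ # remote_session returns boto3 credentials scoped to the dataset's AWS account
+ # (typically by assuming the data.all pivot role), so every Glue call below runs
+ # against that account's catalog via CatalogId=account_id.
+ # Usage sketch (hypothetical location value; exact call sites may differ):
+ #   GlueClient(account_id, region, database).create_database(location='s3://example-bucket/prefix')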
self._client = aws_session.client('glue', region_name=region) + self._database = database + self._account_id = account_id + + def create_database(self, location): + try: + existing_database = self.database_exists() + if existing_database: + glue_database_created = True + else: + self._create_glue_database(location) + glue_database_created = True + return glue_database_created + except ClientError as e: + log.error( + f'Failed to create database {self._database} on account {self._account_id} due to {e}' + ) + raise e + + def _create_glue_database(self, location): + database = self._database + try: + db_input = { + 'Name': database, + 'Description': 'dataall database {} '.format(database), + 'CreateTableDefaultPermissions': [], + } + if location: + db_input['LocationUri'] = location + log.info(f'Creating Glue database with input: {db_input}') + response = self._client.create_database(CatalogId=self._account_id, DatabaseInput=db_input) + log.info(f'response Create Database: {response}') + return response + except ClientError as e: + log.debug(f'Failed to create database {database}', e) + raise e + + def database_exists(self): + try: + self._client.get_database(CatalogId=self._account_id, Name=self._database) + return True + except ClientError: + log.info(f'Database {self._database} does not exist on account {self._account_id}...') + return False + + def table_exists(self, table_name): + try: + table = ( + self._client.get_table( + CatalogId=self._account_id, DatabaseName=self._database, Name=table_name + ) + ) + log.info(f'Glue table found: {table_name}') + return table + except ClientError: + log.info(f'Glue table not found: {table_name}') + return None + + def delete_table(self, table_name): + database = self._database + log.info( + 'Deleting table {} in database {}'.format( + table_name, database + ) + ) + response = self._client.delete_table( + CatalogId=self._account_id, + DatabaseName=database, + Name=table_name + ) + + return response + + def create_resource_link(self, resource_link_name, resource_link_input): + account_id = self._account_id + database = self._database + + log.info( + f'Creating ResourceLink {resource_link_name} in database {account_id}://{database}' + ) + try: + resource_link = self.table_exists(resource_link_name) + if resource_link: + log.info( + f'ResourceLink {resource_link_name} already exists in database {account_id}://{database}' + ) + else: + resource_link = self._client.create_table( + CatalogId=account_id, + DatabaseName=database, + TableInput=resource_link_input, + ) + log.info( + f'Successfully created ResourceLink {resource_link_name} in database {account_id}://{database}' + ) + return resource_link + except ClientError as e: + log.error( + f'Could not create ResourceLink {resource_link_name} ' + f'in database {account_id}://{database} ' + f'due to: {e}' + ) + raise e + + def delete_database(self): + account_id = self._account_id + database = self._database + + log.info(f'Deleting database {account_id}://{database} ...') + try: + if self.database_exists(): + self._client.delete_database(CatalogId=account_id, Name=database) + return True + except ClientError as e: + log.error( + f'Could not delete database {database} ' + f'in account {account_id} ' + f'due to: {e}' + ) + raise e diff --git a/backend/dataall/modules/dataset_sharing/aws/kms_client.py b/backend/dataall/modules/dataset_sharing/aws/kms_client.py new file mode 100644 index 000000000..645df769a --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/aws/kms_client.py @@ -0,0 +1,54 @@ +import 
logging + +from dataall.base.aws.sts import SessionHelper + +log = logging.getLogger(__name__) + + +class KmsClient: + _DEFAULT_POLICY_NAME = "default" + + def __init__(self, account_id: str, region: str): + session = SessionHelper.remote_session(accountid=account_id) + self._client = session.client('kms', region_name=region) + self._account_id = account_id + + def put_key_policy(self, key_id: str, policy: str): + try: + self._client.put_key_policy( + KeyId=key_id, + PolicyName=self._DEFAULT_POLICY_NAME, + Policy=policy, + ) + except Exception as e: + log.error( + f'Failed to attach policy to KMS key {key_id} on {self._account_id} : {e} ' + ) + raise e + + def get_key_policy(self, key_id: str): + try: + response = self._client.get_key_policy( + KeyId=key_id, + PolicyName=self._DEFAULT_POLICY_NAME, + ) + except Exception as e: + log.error( + f'Failed to get kms key policy of key {key_id} : {e}' + ) + return None + else: + return response['Policy'] + + def get_key_id(self, key_alias: str): + try: + response = self._client.describe_key( + KeyId=key_alias, + ) + except Exception as e: + log.error( + f'Failed to get kms key id of {key_alias} : {e}' + ) + return None + else: + return response['KeyMetadata']['KeyId'] diff --git a/backend/dataall/modules/dataset_sharing/aws/lakeformation_client.py b/backend/dataall/modules/dataset_sharing/aws/lakeformation_client.py new file mode 100644 index 000000000..f506742c1 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/aws/lakeformation_client.py @@ -0,0 +1,299 @@ +import logging +import uuid + +from botocore.exceptions import ClientError + +from dataall.base.aws.sts import SessionHelper + +log = logging.getLogger('aws:lakeformation') + + +class LakeFormationClient: + def __init__(self): + pass + + @staticmethod + def grant_pivot_role_all_database_permissions(accountid, region, database): + LakeFormationClient.grant_permissions_to_database( + client=SessionHelper.remote_session(accountid=accountid).client( + 'lakeformation', region_name=region + ), + principals=[SessionHelper.get_delegation_role_arn(accountid)], + database_name=database, + permissions=['ALL'], + ) + + @staticmethod + def grant_permissions_to_database( + client, + principals, + database_name, + permissions, + ): + for principal in principals: + log.info( + f'Granting database permissions {permissions} to {principal} on database {database_name}' + ) + try: + client.grant_permissions( + Principal={'DataLakePrincipalIdentifier': principal}, + Resource={ + 'Database': {'Name': database_name}, + }, + Permissions=permissions, + ) + log.info( + f'Successfully granted principal {principal} permissions {permissions} ' + f'to {database_name}' + ) + except ClientError as e: + log.error( + f'Could not grant permissions ' + f'principal {principal} ' + f'{permissions} to database {database_name} due to: {e}' + ) + + @staticmethod + def grant_permissions_to_table( + client, + principal, + database_name, + table_name, + permissions, + permissions_with_grant_options=None, + ): + try: + grant_dict = dict( + Principal={'DataLakePrincipalIdentifier': principal}, + Resource={'Table': {'DatabaseName': database_name, 'Name': table_name}}, + Permissions=permissions, + ) + if permissions_with_grant_options: + grant_dict[ + 'PermissionsWithGrantOption' + ] = permissions_with_grant_options + + response = client.grant_permissions(**grant_dict) + + log.info( + f'Successfully granted principal {principal} permissions {permissions} ' + f'to {database_name}.{table_name}: {response}' + ) + except ClientError as 
e: + log.warning( + f'Could not grant principal {principal} ' + f'permissions {permissions} to table ' + f'{database_name}.{table_name} due to: {e}' + ) + # raise e + + @staticmethod + def revoke_iamallowedgroups_super_permission_from_table( + client, accountid, database, table + ): + """ + When upgrading to LF tables can still have IAMAllowedGroups permissions + Unless this is revoked the table can not be shared using LakeFormation + :param client: + :param accountid: + :param database: + :param table: + :return: + """ + try: + log.info( + f'Revoking IAMAllowedGroups Super ' + f'permission for table {database}|{table}' + ) + LakeFormationClient.batch_revoke_permissions( + client, + accountid, + entries=[ + { + 'Id': str(uuid.uuid4()), + 'Principal': {'DataLakePrincipalIdentifier': 'EVERYONE'}, + 'Resource': { + 'Table': { + 'DatabaseName': database, + 'Name': table, + 'CatalogId': accountid, + } + }, + 'Permissions': ['ALL'], + 'PermissionsWithGrantOption': [], + } + ], + ) + except ClientError as e: + log.debug( + f'Could not revoke IAMAllowedGroups Super ' + f'permission on table {database}|{table} due to {e}' + ) + + @staticmethod + def batch_revoke_permissions(client, accountid, entries): + """ + Batch revoke permissions to entries + Retry is set for api throttling + :param client: + :param accountid: + :param entries: + :return: + """ + log.info(f'Batch Revoking {entries}') + entries_chunks: list = [entries[i : i + 20] for i in range(0, len(entries), 20)] + failures = [] + try: + for entries_chunk in entries_chunks: + response = client.batch_revoke_permissions( + CatalogId=accountid, Entries=entries_chunk + ) + log.info(f'Batch Revoke response: {response}') + failures.extend(response.get('Failures')) + + for failure in failures: + if not ( + failure['Error']['ErrorCode'] == 'InvalidInputException' + and ( + 'Grantee has no permissions' in failure['Error']['ErrorMessage'] + or 'No permissions revoked' in failure['Error']['ErrorMessage'] + or 'not found' in failure['Error']['ErrorMessage'] + ) + ): + raise ClientError( + error_response={ + 'Error': { + 'Code': 'LakeFormationClient.batch_revoke_permissions', + 'Message': f'Operation ended with failures: {failures}', + } + }, + operation_name='LakeFormationClient.batch_revoke_permissions', + ) + + except ClientError as e: + log.warning(f'Batch Revoke ended with failures: {failures}') + raise e + + @staticmethod + def grant_resource_link_permission_on_target(client, source, target): + for principal in target['principals']: + try: + table_grant = dict( + Principal={'DataLakePrincipalIdentifier': principal}, + Resource={ + 'TableWithColumns': { + 'DatabaseName': source['database'], + 'Name': source['tablename'], + 'ColumnWildcard': {}, + 'CatalogId': source['accountid'], + } + }, + Permissions=['DESCRIBE', 'SELECT'], + PermissionsWithGrantOption=[], + ) + client.grant_permissions(**table_grant) + log.info( + f'Successfully granted permissions DESCRIBE,SELECT to {principal} on target ' + f'{source["accountid"]}://{source["database"]}/{source["tablename"]}' + ) + except ClientError as e: + logging.error( + f'Failed granting principal {principal} ' + 'read access to resource link on target' + f' {source["accountid"]}://{source["database"]}/{source["tablename"]} ' + f'due to: {e}' + ) + raise e + + @staticmethod + def grant_resource_link_permission(client, source, target, target_database): + for principal in target['principals']: + resourcelink_grant = dict( + Principal={'DataLakePrincipalIdentifier': principal}, + Resource={ + 'Table': { + 
'DatabaseName': target_database, + 'Name': source['tablename'], + 'CatalogId': target['accountid'], + } + }, + # Resource link only supports DESCRIBE and DROP permissions no SELECT + Permissions=['DESCRIBE'], + ) + try: + client.grant_permissions(**resourcelink_grant) + log.info( + f'Granted resource link DESCRIBE access ' + f'to principal {principal} on {target["accountid"]}://{target_database}/{source["tablename"]}' + ) + except ClientError as e: + logging.error( + f'Failed granting principal {principal} ' + f'read access to resource link on {target["accountid"]}://{target_database}/{source["tablename"]} ' + f'due to: {e}' + ) + raise e + + @staticmethod + def revoke_source_table_access(**data): + """ + Revokes permissions for a principal in a cross account sharing setup + Parameters + ---------- + data : + + Returns + ------- + + """ + logging.info(f'Revoking source table access: {data} ...') + target_accountid = data['target_accountid'] + region = data['region'] + target_principals = data['target_principals'] + source_database = data['source_database'] + source_table = data['source_table'] + source_accountid = data['source_accountid'] + for target_principal in target_principals: + try: + + aws_session = SessionHelper.remote_session(target_accountid) + lakeformation = aws_session.client('lakeformation', region_name=region) + + logging.info('Revoking DESCRIBE permission...') + lakeformation.revoke_permissions( + Principal=dict(DataLakePrincipalIdentifier=target_principal), + Resource=dict( + Table=dict( + CatalogId=source_accountid, + DatabaseName=source_database, + Name=source_table, + ) + ), + Permissions=['DESCRIBE'], + PermissionsWithGrantOption=[], + ) + logging.info('Successfully revoked DESCRIBE permissions') + + logging.info('Revoking SELECT permission...') + lakeformation.revoke_permissions( + Principal=dict(DataLakePrincipalIdentifier=target_principal), + Resource=dict( + TableWithColumns=dict( + CatalogId=source_accountid, + DatabaseName=source_database, + Name=source_table, + ColumnWildcard={}, + ) + ), + Permissions=['SELECT'], + PermissionsWithGrantOption=[], + ) + logging.info('Successfully revoked DESCRIBE permissions') + + except ClientError as e: + logging.error( + f'Failed to revoke permissions for {target_principal} ' + f'on source table {source_accountid}/{source_database}/{source_table} ' + f'due to: {e}' + ) + raise e diff --git a/backend/dataall/modules/dataset_sharing/aws/ram_client.py b/backend/dataall/modules/dataset_sharing/aws/ram_client.py new file mode 100644 index 000000000..b210314ac --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/aws/ram_client.py @@ -0,0 +1,161 @@ +import logging +import time + +from botocore.exceptions import ClientError + +from dataall.base.aws.sts import SessionHelper + +log = logging.getLogger('aws:ram') + + +class RamClient: + def __init__(self, account_id, region): + session = SessionHelper.remote_session(accountid=account_id) + self._client = session.client('ram', region_name=region) + self._account_id = account_id + + def _get_resource_share_invitations( + self, resource_share_arns, receiver_account + ): + sender_account = self._account_id + log.info(f'Listing invitations for resourceShareArns: {resource_share_arns}') + try: + resource_share_invitations = [] + + paginator = self._client.get_paginator('get_resource_share_invitations') + invitation_pages = paginator.paginate(resourceShareArns=resource_share_arns) + for page in invitation_pages: + resource_share_invitations.extend(page.get('resourceShareInvitations')) + 
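+ # Narrow the paginated results to invitations exchanged between the sharing
+ # (sender/source) account and the receiving (target) account.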
+ filtered_invitations = [ + i + for i in resource_share_invitations + if i['senderAccountId'] == sender_account + and i['receiverAccountId'] == receiver_account + ] + return filtered_invitations + except ClientError as e: + log.error( + f'Failed retrieving RAM resource ' + f'share invitations {resource_share_arns} due to {e}' + ) + raise e + + def _accept_resource_share_invitation(self, resource_share_invitation_arn): + try: + response = self._client.accept_resource_share_invitation( + resourceShareInvitationArn=resource_share_invitation_arn + ) + log.info(f'Accepted ram invitation {resource_share_invitation_arn}') + return response.get('resourceShareInvitation') + except ClientError as e: + if ( + e.response['Error']['Code'] + == 'ResourceShareInvitationAlreadyAcceptedException' + ): + log.info( + f'Failed to accept RAM invitation ' + f'{resource_share_invitation_arn} already accepted' + ) + else: + log.error( + f'Failed to accept RAM invitation ' + f'{resource_share_invitation_arn} due to {e}' + ) + raise e + + @staticmethod + def accept_ram_invitation(**data): + """ + Accepts RAM invitations on the target account + """ + retry_share_table = False + failed_invitations = [] + source = data['source'] + target = data['target'] + + if source['accountid'] == target['accountid']: + log.debug('Skipping RAM invitation management for same account sharing.') + return True + + source_ram = RamClient(source['accountid'], target['region']) + target_ram = RamClient(target['accountid'], target['region']) + + resource_arn = ( + f'arn:aws:glue:{source["region"]}:{source["accountid"]}:' + f'table/{data["source"]["database"]}/{data["source"]["tablename"]}' + ) + associations = source_ram._list_resource_share_associations(resource_arn) + resource_share_arns = [a['resourceShareArn'] for a in associations] + + ram_invitations = target_ram._get_resource_share_invitations( + resource_share_arns, source['accountid'], + ) + log.info( + f'Found {len(ram_invitations)} RAM invitations for resourceShareArn: {resource_share_arns}' + ) + for invitation in ram_invitations: + if 'LakeFormation' in invitation['resourceShareName']: + if invitation['status'] == 'PENDING': + log.info( + f'Invitation {invitation} is in PENDING status accepting it ...' + ) + target_ram._accept_resource_share_invitation( + invitation['resourceShareInvitationArn'] + ) + # Ram invitation acceptance is slow + time.sleep(5) + elif ( + invitation['status'] == 'EXPIRED' + or invitation['status'] == 'REJECTED' + ): + log.warning( + f'Invitation {invitation} has expired or was rejected. ' + 'Table flagged for revoke re-share.' + 'Deleting the resource share to reset the invitation... ' + ) + failed_invitations.append(invitation) + retry_share_table = True + source_ram._delete_resource_share( + resource_share_arn=invitation['resourceShareArn'] + ) + + elif invitation['status'] == 'ACCEPTED': + log.info( + f'Invitation {invitation} already accepted nothing to do ...' + ) + else: + log.warning( + f'Invitation is in an unknown status adding {invitation["status"]}. ' + 'Adding it to retry share list ...' 
+ ) + + return retry_share_table, failed_invitations + + def _list_resource_share_associations(self, resource_arn): + associations = [] + try: + log.debug(f'RAM list_resource_share_associations : {resource_arn}') + + paginator = self._client.get_paginator( + 'get_resource_share_associations' + ).paginate( + associationType='RESOURCE', + resourceArn=resource_arn, + ) + for page in paginator: + associations.extend(page['resourceShareAssociations']) + + log.info(f'Found resource_share_associations : {associations}') + return associations + + except ClientError as e: + log.error( + f'Could not find resource share associations for resource {resource_arn} due to: {e}' + ) + raise e + + def _delete_resource_share(self, resource_share_arn): + self._client.delete_resource_share( + resource_share_arn=resource_share_arn + ) diff --git a/backend/dataall/modules/dataset_sharing/aws/s3_client.py b/backend/dataall/modules/dataset_sharing/aws/s3_client.py new file mode 100755 index 000000000..78b0296ce --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/aws/s3_client.py @@ -0,0 +1,159 @@ +import logging + +from dataall.base.aws.sts import SessionHelper + +log = logging.getLogger(__name__) + + +class S3ControlClient: + def __init__(self, account_id: str, region: str): + session = SessionHelper.remote_session(accountid=account_id) + self._client = session.client('s3control', region_name=region) + self._account_id = account_id + + def get_bucket_access_point_arn(self, access_point_name: str): + try: + access_point = self._client.get_access_point( + AccountId=self._account_id, + Name=access_point_name, + ) + except Exception as e: + log.info( + f'Failed to get S3 bucket access point {access_point_name} on {self._account_id} : {e}' + ) + return None + else: + return access_point["AccessPointArn"] + + def create_bucket_access_point(self, bucket_name: str, access_point_name: str): + try: + access_point = self._client.create_access_point( + AccountId=self._account_id, + Name=access_point_name, + Bucket=bucket_name, + ) + except Exception as e: + log.error( + f'S3 bucket access point creation failed for location {bucket_name} : {e}' + ) + raise e + else: + return access_point["AccessPointArn"] + + def delete_bucket_access_point(self, access_point_name: str): + try: + self._client.delete_access_point( + AccountId=self._account_id, + Name=access_point_name, + ) + except Exception as e: + log.error( + f'Failed to delete S3 bucket access point {access_point_name}/{self._account_id} : {e}' + ) + raise e + + def get_access_point_policy(self, access_point_name: str): + try: + response = self._client.get_access_point_policy( + AccountId=self._account_id, + Name=access_point_name, + ) + except Exception as e: + log.info( + f'Failed to get policy of access point {access_point_name} on {self._account_id} : {e}' + ) + return None + else: + return response['Policy'] + + def attach_access_point_policy(self, access_point_name: str, policy: str): + try: + self._client.put_access_point_policy( + AccountId=self._account_id, + Name=access_point_name, + Policy=policy + ) + except Exception as e: + log.error( + f'S3 bucket access point policy creation failed : {e}' + ) + raise e + + @staticmethod + def generate_access_point_policy_template( + principal_id: str, + access_point_arn: str, + s3_prefix: str, + ): + policy = { + 'Version': '2012-10-17', + "Statement": [ + { + "Sid": f"{principal_id}0", + "Effect": "Allow", + "Principal": { + "AWS": "*" + }, + "Action": "s3:ListBucket", + "Resource": f"{access_point_arn}", + 
"Condition": { + "StringLike": { + "s3:prefix": [f"{s3_prefix}/*"], + "aws:userId": [f"{principal_id}:*"] + } + } + }, + { + "Sid": f"{principal_id}1", + "Effect": "Allow", + "Principal": { + "AWS": "*" + }, + "Action": "s3:GetObject", + "Resource": [f"{access_point_arn}/object/{s3_prefix}/*"], + "Condition": { + "StringLike": { + "aws:userId": [f"{principal_id}:*"] + } + } + } + ] + } + return policy + + +class S3Client: + def __init__(self, account_id, region): + session = SessionHelper.remote_session(accountid=account_id) + self._client = session.client('s3', region_name=region) + self._account_id = account_id + + def create_bucket_policy(self, bucket_name: str, policy: str): + try: + s3cli = self._client + s3cli.put_bucket_policy( + Bucket=bucket_name, + Policy=policy, + ConfirmRemoveSelfBucketAccess=False, + ExpectedBucketOwner=self._account_id, + ) + log.info( + f'Created bucket policy of {bucket_name} on {self._account_id} successfully' + ) + except Exception as e: + log.error( + f'Bucket policy created failed on bucket {bucket_name} of {self._account_id} : {e}' + ) + raise e + + def get_bucket_policy(self, bucket_name: str): + try: + s3cli = self._client + response = s3cli.get_bucket_policy(Bucket=bucket_name, ExpectedBucketOwner=self._account_id) + except Exception as e: + log.warning( + f'Failed to get bucket policy of {bucket_name} : {e}' + ) + return None + else: + return response['Policy'] diff --git a/backend/dataall/modules/dataset_sharing/cdk/__init__.py b/backend/dataall/modules/dataset_sharing/cdk/__init__.py new file mode 100644 index 000000000..4081eb8be --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/cdk/__init__.py @@ -0,0 +1,3 @@ +from dataall.modules.dataset_sharing.cdk import pivot_role_data_sharing_policy + +__all__ = ["pivot_role_data_sharing_policy"] diff --git a/backend/dataall/modules/dataset_sharing/cdk/pivot_role_data_sharing_policy.py b/backend/dataall/modules/dataset_sharing/cdk/pivot_role_data_sharing_policy.py new file mode 100644 index 000000000..8e5d76286 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/cdk/pivot_role_data_sharing_policy.py @@ -0,0 +1,102 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class DataSharingPivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with Athena + It allows pivot role to: + - .... 
+ """ + def get_statements(self): + statements = [ + # For access point sharing and S3 bucket sharing + iam.PolicyStatement( + sid='IAMRolePolicy', + effect=iam.Effect.ALLOW, + actions=[ + 'iam:PutRolePolicy', + 'iam:DeleteRolePolicy' + ], + resources=['*'], + ), + iam.PolicyStatement( + sid='ManagedAccessPoints', + effect=iam.Effect.ALLOW, + actions=[ + 's3:GetAccessPoint', + 's3:GetAccessPointPolicy', + 's3:ListAccessPoints', + 's3:CreateAccessPoint', + 's3:DeleteAccessPoint', + 's3:GetAccessPointPolicyStatus', + 's3:DeleteAccessPointPolicy', + 's3:PutAccessPointPolicy', + ], + resources=[f'arn:aws:s3:*:{self.account}:accesspoint/*'], + ), + # For LakeFormation named-resource sharing + iam.PolicyStatement( + sid='RamTag', + effect=iam.Effect.ALLOW, + actions=['ram:TagResource'], + resources=['*'], + conditions={'ForAllValues:StringLike': {'ram:ResourceShareName': ['LakeFormation*']}}, + ), + iam.PolicyStatement( + sid='RamCreateResource', + effect=iam.Effect.ALLOW, + actions=['ram:CreateResourceShare'], + resources=['*'], + conditions={ + 'ForAllValues:StringEquals': { + 'ram:RequestedResourceType': ['glue:Table', 'glue:Database', 'glue:Catalog'] + } + }, + ), + iam.PolicyStatement( + sid='RamUpdateResource', + effect=iam.Effect.ALLOW, + actions=['ram:UpdateResourceShare'], + resources=[f'arn:aws:ram:*:{self.account}:resource-share/*'], + conditions={ + 'ForAllValues:StringLike': {'ram:ResourceShareName': ['LakeFormation*']}, + }, + ), + iam.PolicyStatement( + sid='RamAssociateResource', + effect=iam.Effect.ALLOW, + actions=[ + 'ram:AssociateResourceShare', + 'ram:DisassociateResourceShare' + ], + resources=[f'arn:aws:ram:*:{self.account}:resource-share/*'], + conditions={'ForAllValues:StringLike': {'ram:ResourceShareName': ['LakeFormation*']}}, + ), + iam.PolicyStatement( + sid='RamDeleteResource', + effect=iam.Effect.ALLOW, + actions=['ram:DeleteResourceShare'], + resources=[f'arn:aws:ram:*:{self.account}:resource-share/*'] + ), + iam.PolicyStatement( + sid='RamInvitations', + effect=iam.Effect.ALLOW, + actions=[ + 'ram:AcceptResourceShareInvitation', + 'ram:RejectResourceShareInvitation', + 'ram:EnableSharingWithAwsOrganization', + ], + resources=['*'], + ), + iam.PolicyStatement( + sid='RamRead', + effect=iam.Effect.ALLOW, + actions=[ + 'ram:Get*', + 'ram:List*' + ], + resources=['*'], + ) + ] + return statements diff --git a/backend/dataall/modules/dataset_sharing/db/__init__.py b/backend/dataall/modules/dataset_sharing/db/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/dataset_sharing/db/enums.py b/backend/dataall/modules/dataset_sharing/db/enums.py new file mode 100644 index 000000000..b46c35e9a --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/db/enums.py @@ -0,0 +1,69 @@ +from enum import Enum + + +class ShareObjectStatus(Enum): + Deleted = 'Deleted' + Approved = 'Approved' + Rejected = 'Rejected' + Revoked = 'Revoked' + Draft = 'Draft' + Submitted = 'Submitted' + Revoke_In_Progress = 'Revoke_In_Progress' + Share_In_Progress = 'Share_In_Progress' + Processed = 'Processed' + + +class ShareObjectPermission(Enum): + Approvers = '999' + Requesters = '800' + DatasetAdmins = '700' + NoPermission = '000' + + +class ShareItemStatus(Enum): + Deleted = 'Deleted' + PendingApproval = 'PendingApproval' + Share_Approved = 'Share_Approved' + Share_Rejected = 'Share_Rejected' + Share_In_Progress = 'Share_In_Progress' + Share_Succeeded = 'Share_Succeeded' + Share_Failed = 'Share_Failed' + Revoke_Approved = 'Revoke_Approved' + 
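Stepping back to the S3ControlClient introduced in s3_client.py above: as a reading aid (not part of the patch), a minimal sketch of how folder sharing through an access point might be wired together. The bucket, access point and prefix names are invented, and principal_id is assumed to be the unique RoleId of the requester IAM role, which is what the aws:userId condition in the generated policy matches against.

import json

from dataall.modules.dataset_sharing.aws.s3_client import S3ControlClient

s3_control = S3ControlClient(account_id='111111111111', region='eu-west-1')

# Reuse the access point if it already exists, otherwise create it for the dataset bucket.
access_point_arn = s3_control.get_bucket_access_point_arn('dataset-orders-ap')
if not access_point_arn:
    access_point_arn = s3_control.create_bucket_access_point(
        bucket_name='dataall-dataset-orders', access_point_name='dataset-orders-ap'
    )

# The template restricts ListBucket/GetObject to the shared prefix and the requester role.
policy = S3ControlClient.generate_access_point_policy_template(
    principal_id='AROAEXAMPLEROLEID',   # hypothetical RoleId of the requester IAM role
    access_point_arn=access_point_arn,
    s3_prefix='orders',
)
s3_control.attach_access_point_policy('dataset-orders-ap', json.dumps(policy))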
Revoke_In_Progress = 'Revoke_In_Progress' + Revoke_Failed = 'Revoke_Failed' + Revoke_Succeeded = 'Revoke_Succeeded' + + +class ShareObjectActions(Enum): + Submit = 'Submit' + Approve = 'Approve' + Reject = 'Reject' + RevokeItems = 'RevokeItems' + Start = 'Start' + Finish = 'Finish' + FinishPending = 'FinishPending' + Delete = 'Delete' + + +class ShareItemActions(Enum): + AddItem = 'AddItem' + RemoveItem = 'RemoveItem' + Failure = 'Failure' + Success = 'Success' + + +class ShareableType(Enum): + Table = 'DatasetTable' + StorageLocation = 'DatasetStorageLocation' + View = 'View' + + +class PrincipalType(Enum): + Any = 'Any' + Organization = 'Organization' + Environment = 'Environment' + User = 'User' + Project = 'Project' + Public = 'Public' + Group = 'Group' + ConsumptionRole = 'ConsumptionRole' diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_models.py b/backend/dataall/modules/dataset_sharing/db/share_object_models.py new file mode 100644 index 000000000..42a69a861 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/db/share_object_models.py @@ -0,0 +1,60 @@ +from datetime import datetime, timedelta +from uuid import uuid4 + +from sqlalchemy import Boolean, Column, String, DateTime +from sqlalchemy.orm import query_expression + +from dataall.base.db import Base, utils +from dataall.modules.dataset_sharing.db.enums import ShareObjectStatus, ShareItemStatus + + +def in_one_month(): + return datetime.now() + timedelta(days=31) + + +def _uuid4(): + return str(uuid4()) + + +class ShareObject(Base): + __tablename__ = 'share_object' + shareUri = Column( + String, nullable=False, primary_key=True, default=utils.uuid('share') + ) + datasetUri = Column(String, nullable=False) + environmentUri = Column(String) + groupUri = Column(String) + principalIAMRoleName = Column(String, nullable=True) + principalId = Column(String, nullable=True) + principalType = Column(String, nullable=True, default='Group') + status = Column(String, nullable=False, default=ShareObjectStatus.Draft.value) + owner = Column(String, nullable=False) + created = Column(DateTime, default=datetime.now) + updated = Column(DateTime, onupdate=datetime.now) + deleted = Column(DateTime) + confirmed = Column(Boolean, default=False) + requestPurpose = Column(String, nullable=True) + rejectPurpose = Column(String, nullable=True) + userRoleForShareObject = query_expression() + existingSharedItems = query_expression() + + +class ShareObjectItem(Base): + __tablename__ = 'share_object_item' + shareUri = Column(String, nullable=False) + shareItemUri = Column( + String, default=utils.uuid('shareitem'), nullable=False, primary_key=True + ) + itemType = Column(String, nullable=False) + itemUri = Column(String, nullable=False) + itemName = Column(String, nullable=False) + permission = Column(String, nullable=True) + created = Column(DateTime, nullable=False, default=datetime.now) + updated = Column(DateTime, nullable=True, onupdate=datetime.now) + deleted = Column(DateTime, nullable=True) + owner = Column(String, nullable=False) + GlueDatabaseName = Column(String, nullable=True) + GlueTableName = Column(String, nullable=True) + S3AccessPointName = Column(String, nullable=True) + status = Column(String, nullable=False, default=ShareItemStatus.PendingApproval.value) + action = Column(String, nullable=True) diff --git a/backend/dataall/modules/dataset_sharing/db/share_object_repositories.py b/backend/dataall/modules/dataset_sharing/db/share_object_repositories.py new file mode 100644 index 000000000..2841265be --- /dev/null 
+++ b/backend/dataall/modules/dataset_sharing/db/share_object_repositories.py @@ -0,0 +1,1162 @@ +import logging +from typing import List + +from sqlalchemy import and_, or_, func, case +from sqlalchemy.orm import Query + +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup +from dataall.core.environment.services.environment_resource_manager import EnvironmentResource +from dataall.core.organizations.db.organization_models import Organization +from dataall.base.db import exceptions, paginate +from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareObjectStatus, ShareItemActions, \ + ShareItemStatus, ShareableType, PrincipalType +from dataall.modules.dataset_sharing.db.share_object_models import ShareObjectItem, ShareObject +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.db.dataset_models import DatasetStorageLocation, DatasetTable, Dataset + +logger = logging.getLogger(__name__) + + +class Transition: + def __init__(self, name, transitions): + self._name = name + self._transitions = transitions + self._all_source_states = [*set([item for sublist in transitions.values() for item in sublist])] + self._all_target_states = [item for item in transitions.keys()] + + def validate_transition(self, prev_state): + if prev_state in self._all_target_states: + logger.info(f'Resource is already in target state ({prev_state}) in {self._all_target_states}') + return False + elif prev_state not in self._all_source_states: + raise exceptions.UnauthorizedOperation( + action=self._name, + message=f'This transition is not possible, {prev_state} cannot go to {self._all_target_states}. ' + f'If there is a sharing or revoking in progress wait until it is complete and try again.', + ) + else: + return True + + def get_transition_target(self, prev_state): + if self.validate_transition(prev_state): + for target_state, list_prev_states in self._transitions.items(): + if prev_state in list_prev_states: + return target_state + else: + pass + else: + return prev_state + + +class ShareObjectSM: + def __init__(self, state): + self._state = state + self.transitionTable = { + ShareObjectActions.Submit.value: Transition( + name=ShareObjectActions.Submit.value, + transitions={ + ShareObjectStatus.Submitted.value: [ + ShareObjectStatus.Draft.value, + ShareObjectStatus.Rejected.value + ] + } + ), + ShareObjectActions.Approve.value: Transition( + name=ShareObjectActions.Approve.value, + transitions={ + ShareObjectStatus.Approved.value: [ + ShareObjectStatus.Submitted.value + ] + } + ), + ShareObjectActions.Reject.value: Transition( + name=ShareObjectActions.Reject.value, + transitions={ + ShareObjectStatus.Rejected.value: [ + ShareObjectStatus.Submitted.value + ] + } + ), + ShareObjectActions.RevokeItems.value: Transition( + name=ShareObjectActions.RevokeItems.value, + transitions={ + ShareObjectStatus.Revoked.value: [ + ShareObjectStatus.Draft.value, + ShareObjectStatus.Submitted.value, + ShareObjectStatus.Rejected.value, + ShareObjectStatus.Processed.value + ] + } + ), + ShareObjectActions.Start.value: Transition( + name=ShareObjectActions.Start.value, + transitions={ + ShareObjectStatus.Share_In_Progress.value: [ + ShareObjectStatus.Approved.value + ], + ShareObjectStatus.Revoke_In_Progress.value: [ + ShareObjectStatus.Revoked.value + ] + } + ), + ShareObjectActions.Finish.value: Transition( + name=ShareObjectActions.Finish.value, + transitions={ + ShareObjectStatus.Processed.value: [ + 
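For orientation (not part of the patch), a small example of how the Transition helper above behaves: each action maps target states to the source states they can be reached from, get_transition_target() returns the mapped target, no-ops when the current state is already a target, and raises UnauthorizedOperation otherwise. The action and state names below are placeholders.

submit = Transition(
    name='Submit',
    transitions={'Submitted': ['Draft', 'Rejected']},
)

submit.get_transition_target('Draft')      # returns 'Submitted' (valid transition)
submit.get_transition_target('Submitted')  # already a target state: no-op, returns 'Submitted'
submit.get_transition_target('Deleted')    # not a valid source state: raises UnauthorizedOperation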
ShareObjectStatus.Share_In_Progress.value, + ShareObjectStatus.Revoke_In_Progress.value + ], + } + ), + ShareObjectActions.FinishPending.value: Transition( + name=ShareObjectActions.FinishPending.value, + transitions={ + ShareObjectStatus.Draft.value: [ + ShareObjectStatus.Revoke_In_Progress.value, + ], + } + ), + ShareObjectActions.Delete.value: Transition( + name=ShareObjectActions.Delete.value, + transitions={ + ShareObjectStatus.Deleted.value: [ + ShareObjectStatus.Rejected.value, + ShareObjectStatus.Draft.value, + ShareObjectStatus.Submitted.value, + ShareObjectStatus.Processed.value + ] + } + ), + ShareItemActions.AddItem.value: Transition( + name=ShareItemActions.AddItem.value, + transitions={ + ShareObjectStatus.Draft.value: [ + ShareObjectStatus.Submitted.value, + ShareObjectStatus.Rejected.value, + ShareObjectStatus.Processed.value + ] + } + ), + } + + def run_transition(self, transition): + trans = self.transitionTable[transition] + new_state = trans.get_transition_target(self._state) + return new_state + + def update_state(self, session, share, new_state): + logger.info(f"Updating share object {share.shareUri} in DB from {self._state} to state {new_state}") + ShareObjectRepository.update_share_object_status( + session=session, + share_uri=share.shareUri, + status=new_state + ) + self._state = new_state + return True + + +class ShareItemSM: + def __init__(self, state): + self._state = state + self.transitionTable = { + ShareItemActions.AddItem.value: Transition( + name=ShareItemActions.AddItem.value, + transitions={ + ShareItemStatus.PendingApproval.value: [ShareItemStatus.Deleted.value] + } + ), + ShareObjectActions.Submit.value: Transition( + name=ShareObjectActions.Submit.value, + transitions={ + ShareItemStatus.PendingApproval.value: [ + ShareItemStatus.Share_Rejected.value, + ShareItemStatus.Share_Failed.value + ], + ShareItemStatus.Revoke_Approved.value: [ShareItemStatus.Revoke_Approved.value], + ShareItemStatus.Revoke_Failed.value: [ShareItemStatus.Revoke_Failed.value], + ShareItemStatus.Share_Approved.value: [ShareItemStatus.Share_Approved.value], + ShareItemStatus.Share_Succeeded.value: [ShareItemStatus.Share_Succeeded.value], + ShareItemStatus.Revoke_Succeeded.value: [ShareItemStatus.Revoke_Succeeded.value], + ShareItemStatus.Share_In_Progress.value: [ShareItemStatus.Share_In_Progress.value], + ShareItemStatus.Revoke_In_Progress.value: [ShareItemStatus.Revoke_In_Progress.value], + } + ), + ShareObjectActions.Approve.value: Transition( + name=ShareObjectActions.Approve.value, + transitions={ + ShareItemStatus.Share_Approved.value: [ShareItemStatus.PendingApproval.value], + ShareItemStatus.Revoke_Approved.value: [ShareItemStatus.Revoke_Approved.value], + ShareItemStatus.Revoke_Failed.value: [ShareItemStatus.Revoke_Failed.value], + ShareItemStatus.Share_Succeeded.value: [ShareItemStatus.Share_Succeeded.value], + ShareItemStatus.Revoke_Succeeded.value: [ShareItemStatus.Revoke_Succeeded.value], + ShareItemStatus.Share_In_Progress.value: [ShareItemStatus.Share_In_Progress.value], + ShareItemStatus.Revoke_In_Progress.value: [ShareItemStatus.Revoke_In_Progress.value], + } + ), + ShareObjectActions.Reject.value: Transition( + name=ShareObjectActions.Reject.value, + transitions={ + ShareItemStatus.Share_Rejected.value: [ShareItemStatus.PendingApproval.value], + ShareItemStatus.Revoke_Approved.value: [ShareItemStatus.Revoke_Approved.value], + ShareItemStatus.Revoke_Failed.value: [ShareItemStatus.Revoke_Failed.value], + ShareItemStatus.Share_Succeeded.value: 
[ShareItemStatus.Share_Succeeded.value], + ShareItemStatus.Revoke_Succeeded.value: [ShareItemStatus.Revoke_Succeeded.value], + ShareItemStatus.Share_In_Progress.value: [ShareItemStatus.Share_In_Progress.value], + ShareItemStatus.Revoke_In_Progress.value: [ShareItemStatus.Revoke_In_Progress.value], + } + ), + ShareObjectActions.Start.value: Transition( + name=ShareObjectActions.Start.value, + transitions={ + ShareItemStatus.Share_In_Progress.value: [ShareItemStatus.Share_Approved.value], + ShareItemStatus.Revoke_In_Progress.value: [ShareItemStatus.Revoke_Approved.value], + } + ), + ShareItemActions.Success.value: Transition( + name=ShareItemActions.Success.value, + transitions={ + ShareItemStatus.Share_Succeeded.value: [ShareItemStatus.Share_In_Progress.value], + ShareItemStatus.Revoke_Succeeded.value: [ShareItemStatus.Revoke_In_Progress.value], + } + ), + ShareItemActions.Failure.value: Transition( + name=ShareItemActions.Failure.value, + transitions={ + ShareItemStatus.Share_Failed.value: [ShareItemStatus.Share_In_Progress.value], + ShareItemStatus.Revoke_Failed.value: [ShareItemStatus.Revoke_In_Progress.value], + } + ), + ShareItemActions.RemoveItem.value: Transition( + name=ShareItemActions.RemoveItem.value, + transitions={ + ShareItemStatus.Deleted.value: [ + ShareItemStatus.PendingApproval.value, + ShareItemStatus.Share_Rejected.value, + ShareItemStatus.Share_Failed.value, + ShareItemStatus.Revoke_Succeeded.value + ] + } + ), + ShareObjectActions.RevokeItems.value: Transition( + name=ShareObjectActions.RevokeItems.value, + transitions={ + ShareItemStatus.Revoke_Approved.value: [ + ShareItemStatus.Share_Succeeded.value, + ShareItemStatus.Revoke_Failed.value, + ShareItemStatus.Revoke_Approved.value + ] + } + ), + ShareObjectActions.Delete.value: Transition( + name=ShareObjectActions.Delete.value, + transitions={ + ShareItemStatus.Deleted.value: [ + ShareItemStatus.PendingApproval.value, + ShareItemStatus.Share_Rejected.value, + ShareItemStatus.Share_Failed.value, + ShareItemStatus.Revoke_Succeeded.value + ] + } + ) + } + + def run_transition(self, transition): + trans = self.transitionTable[transition] + new_state = trans.get_transition_target(self._state) + return new_state + + def update_state(self, session, share_uri, new_state): + if share_uri and (new_state != self._state): + if new_state == ShareItemStatus.Deleted.value: + logger.info(f"Deleting share items in DB in {self._state} state") + ShareObjectRepository.delete_share_item_status_batch( + session=session, + share_uri=share_uri, + status=self._state + ) + else: + logger.info(f"Updating share items in DB from {self._state} to state {new_state}") + ShareObjectRepository.update_share_item_status_batch( + session=session, + share_uri=share_uri, + old_status=self._state, + new_status=new_state + ) + self._state = new_state + else: + logger.info(f"Share Items in DB already in target state {new_state} or no update is required") + return True + + def update_state_single_item(self, session, share_item, new_state): + logger.info(f"Updating share item in DB {share_item.shareItemUri} status to {new_state}") + ShareObjectRepository.update_share_item_status( + session=session, + uri=share_item.shareItemUri, + status=new_state + ) + self._state = new_state + return True + + @staticmethod + def get_share_item_shared_states(): + return [ + ShareItemStatus.Share_Succeeded.value, + ShareItemStatus.Share_In_Progress.value, + ShareItemStatus.Revoke_Failed.value, + ShareItemStatus.Revoke_In_Progress.value, + ShareItemStatus.Revoke_Approved.value + 
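As an illustration (not part of the patch) of how these transition tables are meant to be driven, the happy path for share items roughly looks as follows; session and share_uri are assumed to be supplied by the calling service.

item_sm = ShareItemSM(ShareItemStatus.PendingApproval.value)

approved = item_sm.run_transition(ShareObjectActions.Approve.value)   # 'Share_Approved'
item_sm.update_state(session, share_uri, approved)                    # batch update in the DB

in_progress = item_sm.run_transition(ShareObjectActions.Start.value)  # 'Share_In_Progress'
item_sm.update_state(session, share_uri, in_progress)

succeeded = item_sm.run_transition(ShareItemActions.Success.value)    # 'Share_Succeeded'
item_sm.update_state(session, share_uri, succeeded)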
] + + @staticmethod + def get_share_item_revokable_states(): + return [ + ShareItemStatus.Share_Succeeded.value, + ShareItemStatus.Revoke_Failed.value, + ] + + +class ShareEnvironmentResource(EnvironmentResource): + @staticmethod + def count_resources(session, environment, group_uri) -> int: + return ShareObjectRepository.count_principal_shares(session, group_uri, PrincipalType.Group) + + @staticmethod + def count_role_resources(session, role_uri): + return ShareObjectRepository.count_principal_shares(session, role_uri, PrincipalType.ConsumptionRole) + + @staticmethod + def delete_env(session, environment): + ShareObjectRepository.delete_all_share_items(session, environment.environmentUri) + + +class ShareObjectRepository: + @staticmethod + def save_and_commit(session, share): + session.add(share) + session.commit() + + @staticmethod + def find_share(session, dataset: Dataset, env, principal_id, group_uri) -> ShareObject: + return ( + session.query(ShareObject) + .filter( + and_( + ShareObject.datasetUri == dataset.datasetUri, + ShareObject.principalId == principal_id, + ShareObject.environmentUri == env.environmentUri, + ShareObject.groupUri == group_uri, + ) + ) + .first() + ) + + @staticmethod + def get_share_item(session, item_type, item_uri): + if item_type == ShareableType.Table.value: + return session.query(DatasetTable).get(item_uri) + if item_type == ShareableType.StorageLocation.value: + return session.query(DatasetStorageLocation).get(item_uri) + + @staticmethod + def get_share_by_uri(session, uri): + share = session.query(ShareObject).get(uri) + if not share: + raise exceptions.ObjectNotFound('Share', uri) + return share + + @staticmethod + def get_share_by_dataset_attributes(session, dataset_uri, dataset_owner): + share: ShareObject = ( + session.query(ShareObject) + .filter(ShareObject.datasetUri == dataset_uri) + .filter(ShareObject.owner == dataset_owner) + .first() + ) + return share + + @staticmethod + def remove_share_object_item(session, share_item): + session.delete(share_item) + return True + + @staticmethod + def check_existing_shared_items(session, uri): + share: ShareObject = ShareObjectRepository.get_share_by_uri(session, uri) + share_item_shared_states = ShareItemSM.get_share_item_shared_states() + shared_items = session.query(ShareObjectItem).filter( + and_( + ShareObjectItem.shareUri == share.shareUri, + ShareObjectItem.status.in_(share_item_shared_states) + ) + ).all() + if shared_items: + return True + return False + + @staticmethod + def count_sharable_items(session, uri, share_type): + return ( + session.query(ShareObjectItem) + .filter( + and_( + ShareObjectItem.shareUri == uri, + ShareObjectItem.itemType == share_type, + ) + ) + .count() + ) + + @staticmethod + def find_sharable_item(session, share_uri, item_uri) -> ShareObjectItem: + return ( + session.query(ShareObjectItem) + .filter( + and_( + ShareObjectItem.itemUri == item_uri, + ShareObjectItem.shareUri == share_uri, + ) + ) + .first() + ) + + @staticmethod + def count_items_in_states(session, uri, states): + return ( + session.query(ShareObjectItem) + .filter( + and_( + ShareObjectItem.shareUri == uri, + ShareObjectItem.status.in_(states), + ) + ) + .count() + ) + + @staticmethod + def check_existing_shared_items_of_type(session, uri, item_type): + share: ShareObject = ShareObjectRepository.get_share_by_uri(session, uri) + share_item_shared_states = ShareItemSM.get_share_item_shared_states() + shared_items = session.query(ShareObjectItem).filter( + and_( + ShareObjectItem.shareUri == 
share.shareUri, + ShareObjectItem.itemType == item_type, + ShareObjectItem.status.in_(share_item_shared_states) + ) + ).all() + if shared_items: + return True + return False + + @staticmethod + def check_pending_share_items(session, uri): + share: ShareObject = ShareObjectRepository.get_share_by_uri(session, uri) + shared_items = session.query(ShareObjectItem).filter( + and_( + ShareObjectItem.shareUri == share.shareUri, + ShareObjectItem.status.in_([ShareItemStatus.PendingApproval.value]) + ) + ).all() + if shared_items: + return True + return False + + @staticmethod + def get_share_item_by_uri(session, uri): + share_item: ShareObjectItem = session.query(ShareObjectItem).get(uri) + if not share_item: + raise exceptions.ObjectNotFound('ShareObjectItem', uri) + + return share_item + + @staticmethod + def list_shareable_items(session, share, states, data): + # All tables from dataset with a column isShared + # marking the table as part of the shareObject + tables = ( + session.query( + DatasetTable.tableUri.label('itemUri'), + func.coalesce('DatasetTable').label('itemType'), + DatasetTable.GlueTableName.label('itemName'), + DatasetTable.description.label('description'), + ShareObjectItem.shareItemUri.label('shareItemUri'), + ShareObjectItem.status.label('status'), + case( + [(ShareObjectItem.shareItemUri.isnot(None), True)], + else_=False, + ).label('isShared'), + ) + .outerjoin( + ShareObjectItem, + and_( + ShareObjectItem.shareUri == share.shareUri, + DatasetTable.tableUri == ShareObjectItem.itemUri, + ), + ) + .filter(DatasetTable.datasetUri == share.datasetUri) + ) + if states: + tables = tables.filter(ShareObjectItem.status.in_(states)) + + # All folders from the dataset with a column isShared + # marking the folder as part of the shareObject + locations = ( + session.query( + DatasetStorageLocation.locationUri.label('itemUri'), + func.coalesce('DatasetStorageLocation').label('itemType'), + DatasetStorageLocation.S3Prefix.label('itemName'), + DatasetStorageLocation.description.label('description'), + ShareObjectItem.shareItemUri.label('shareItemUri'), + ShareObjectItem.status.label('status'), + case( + [(ShareObjectItem.shareItemUri.isnot(None), True)], + else_=False, + ).label('isShared'), + ) + .outerjoin( + ShareObjectItem, + and_( + ShareObjectItem.shareUri == share.shareUri, + DatasetStorageLocation.locationUri + == ShareObjectItem.itemUri, + ), + ) + .filter(DatasetStorageLocation.datasetUri == share.datasetUri) + ) + if states: + locations = locations.filter(ShareObjectItem.status.in_(states)) + + shareable_objects = tables.union(locations).subquery('shareable_objects') + query = session.query(shareable_objects) + + if data: + if data.get('term'): + term = data.get('term') + query = query.filter( + or_( + shareable_objects.c.itemName.ilike(term + '%'), + shareable_objects.c.description.ilike(term + '%'), + ) + ) + if 'isShared' in data.keys(): + is_shared = data.get('isShared') + query = query.filter(shareable_objects.c.isShared == is_shared) + + return paginate(query, data.get('page', 1), data.get('pageSize', 10)).to_dict() + + @staticmethod + def list_user_received_share_requests(session, username, groups, data=None): + query = ( + session.query(ShareObject) + .join( + Dataset, + Dataset.datasetUri == ShareObject.datasetUri, + ) + .filter( + or_( + Dataset.businessOwnerEmail == username, + Dataset.businessOwnerDelegationEmails.contains( + f'{{{username}}}' + ), + Dataset.stewards.in_(groups), + Dataset.SamlAdminGroupName.in_(groups), + ) + ) + ) + return paginate(query, 
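An illustrative call (not part of the patch) to the shareable-items query built above; session is assumed to be an open SQLAlchemy session, share a ShareObject, the search term is made up, and the result is assumed to be the dictionary produced by the paginate helper.

page = ShareObjectRepository.list_shareable_items(
    session,
    share,
    states=None,   # no status filter: list every table/folder with its isShared flag
    data={'term': 'orders', 'isShared': True, 'page': 1, 'pageSize': 10},
)
for row in page.get('nodes', []):
    print(row.itemType, row.itemName, row.status)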
data.get('page', 1), data.get('pageSize', 10)).to_dict() + + @staticmethod + def list_user_sent_share_requests(session, username, groups, data=None): + query = ( + session.query(ShareObject) + .join( + Environment, + Environment.environmentUri == ShareObject.environmentUri, + ) + .filter( + or_( + ShareObject.owner == username, + and_( + ShareObject.groupUri.in_(groups), + ShareObject.principalType.in_([PrincipalType.Group.value, PrincipalType.ConsumptionRole.value]) + ), + ) + ) + ) + return paginate(query, data.get('page', 1), data.get('pageSize', 10)).to_dict() + + @staticmethod + def get_share_by_dataset_and_environment(session, dataset_uri, environment_uri): + environment_groups = session.query(EnvironmentGroup).filter( + EnvironmentGroup.environmentUri == environment_uri + ) + groups = [g.groupUri for g in environment_groups] + share = session.query(ShareObject).filter( + and_( + ShareObject.datasetUri == dataset_uri, + ShareObject.environmentUri == environment_uri, + ShareObject.groupUri.in_(groups), + ) + ) + if not share: + raise exceptions.ObjectNotFound('Share', f'{dataset_uri}/{environment_uri}') + return share + + @staticmethod + def update_share_object_status(session, share_uri: str, status: str) -> ShareObject: + share = ShareObjectRepository.get_share_by_uri(session, share_uri) + share.status = status + session.commit() + return share + + @staticmethod + def update_share_item_status( + session, + uri: str, + status: str, + ) -> ShareObjectItem: + + share_item = ShareObjectRepository.get_share_item_by_uri(session, uri) + share_item.status = status + session.commit() + return share_item + + @staticmethod + def delete_share_item_status_batch( + session, + share_uri: str, + status: str, + ): + ( + session.query(ShareObjectItem) + .filter( + and_( + ShareObjectItem.shareUri == share_uri, + ShareObjectItem.status == status + ) + ) + .delete() + ) + + @staticmethod + def update_share_item_status_batch( + session, + share_uri: str, + old_status: str, + new_status: str, + ) -> bool: + + ( + session.query(ShareObjectItem) + .filter( + and_( + ShareObjectItem.shareUri == share_uri, + ShareObjectItem.status == old_status + ) + ) + .update( + { + ShareObjectItem.status: new_status, + } + ) + ) + return True + + @staticmethod + def get_share_data(session, share_uri): + share: ShareObject = ShareObjectRepository.get_share_by_uri(session, share_uri) + + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) + + source_environment: Environment = session.query(Environment).get( + dataset.environmentUri + ) + if not source_environment: + raise exceptions.ObjectNotFound('SourceEnvironment', dataset.environmentUri) + + target_environment: Environment = session.query(Environment).get( + share.environmentUri + ) + if not target_environment: + raise exceptions.ObjectNotFound('TargetEnvironment', share.environmentUri) + + env_group: EnvironmentGroup = ( + session.query(EnvironmentGroup) + .filter( + and_( + EnvironmentGroup.environmentUri == share.environmentUri, + EnvironmentGroup.groupUri == share.groupUri, + ) + ) + .first() + ) + if not env_group: + raise Exception( + f'Share object Team {share.groupUri} is not a member of the ' + f'environment {target_environment.name}/{target_environment.AwsAccountId}' + ) + + source_env_group: EnvironmentGroup = ( + session.query(EnvironmentGroup) + .filter( + and_( + EnvironmentGroup.environmentUri == dataset.environmentUri, + EnvironmentGroup.groupUri == dataset.SamlAdminGroupName, + ) + ) + .first() + ) + if not 
source_env_group: + raise Exception( + f'Share object Team {dataset.SamlAdminGroupName} is not a member of the ' + f'environment {dataset.environmentUri}' + ) + + return ( + source_env_group, + env_group, + dataset, + share, + source_environment, + target_environment, + ) + + @staticmethod + def get_share_data_items(session, share_uri, status): + share: ShareObject = ShareObjectRepository.get_share_by_uri(session, share_uri) + + tables = ShareObjectRepository._find_all_share_item( + session, share, status, DatasetTable, DatasetTable.tableUri + ) + + folders = ShareObjectRepository._find_all_share_item( + session, share, status, DatasetStorageLocation, DatasetStorageLocation.locationUri + ) + + return ( + tables, + folders, + ) + + @staticmethod + def _find_all_share_item(session, share, status, share_type_model, share_type_uri): + return ( + session.query(share_type_model) + .join( + ShareObjectItem, + ShareObjectItem.itemUri == share_type_uri, + ) + .join( + ShareObject, + ShareObject.shareUri == ShareObjectItem.shareUri, + ) + .filter( + and_( + ShareObject.datasetUri == share.datasetUri, + ShareObject.environmentUri == share.environmentUri, + ShareObject.shareUri == share.shareUri, + ShareObjectItem.status == status, + ) + ) + .all() + ) + + @staticmethod + def find_all_share_items(session, share_uri, share_type): + return ( + session.query(ShareObjectItem).filter( + ( + and_( + ShareObjectItem.shareUri == share_uri, + ShareObjectItem.itemType == share_type + ) + ) + ).all() + ) + + @staticmethod + def other_approved_share_object_exists(session, environment_uri, dataset_uri): + return ( + session.query(ShareObject) + .filter( + and_( + Environment.environmentUri == environment_uri, + ShareObject.status == ShareObjectStatus.Approved.value, + ShareObject.datasetUri == dataset_uri, + ) + ) + .all() + ) + + @staticmethod + def get_share_items_states(session, share_uri, item_uris=None): + query = ( + session.query(ShareObjectItem) + .join( + ShareObject, + ShareObjectItem.shareUri == ShareObject.shareUri, + ) + .filter( + and_( + ShareObject.shareUri == share_uri, + ) + ) + ) + if item_uris: + query = query.filter(ShareObjectItem.shareItemUri.in_(item_uris)) + return [item.status for item in query.distinct(ShareObjectItem.status)] + + @staticmethod + def has_shared_items(session, item_uri: str) -> int: + share_item_shared_states = ShareItemSM.get_share_item_shared_states() + return ( + session.query(ShareObjectItem) + .filter( + and_( + ShareObjectItem.itemUri == item_uri, + ShareObjectItem.status.in_(share_item_shared_states) + ) + ) + .count() + ) + + @staticmethod + def delete_shares(session, item_uri: str): + session.query(ShareObjectItem).filter(ShareObjectItem.itemUri == item_uri).delete() + + @staticmethod + def delete_shares_with_no_shared_items(session, dataset_uri): + share_item_shared_states = ShareItemSM.get_share_item_shared_states() + shares = ( + session.query(ShareObject) + .outerjoin( + ShareObjectItem, + ShareObjectItem.shareUri == ShareObject.shareUri + ) + .filter( + and_( + ShareObject.datasetUri == dataset_uri, + ShareObjectItem.status.notin_(share_item_shared_states), + ) + ) + .all() + ) + for share in shares: + share_items = ( + session.query(ShareObjectItem) + .filter(ShareObjectItem.shareUri == share.shareUri) + .all() + ) + for item in share_items: + session.delete(item) + + share_obj = ( + session.query(ShareObject) + .filter(ShareObject.shareUri == share.shareUri) + .first() + ) + session.delete(share_obj) + + @staticmethod + def _query_user_datasets(session, 
username, groups, filter) -> Query: + share_item_shared_states = ShareItemSM.get_share_item_shared_states() + query = ( + session.query(Dataset) + .outerjoin( + ShareObject, + ShareObject.datasetUri == Dataset.datasetUri, + ) + .outerjoin( + ShareObjectItem, + ShareObjectItem.shareUri == ShareObject.shareUri + ) + .filter( + or_( + Dataset.owner == username, + Dataset.SamlAdminGroupName.in_(groups), + Dataset.stewards.in_(groups), + and_( + ShareObject.principalId.in_(groups), + ShareObjectItem.status.in_(share_item_shared_states), + ), + and_( + ShareObject.owner == username, + ShareObjectItem.status.in_(share_item_shared_states), + ), + ) + ) + ) + if filter and filter.get('term'): + query = query.filter( + or_( + Dataset.description.ilike(filter.get('term') + '%%'), + Dataset.label.ilike(filter.get('term') + '%%'), + ) + ) + return query + + @staticmethod + def paginated_user_datasets( + session, username, groups, data=None + ) -> dict: + return paginate( + query=ShareObjectRepository._query_user_datasets(session, username, groups, data), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def find_dataset_shares(session, dataset_uri): + return ( + session.query(ShareObject) + .filter(ShareObject.datasetUri == dataset_uri) + .all() + ) + + @staticmethod + def query_dataset_shares(session, dataset_uri) -> Query: + return session.query(ShareObject).filter( + and_( + ShareObject.datasetUri == dataset_uri, + ShareObject.deleted.is_(None), + ) + ) + + @staticmethod + def paginated_dataset_shares(session, uri, data=None) -> [ShareObject]: + query = ShareObjectRepository.query_dataset_shares(session, uri) + return paginate( + query=query, page=data.get('page', 1), page_size=data.get('pageSize', 5) + ).to_dict() + + @staticmethod + def list_dataset_shares_with_existing_shared_items(session, dataset_uri) -> [ShareObject]: + share_item_shared_states = ShareItemSM.get_share_item_shared_states() + query = ( + session.query(ShareObject) + .outerjoin( + ShareObjectItem, + ShareObjectItem.shareUri == ShareObject.shareUri + ) + .filter( + and_( + ShareObject.datasetUri == dataset_uri, + ShareObject.deleted.is_(None), + ShareObjectItem.status.in_(share_item_shared_states), + ) + ) + ) + return query.all() + + @staticmethod + def delete_all_share_items(session, env_uri): + env_shared_with_objects = ( + session.query(ShareObject) + .filter(ShareObject.environmentUri == env_uri) + .all() + ) + for share in env_shared_with_objects: + ( + session.query(ShareObjectItem) + .filter(ShareObjectItem.shareUri == share.shareUri) + .delete() + ) + session.delete(share) + + @staticmethod + def paginate_shared_datasets(session, env_uri, data): + share_item_shared_states = ShareItemSM.get_share_item_shared_states() + q = ( + session.query( + ShareObjectItem.shareUri.label('shareUri'), + Dataset.datasetUri.label('datasetUri'), + Dataset.name.label('datasetName'), + Dataset.description.label('datasetDescription'), + Environment.environmentUri.label('environmentUri'), + Environment.name.label('environmentName'), + ShareObject.created.label('created'), + ShareObject.principalId.label('principalId'), + ShareObject.principalType.label('principalType'), + ShareObjectItem.itemType.label('itemType'), + ShareObjectItem.GlueDatabaseName.label('GlueDatabaseName'), + ShareObjectItem.GlueTableName.label('GlueTableName'), + ShareObjectItem.S3AccessPointName.label('S3AccessPointName'), + Organization.organizationUri.label('organizationUri'), + Organization.name.label('organizationName'), 
+ case( + [ + ( + ShareObjectItem.itemType + == ShareableType.Table.value, + func.concat( + DatasetTable.GlueDatabaseName, + '.', + DatasetTable.GlueTableName, + ), + ), + ( + ShareObjectItem.itemType + == ShareableType.StorageLocation.value, + func.concat(DatasetStorageLocation.name), + ), + ], + else_='XXX XXXX', + ).label('itemAccess'), + ) + .join( + ShareObject, + ShareObject.shareUri == ShareObjectItem.shareUri, + ) + .join( + Dataset, + ShareObject.datasetUri == Dataset.datasetUri, + ) + .join( + Environment, + Environment.environmentUri == Dataset.environmentUri, + ) + .join( + Organization, + Organization.organizationUri + == Environment.organizationUri, + ) + .outerjoin( + DatasetTable, + ShareObjectItem.itemUri == DatasetTable.tableUri, + ) + .outerjoin( + DatasetStorageLocation, + ShareObjectItem.itemUri + == DatasetStorageLocation.locationUri, + ) + .filter( + and_( + ShareObjectItem.status.in_(share_item_shared_states), + ShareObject.environmentUri == env_uri, + ) + ) + ) + + if data.get('datasetUri'): + dataset_uri = data.get('datasetUri') + q = q.filter(ShareObject.datasetUri == dataset_uri) + + if data.get('itemTypes', None): + item_types = data.get('itemTypes') + q = q.filter( + or_(*[ShareObjectItem.itemType == t for t in item_types]) + ) + + if data.get("uniqueShares", False): + q = q.filter(ShareObject.principalType != PrincipalType.ConsumptionRole.value) + q = q.distinct(ShareObject.shareUri) + + if data.get('term'): + term = data.get('term') + q = q.filter(ShareObjectItem.itemName.ilike('%' + term + '%')) + + return paginate( + query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10) + ).to_dict() + + @staticmethod + def find_share_items_by_item_uri(session, item_uri): + return ( + session.query(ShareObjectItem) + .filter(ShareObjectItem.itemUri == item_uri) + .all() + ) + + @staticmethod + def get_approved_share_object(session, item): + share_object: ShareObject = ( + session.query(ShareObject) + .filter( + and_( + ShareObject.shareUri == item.shareUri, + ShareObject.status == ShareObjectStatus.Approved.value, + ) + ) + .first() + ) + return share_object + + @staticmethod + def get_shared_tables(session, dataset) -> List[ShareObjectItem]: + return ( + session.query( + DatasetTable.GlueDatabaseName.label('GlueDatabaseName'), + DatasetTable.GlueTableName.label('GlueTableName'), + DatasetTable.S3Prefix.label('S3Prefix'), + DatasetTable.AWSAccountId.label('SourceAwsAccountId'), + DatasetTable.region.label('SourceRegion'), + Environment.AwsAccountId.label('TargetAwsAccountId'), + Environment.region.label('TargetRegion'), + ) + .join( + ShareObjectItem, + and_( + ShareObjectItem.itemUri == DatasetTable.tableUri + ), + ) + .join( + ShareObject, + ShareObject.shareUri == ShareObjectItem.shareUri, + ) + .join( + Environment, + Environment.environmentUri == ShareObject.environmentUri, + ) + .filter( + and_( + DatasetTable.datasetUri == dataset.datasetUri, + DatasetTable.deleted.is_(None), + ShareObjectItem.status == ShareObjectStatus.Approved.value, + ) + ) + ).all() + + @staticmethod + def get_shared_folders(session, dataset) -> List[DatasetStorageLocation]: + return ( + session.query( + DatasetStorageLocation.locationUri.label('locationUri'), + DatasetStorageLocation.S3BucketName.label('S3BucketName'), + DatasetStorageLocation.S3Prefix.label('S3Prefix'), + Environment.AwsAccountId.label('AwsAccountId'), + Environment.region.label('region'), + ) + .join( + ShareObjectItem, + and_( + ShareObjectItem.itemUri == DatasetStorageLocation.locationUri + ), + ) + .join( 
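For illustration (not part of the patch), the environment "shared with" view assembled by paginate_shared_datasets above might be queried like this; the environment URI and term are placeholders.

shared = ShareObjectRepository.paginate_shared_datasets(
    session,
    env_uri='env-uri-123',
    data={
        'itemTypes': [ShareableType.Table.value],  # only shared tables
        'uniqueShares': True,                      # one row per share, excluding consumption-role shares
        'term': 'orders',
        'page': 1,
        'pageSize': 10,
    },
)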
+ ShareObject, + ShareObject.shareUri == ShareObjectItem.shareUri, + ) + .join( + Environment, + Environment.environmentUri == ShareObject.environmentUri, + ) + .filter( + and_( + DatasetStorageLocation.datasetUri == dataset.datasetUri, + DatasetStorageLocation.deleted.is_(None), + ShareObjectItem.status == ShareObjectStatus.Approved.value, + ) + ) + ).all() + + @staticmethod + def count_principal_shares(session, principal_id: str, principal_type: PrincipalType): + return ( + session.query(ShareObject) + .filter( + and_( + ShareObject.principalId == principal_id, + ShareObject.principalType == principal_type.value + ) + ) + .count() + ) diff --git a/backend/dataall/modules/dataset_sharing/handlers/__init__.py b/backend/dataall/modules/dataset_sharing/handlers/__init__.py new file mode 100644 index 000000000..eb86af3de --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/handlers/__init__.py @@ -0,0 +1,3 @@ +from dataall.modules.dataset_sharing.handlers import ecs_share_handler + +__all__ = ["ecs_share_handler"] diff --git a/backend/dataall/modules/dataset_sharing/handlers/ecs_share_handler.py b/backend/dataall/modules/dataset_sharing/handlers/ecs_share_handler.py new file mode 100644 index 000000000..a5c75f947 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/handlers/ecs_share_handler.py @@ -0,0 +1,43 @@ + +import logging +import os + +from dataall.core.tasks.service_handlers import Worker +from dataall.core.stacks.aws.ecs import Ecs +from dataall.core.tasks.db.task_models import Task +from dataall.modules.dataset_sharing.services.data_sharing_service import DataSharingService + +log = logging.getLogger(__name__) + + +class EcsShareHandler: + @staticmethod + @Worker.handler(path='ecs.share.approve') + def approve_share(engine, task: Task): + return EcsShareHandler._manage_share(engine, task, DataSharingService.approve_share, 'approve_share') + + @staticmethod + @Worker.handler(path='ecs.share.revoke') + def revoke_share(engine, task: Task): + return EcsShareHandler._manage_share(engine, task, DataSharingService.revoke_share, 'revoke_share') + + @staticmethod + def _manage_share(engine, task: Task, local_handler, ecs_handler: str): + envname = os.environ.get('envname', 'local') + if envname in ['local', 'dkrcompose']: + return local_handler(engine, task.targetUri) + else: + return EcsShareHandler._run_share_management_ecs_task( + share_uri=task.targetUri, handler=ecs_handler + ) + + @staticmethod + def _run_share_management_ecs_task(share_uri, handler): + return Ecs.run_ecs_task( + task_definition_param='ecs/task_def_arn/share_management', + container_name_param='ecs/container/share_management', + context=[ + {'name': 'shareUri', 'value': share_uri}, + {'name': 'handler', 'value': handler}, + ], + ) diff --git a/backend/dataall/modules/dataset_sharing/services/__init__.py b/backend/dataall/modules/dataset_sharing/services/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py new file mode 100644 index 000000000..9ed1aec81 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/services/data_sharing_service.py @@ -0,0 +1,220 @@ +import logging + +from dataall.modules.dataset_sharing.services.share_processors.lf_process_cross_account_share import ProcessLFCrossAccountShare +from dataall.modules.dataset_sharing.services.share_processors.lf_process_same_account_share import ProcessLFSameAccountShare +from 
dataall.modules.dataset_sharing.services.share_processors.s3_process_share import ProcessS3Share + +from dataall.base.db import Engine +from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareItemStatus, ShareableType +from dataall.modules.dataset_sharing.db.share_object_repositories import ShareObjectSM, ShareObjectRepository, ShareItemSM + +log = logging.getLogger(__name__) + + +class DataSharingService: + def __init__(self): + pass + + @classmethod + def approve_share(cls, engine: Engine, share_uri: str) -> bool: + """ + 1) Updates share object State Machine with the Action: Start + 2) Retrieves share data and items in Share_Approved state + 3) Calls sharing folders processor to grant share + 4) Calls sharing tables processor for same or cross account sharing to grant share + 5) Updates share object State Machine with the Action: Finish + + Parameters + ---------- + engine : db.engine + share_uri : share uri + + Returns + ------- + True if sharing succeeds, + False if folder or table sharing failed + """ + with engine.scoped_session() as session: + ( + source_env_group, + env_group, + dataset, + share, + source_environment, + target_environment, + ) = ShareObjectRepository.get_share_data(session, share_uri) + + share_sm = ShareObjectSM(share.status) + new_share_state = share_sm.run_transition(ShareObjectActions.Start.value) + share_sm.update_state(session, share, new_share_state) + + ( + shared_tables, + shared_folders + ) = ShareObjectRepository.get_share_data_items(session, share_uri, ShareItemStatus.Share_Approved.value) + + log.info(f'Granting permissions to folders: {shared_folders}') + + approved_folders_succeed = ProcessS3Share.process_approved_shares( + session, + dataset, + share, + shared_folders, + source_environment, + target_environment, + source_env_group, + env_group + ) + log.info(f'sharing folders succeeded = {approved_folders_succeed}') + + if source_environment.AwsAccountId != target_environment.AwsAccountId: + processor = ProcessLFCrossAccountShare( + session, + dataset, + share, + shared_tables, + [], + source_environment, + target_environment, + env_group, + ) + else: + processor = ProcessLFSameAccountShare( + session, + dataset, + share, + shared_tables, + [], + source_environment, + target_environment, + env_group + ) + + log.info(f'Granting permissions to tables: {shared_tables}') + approved_tables_succeed = processor.process_approved_shares() + log.info(f'sharing tables succeeded = {approved_tables_succeed}') + + new_share_state = share_sm.run_transition(ShareObjectActions.Finish.value) + share_sm.update_state(session, share, new_share_state) + + return approved_tables_succeed if approved_folders_succeed else False + + @classmethod + def revoke_share(cls, engine: Engine, share_uri: str): + """ + 1) Updates share object State Machine with the Action: Start + 2) Retrieves share data and items in Revoke_Approved state + 3) Calls sharing folders processor to revoke share + 4) Checks if remaining folders are shared and effectuates clean up with folders processor + 5) Calls sharing tables processor for same or cross account sharing to revoke share + 6) Checks if remaining tables are shared and effectuates clean up with tables processor + 7) Updates share object State Machine with the Action: Finish + + Parameters + ---------- + engine : db.engine + share_uri : share uri + + Returns + ------- + True if revoke succeeds + False if folder or table revoking failed + """ + + with engine.scoped_session() as session: + ( + source_env_group, + env_group, 
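To make the flow described in the docstring above concrete (this sketch is not part of the patch), approve_share and revoke_share might be driven like this, for example from the ECS share-management entrypoint; the environment name and share URI are placeholders.

from dataall.base.db import get_engine
from dataall.modules.dataset_sharing.services.data_sharing_service import DataSharingService

engine = get_engine(envname='local')

# Steps 1-5 above: Start -> share folders -> share tables (same or cross account) -> Finish.
succeeded = DataSharingService.approve_share(engine, share_uri='share-uri-123')
if not succeeded:
    # Failed items remain in their *_Failed states so the share can be corrected and retried.
    print('Some items could not be shared')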
+ dataset, + share, + source_environment, + target_environment, + ) = ShareObjectRepository.get_share_data(session, share_uri) + + share_sm = ShareObjectSM(share.status) + new_share_state = share_sm.run_transition(ShareObjectActions.Start.value) + share_sm.update_state(session, share, new_share_state) + + revoked_item_sm = ShareItemSM(ShareItemStatus.Revoke_Approved.value) + + ( + revoked_tables, + revoked_folders + ) = ShareObjectRepository.get_share_data_items(session, share_uri, ShareItemStatus.Revoke_Approved.value) + + new_state = revoked_item_sm.run_transition(ShareObjectActions.Start.value) + revoked_item_sm.update_state(session, share_uri, new_state) + + log.info(f'Revoking permissions to folders: {revoked_folders}') + + revoked_folders_succeed = ProcessS3Share.process_revoked_shares( + session, + dataset, + share, + revoked_folders, + source_environment, + target_environment, + source_env_group, + env_group, + ) + log.info(f'revoking folders succeeded = {revoked_folders_succeed}') + existing_shared_items = ShareObjectRepository.check_existing_shared_items_of_type( + session, + share_uri, + ShareableType.StorageLocation.value + ) + log.info(f'Still remaining S3 resources shared = {existing_shared_items}') + if not existing_shared_items and revoked_folders: + log.info("Clean up S3 access points...") + clean_up_folders = ProcessS3Share.clean_up_share( + dataset=dataset, + share=share, + target_environment=target_environment + ) + log.info(f"Clean up S3 successful = {clean_up_folders}") + + if source_environment.AwsAccountId != target_environment.AwsAccountId: + processor = ProcessLFCrossAccountShare( + session, + dataset, + share, + [], + revoked_tables, + source_environment, + target_environment, + env_group, + ) + else: + processor = ProcessLFSameAccountShare( + session, + dataset, + share, + [], + revoked_tables, + source_environment, + target_environment, + env_group) + + log.info(f'Revoking permissions to tables: {revoked_tables}') + revoked_tables_succeed = processor.process_revoked_shares() + log.info(f'revoking tables succeeded = {revoked_tables_succeed}') + + existing_shared_items = ShareObjectRepository.check_existing_shared_items_of_type( + session, + share_uri, + ShareableType.Table.value + ) + log.info(f'Still remaining LF resources shared = {existing_shared_items}') + if not existing_shared_items and revoked_tables: + log.info("Clean up LF remaining resources...") + clean_up_tables = processor.clean_up_share() + log.info(f"Clean up LF successful = {clean_up_tables}") + + existing_pending_items = ShareObjectRepository.check_pending_share_items(session, share_uri) + if existing_pending_items: + new_share_state = share_sm.run_transition(ShareObjectActions.FinishPending.value) + else: + new_share_state = share_sm.run_transition(ShareObjectActions.Finish.value) + share_sm.update_state(session, share, new_share_state) + + return revoked_tables_succeed and revoked_folders_succeed diff --git a/backend/dataall/modules/dataset_sharing/services/dataset_alarm_service.py b/backend/dataall/modules/dataset_sharing/services/dataset_alarm_service.py new file mode 100644 index 000000000..ae225f99f --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/services/dataset_alarm_service.py @@ -0,0 +1,151 @@ +import logging +from datetime import datetime + +from dataall.core.environment.db.environment_models import Environment +from dataall.modules.dataset_sharing.db.share_object_models import ShareObject +from dataall.modules.datasets_base.db.dataset_models import DatasetTable, Dataset, 
DatasetStorageLocation +from dataall.base.utils.alarm_service import AlarmService + +log = logging.getLogger(__name__) + + +class DatasetAlarmService(AlarmService): + """Contains set of alarms for datasets""" + + def trigger_table_sharing_failure_alarm( + self, + table: DatasetTable, + share: ShareObject, + target_environment: Environment, + ): + log.info('Triggering share failure alarm...') + subject = ( + f'ALARM: DATAALL Table {table.GlueTableName} Sharing Failure Notification' + ) + message = f""" + You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the table {table.GlueTableName} with Lake Formation. + + Alarm Details: + - State Change: OK -> ALARM + - Reason for State Change: Lake Formation sharing failure + - Timestamp: {datetime.now()} + + Share Source + - Dataset URI: {share.datasetUri} + - AWS Account: {table.AWSAccountId} + - Region: {table.region} + - Glue Database: {table.GlueDatabaseName} + - Glue Table: {table.GlueTableName} + + Share Target + - AWS Account: {target_environment.AwsAccountId} + - Region: {target_environment.region} + - Glue Database: {table.GlueDatabaseName}shared + """ + return self.publish_message_to_alarms_topic(subject, message) + + def trigger_revoke_table_sharing_failure_alarm( + self, + table: DatasetTable, + share: ShareObject, + target_environment: Environment, + ): + log.info('Triggering share failure alarm...') + subject = f'ALARM: DATAALL Table {table.GlueTableName} Revoking LF permissions Failure Notification' + message = f""" + You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to revoke Lake Formation permissions for table {table.GlueTableName} with Lake Formation. + + Alarm Details: + - State Change: OK -> ALARM + - Reason for State Change: Lake Formation sharing failure + - Timestamp: {datetime.now()} + + Share Source + - Dataset URI: {share.datasetUri} + - AWS Account: {table.AWSAccountId} + - Region: {table.region} + - Glue Database: {table.GlueDatabaseName} + - Glue Table: {table.GlueTableName} + + Share Target + - AWS Account: {target_environment.AwsAccountId} + - Region: {target_environment.region} + - Glue Database: {table.GlueDatabaseName}shared + """ + return self.publish_message_to_alarms_topic(subject, message) + + def trigger_dataset_sync_failure_alarm(self, dataset: Dataset, error: str): + log.info(f'Triggering dataset {dataset.name} tables sync failure alarm...') + subject = ( + f'ALARM: DATAALL Dataset {dataset.name} Tables Sync Failure Notification' + ) + message = f""" +You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to synchronize Dataset {dataset.name} tables from AWS Glue to the Search Catalog. 
+ +Alarm Details: + - State Change: OK -> ALARM + - Reason for State Change: {error} + - Timestamp: {datetime.now()} + Dataset + - Dataset URI: {dataset.datasetUri} + - AWS Account: {dataset.AwsAccountId} + - Region: {dataset.region} + - Glue Database: {dataset.GlueDatabaseName} + """ + return self.publish_message_to_alarms_topic(subject, message) + + def trigger_folder_sharing_failure_alarm( + self, + folder: DatasetStorageLocation, + share: ShareObject, + target_environment: Environment, + ): + log.info('Triggering share failure alarm...') + subject = ( + f'ALARM: DATAALL Folder {folder.S3Prefix} Sharing Failure Notification' + ) + message = f""" +You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the folder {folder.S3Prefix} with S3 Access Point. +Alarm Details: + - State Change: OK -> ALARM + - Reason for State Change: S3 Folder sharing failure + - Timestamp: {datetime.now()} + Share Source + - Dataset URI: {share.datasetUri} + - AWS Account: {folder.AWSAccountId} + - Region: {folder.region} + - S3 Bucket: {folder.S3BucketName} + - S3 Folder: {folder.S3Prefix} + Share Target + - AWS Account: {target_environment.AwsAccountId} + - Region: {target_environment.region} +""" + return self.publish_message_to_alarms_topic(subject, message) + + def trigger_revoke_folder_sharing_failure_alarm( + self, + folder: DatasetStorageLocation, + share: ShareObject, + target_environment: Environment, + ): + log.info('Triggering share failure alarm...') + subject = ( + f'ALARM: DATAALL Folder {folder.S3Prefix} Sharing Revoke Failure Notification' + ) + message = f""" +You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the folder {folder.S3Prefix} with S3 Access Point. 
+Alarm Details: + - State Change: OK -> ALARM + - Reason for State Change: S3 Folder sharing Revoke failure + - Timestamp: {datetime.now()} + Share Source + - Dataset URI: {share.datasetUri} + - AWS Account: {folder.AWSAccountId} + - Region: {folder.region} + - S3 Bucket: {folder.S3BucketName} + - S3 Folder: {folder.S3Prefix} + Share Target + - AWS Account: {target_environment.AwsAccountId} + - Region: {target_environment.region} +""" + return self.publish_message_to_alarms_topic(subject, message) diff --git a/backend/dataall/modules/dataset_sharing/services/share_exceptions.py b/backend/dataall/modules/dataset_sharing/services/share_exceptions.py new file mode 100644 index 000000000..706c61ccf --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/services/share_exceptions.py @@ -0,0 +1,12 @@ + + +class ShareItemsFound(Exception): + def __init__(self, action, message): + self.action = action + self.message = f""" + An error occurred (ShareItemsFound) when calling {self.action} operation: + {message} + """ + + def __str__(self): + return f'{self.message}' diff --git a/backend/dataall/modules/dataset_sharing/services/share_item_service.py b/backend/dataall/modules/dataset_sharing/services/share_item_service.py new file mode 100644 index 000000000..855b42027 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/services/share_item_service.py @@ -0,0 +1,171 @@ +import logging + +from dataall.core.tasks.service_handlers import Worker +from dataall.base.context import get_context +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.permissions.permission_checker import has_resource_permission +from dataall.core.tasks.db.task_models import Task +from dataall.base.db import utils +from dataall.base.db.exceptions import ObjectNotFound, UnauthorizedOperation +from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareableType, ShareItemStatus, \ + ShareItemActions +from dataall.modules.dataset_sharing.db.share_object_models import ShareObjectItem +from dataall.modules.dataset_sharing.db.share_object_repositories import ShareObjectRepository, ShareObjectSM, ShareItemSM +from dataall.modules.dataset_sharing.services.share_exceptions import ShareItemsFound +from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService +from dataall.modules.dataset_sharing.services.share_permissions import GET_SHARE_OBJECT, ADD_ITEM, REMOVE_ITEM, \ + LIST_ENVIRONMENT_SHARED_WITH_OBJECTS +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.db.dataset_models import Dataset + +log = logging.getLogger(__name__) + + +class ShareItemService: + @staticmethod + def _get_share_uri(session, uri): + share_item = ShareObjectRepository.get_share_item_by_uri(session, uri) + share = ShareObjectRepository.get_share_by_uri(session, share_item.shareUri) + return share.shareUri + + @staticmethod + @has_resource_permission(GET_SHARE_OBJECT) + def revoke_items_share_object(uri, revoked_uris): + context = get_context() + with context.db_engine.scoped_session() as session: + share = ShareObjectRepository.get_share_by_uri(session, uri) + dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) + revoked_items_states = ShareObjectRepository.get_share_items_states(session, uri, revoked_uris) + revoked_items = 
[ShareObjectRepository.get_share_item_by_uri(session, uri) for uri in revoked_uris] + + if not revoked_items_states: + raise ShareItemsFound( + action='Revoke Items from Share Object', + message='Nothing to be revoked.', + ) + + share_sm = ShareObjectSM(share.status) + new_share_state = share_sm.run_transition(ShareObjectActions.RevokeItems.value) + + for item_state in revoked_items_states: + item_sm = ShareItemSM(item_state) + new_state = item_sm.run_transition(ShareObjectActions.RevokeItems.value) + for item in revoked_items: + if item.status == item_state: + item_sm.update_state_single_item(session, item, new_state) + + share_sm.update_state(session, share, new_share_state) + + ResourcePolicy.delete_resource_policy( + session=session, + group=share.groupUri, + resource_uri=dataset.datasetUri, + ) + + ShareNotificationService.notify_share_object_rejection(session, context.username, dataset, share) + + revoke_share_task: Task = Task( + action='ecs.share.revoke', + targetUri=uri, + payload={'environmentUri': share.environmentUri}, + ) + session.add(revoke_share_task) + + Worker.queue(engine=context.db_engine, task_ids=[revoke_share_task.taskUri]) + + return share + + @staticmethod + @has_resource_permission(ADD_ITEM) + def add_shared_item(uri: str, data: dict = None): + context = get_context() + with context.db_engine.scoped_session() as session: + item_type = data.get('itemType') + item_uri = data.get('itemUri') + share = ShareObjectRepository.get_share_by_uri(session, uri) + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) + target_environment = EnvironmentService.get_environment_by_uri(session, dataset.environmentUri) + + share_sm = ShareObjectSM(share.status) + new_share_state = share_sm.run_transition(ShareItemActions.AddItem.value) + share_sm.update_state(session, share, new_share_state) + + item = ShareObjectRepository.get_share_item(session, item_type, item_uri) + if not item: + raise ObjectNotFound('ShareObjectItem', item_uri) + + if item_type == ShareableType.Table.value and item.region != target_environment.region: + raise UnauthorizedOperation( + action=ADD_ITEM, + message=f'Lake Formation cross region sharing is not supported. 
' + f'Table {item.GlueTableName} is in {item.region} and target environment ' + f'{target_environment.name} is in {target_environment.region} ', + ) + + share_item: ShareObjectItem = ShareObjectRepository.find_sharable_item(session, uri, item_uri) + + s3_access_point_name = utils.slugify( + share.datasetUri + '-' + share.principalId, + max_length=50, lowercase=True, regex_pattern='[^a-zA-Z0-9-]', separator='-' + ) + log.info(f"S3AccessPointName={s3_access_point_name}") + + if not share_item: + share_item = ShareObjectItem( + shareUri=uri, + itemUri=item_uri, + itemType=item_type, + itemName=item.name, + status=ShareItemStatus.PendingApproval.value, + owner=context.username, + GlueDatabaseName=dataset.GlueDatabaseName + if item_type == ShareableType.Table.value + else '', + GlueTableName=item.GlueTableName + if item_type == ShareableType.Table.value + else '', + S3AccessPointName=s3_access_point_name + if item_type == ShareableType.StorageLocation.value + else '', + ) + session.add(share_item) + return share_item + + @staticmethod + @has_resource_permission(REMOVE_ITEM, parent_resource=_get_share_uri) + def remove_shared_item(uri: str): + with get_context().db_engine.scoped_session() as session: + share_item = ShareObjectRepository.get_share_item_by_uri(session, uri) + + item_sm = ShareItemSM(share_item.status) + item_sm.run_transition(ShareItemActions.RemoveItem.value) + ShareObjectRepository.remove_share_object_item(session, share_item) + return True + + @staticmethod + @has_resource_permission(GET_SHARE_OBJECT) + def resolve_shared_item(uri, item: ShareObjectItem): + with get_context().db_engine.scoped_session() as session: + return ShareObjectRepository.get_share_item(session, item.itemType, item.itemUri) + + @staticmethod + def check_existing_shared_items(share): + with get_context().db_engine.scoped_session() as session: + return ShareObjectRepository.check_existing_shared_items( + session, share.shareUri + ) + + @staticmethod + def list_shareable_objects(share, filter, is_revokable=False): + states = None + if is_revokable: + states = ShareItemSM.get_share_item_revokable_states() + + with get_context().db_engine.scoped_session() as session: + return ShareObjectRepository.list_shareable_items(session, share, states, filter) + + @staticmethod + @has_resource_permission(LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) + def paginated_shared_with_environment_datasets(session, uri, data) -> dict: + return ShareObjectRepository.paginate_shared_datasets(session, uri, data) diff --git a/backend/dataall/tasks/data_sharing/share_managers/__init__.py b/backend/dataall/modules/dataset_sharing/services/share_managers/__init__.py similarity index 100% rename from backend/dataall/tasks/data_sharing/share_managers/__init__.py rename to backend/dataall/modules/dataset_sharing/services/share_managers/__init__.py diff --git a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py similarity index 76% rename from backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py rename to backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py index 22bba64ca..2d2a74281 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/lf_share_manager.py +++ b/backend/dataall/modules/dataset_sharing/services/share_managers/lf_share_manager.py @@ -5,13 +5,16 @@ from botocore.exceptions import ClientError -from ....aws.handlers.glue import Glue -from ....aws.handlers.lakeformation import 
LakeFormation -from ....aws.handlers.quicksight import Quicksight -from ....aws.handlers.sts import SessionHelper -from ....aws.handlers.ram import Ram -from ....db import api, exceptions, models -from ....utils.alarm_service import AlarmService +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.modules.dataset_sharing.aws.glue_client import GlueClient +from dataall.modules.dataset_sharing.aws.lakeformation_client import LakeFormationClient +from dataall.base.aws.quicksight import QuicksightClient +from dataall.base.aws.sts import SessionHelper +from dataall.base.db import exceptions +from dataall.modules.datasets_base.db.dataset_models import DatasetTable, Dataset +from dataall.modules.dataset_sharing.services.dataset_alarm_service import DatasetAlarmService +from dataall.modules.dataset_sharing.db.share_object_models import ShareObjectItem, ShareObject logger = logging.getLogger(__name__) @@ -20,13 +23,13 @@ class LFShareManager: def __init__( self, session, - dataset: models.Dataset, - share: models.ShareObject, - shared_tables: [models.DatasetTable], - revoked_tables: [models.DatasetTable], - source_environment: models.Environment, - target_environment: models.Environment, - env_group: models.EnvironmentGroup, + dataset: Dataset, + share: ShareObject, + shared_tables: [DatasetTable], + revoked_tables: [DatasetTable], + source_environment: Environment, + target_environment: Environment, + env_group: EnvironmentGroup, ): self.session = session self.env_group = env_group @@ -59,8 +62,10 @@ def get_share_principals(self) -> [str]: List of principals """ principals = [f"arn:aws:iam::{self.target_environment.AwsAccountId}:role/{self.share.principalIAMRoleName}"] - if self.target_environment.dashboardsEnabled: - group = Quicksight.create_quicksight_group(AwsAccountId=self.target_environment.AwsAccountId) + dashboard_enabled = EnvironmentService.get_boolean_env_param(self.session, self.target_environment, "dashboardsEnabled") + + if dashboard_enabled: + group = QuicksightClient.create_quicksight_group(AwsAccountId=self.target_environment.AwsAccountId) if group and group.get('Group'): group_arn = group.get('Group').get('Arn') if group_arn: @@ -72,10 +77,6 @@ def build_shared_db_name(self) -> str: """ Build Glue shared database name. Unique per share Uri. 
- Parameters - ---------- - dataset : models.Dataset - share : models.ShareObject Returns ------- @@ -83,7 +84,7 @@ def build_shared_db_name(self) -> str: """ return (self.dataset.GlueDatabaseName + '_shared_' + self.share.shareUri)[:254] - def build_share_data(self, table: models.DatasetTable) -> dict: + def build_share_data(self, table: DatasetTable) -> dict: """ Build aws dict for boto3 operations on Glue and LF from share data Parameters @@ -111,7 +112,7 @@ def build_share_data(self, table: models.DatasetTable) -> dict: return data def check_share_item_exists_on_glue_catalog( - self, share_item: models.ShareObjectItem, table: models.DatasetTable + self, share_item: ShareObjectItem, table: DatasetTable ) -> None: """ Checks if a table in the share request @@ -126,12 +127,12 @@ def check_share_item_exists_on_glue_catalog( ------- exceptions.AWSResourceNotFound """ - if not Glue.table_exists( - accountid=self.source_environment.AwsAccountId, + glue_client = GlueClient( + account_id=self.source_environment.AwsAccountId, region=self.source_environment.region, database=table.GlueDatabaseName, - tablename=table.GlueTableName, - ): + ) + if not glue_client.table_exists(table.GlueTableName): raise exceptions.AWSResourceNotFound( action='ProcessShare', message=( @@ -144,7 +145,7 @@ def grant_pivot_role_all_database_permissions(self) -> bool: """ Grants 'ALL' database Lake Formation permissions to data.all PivotRole """ - LakeFormation.grant_pivot_role_all_database_permissions( + LakeFormationClient.grant_pivot_role_all_database_permissions( self.source_environment.AwsAccountId, self.source_environment.region, self.dataset.GlueDatabaseName, @@ -154,8 +155,8 @@ def grant_pivot_role_all_database_permissions(self) -> bool: @classmethod def create_shared_database( cls, - target_environment: models.Environment, - dataset: models.Dataset, + target_environment: Environment, + dataset: Dataset, shared_db_name: str, principals: [str], ) -> dict: @@ -182,18 +183,17 @@ def create_shared_database( f'{target_environment.AwsAccountId}://{shared_db_name}' ) - database = Glue.create_database( - target_environment.AwsAccountId, - shared_db_name, - target_environment.region, - f's3://{dataset.S3BucketName}', - ) + database = GlueClient( + account_id=target_environment.AwsAccountId, + database=shared_db_name, + region=target_environment.region + ).create_database(f's3://{dataset.S3BucketName}') - LakeFormation.grant_pivot_role_all_database_permissions( + LakeFormationClient.grant_pivot_role_all_database_permissions( target_environment.AwsAccountId, target_environment.region, shared_db_name ) - LakeFormation.grant_permissions_to_database( + LakeFormationClient.grant_permissions_to_database( client=SessionHelper.remote_session( accountid=target_environment.AwsAccountId ).client('lakeformation', region_name=target_environment.region), @@ -213,11 +213,7 @@ def delete_shared_database(self) -> bool: bool """ logger.info(f'Deleting shared database {self.shared_db_name}') - return Glue.delete_database( - accountid=self.target_environment.AwsAccountId, - region=self.target_environment.region, - database=self.shared_db_name, - ) + return self.glue_client().delete_database() @classmethod def create_resource_link(cls, **data) -> dict: @@ -248,19 +244,17 @@ def create_resource_link(cls, **data) -> dict: } try: - resource_link = Glue.create_resource_link( - accountid=target['accountid'], - region=target['region'], - database=target_database, + glue_client = GlueClient(target['accountid'], target['region'], target_database) + 
resource_link = glue_client.create_resource_link( resource_link_name=source['tablename'], resource_link_input=resource_link_input, ) - LakeFormation.grant_resource_link_permission( + LakeFormationClient.grant_resource_link_permission( lakeformation_client, source, target, target_database ) - LakeFormation.grant_resource_link_permission_on_target( + LakeFormationClient.grant_resource_link_permission_on_target( lakeformation_client, source, target ) @@ -272,24 +266,20 @@ def create_resource_link(cls, **data) -> dict: ) raise e - def revoke_table_resource_link_access(self, table: models.DatasetTable, principals: [str]): + def revoke_table_resource_link_access(self, table: DatasetTable, principals: [str]): """ Revokes access to glue table resource link Parameters ---------- - table : models.DatasetTable + table : DatasetTable principals: List of strings. IAM role arn and Quicksight groups Returns ------- True if revoke is successful """ - if not Glue.table_exists( - accountid=self.target_environment.AwsAccountId, - region=self.target_environment.region, - database=self.shared_db_name, - tablename=table.GlueTableName, - ): + glue_client = self.glue_client() + if not glue_client.table_exists(table.GlueTableName): logger.info( f'Resource link could not be found ' f'on {self.target_environment.AwsAccountId}/{self.shared_db_name}/{table.GlueTableName} ' @@ -304,7 +294,7 @@ def revoke_table_resource_link_access(self, table: models.DatasetTable, principa f'for principal {principal}' ) - LakeFormation.batch_revoke_permissions( + LakeFormationClient.batch_revoke_permissions( SessionHelper.remote_session(self.target_environment.AwsAccountId).client( 'lakeformation', region_name=self.target_environment.region ), @@ -333,18 +323,14 @@ def revoke_source_table_access(self, table, principals: [str]): Revokes access to the source glue table Parameters ---------- - table : models.DatasetTable + table : DatasetTable Returns ------- True if revoke is successful """ - if not Glue.table_exists( - accountid=self.target_environment.AwsAccountId, - region=self.target_environment.region, - database=self.shared_db_name, - tablename=table.GlueTableName, - ): + glue_client = self.glue_client() + if not glue_client.table_exists(table.GlueTableName): logger.info( f'Source table could not be found ' f'on {self.source_environment.AwsAccountId}/{self.dataset.GlueDatabaseName}/{table.GlueTableName} ' @@ -357,7 +343,7 @@ def revoke_source_table_access(self, table, principals: [str]): f'on {self.source_environment.AwsAccountId}/{self.dataset.GlueDatabaseName}/{table.GlueTableName} ' f'for principals {principals}' ) - LakeFormation.revoke_source_table_access( + LakeFormationClient.revoke_source_table_access( target_accountid=self.target_environment.AwsAccountId, region=self.target_environment.region, source_database=self.dataset.GlueDatabaseName, @@ -367,22 +353,14 @@ def revoke_source_table_access(self, table, principals: [str]): ) return True - def delete_resource_link_table(self, table: models.DatasetTable): + def delete_resource_link_table(self, table: DatasetTable): logger.info(f'Deleting shared table {table.GlueTableName}') + glue_client = self.glue_client() - if not Glue.table_exists( - accountid=self.target_environment.AwsAccountId, - region=self.target_environment.region, - database=self.shared_db_name, - tablename=table.GlueTableName, - ): + if not glue_client.table_exists(table.GlueTableName): return True - Glue.delete_table( - accountid=self.target_environment.AwsAccountId, - region=self.target_environment.region, - 
database=self.shared_db_name, - tablename=table.GlueTableName - ) + + glue_client.delete_table(table.GlueTableName) return True @classmethod @@ -405,7 +383,7 @@ def share_table_with_target_account(cls, **data): ) try: - LakeFormation.revoke_iamallowedgroups_super_permission_from_table( + LakeFormationClient.revoke_iamallowedgroups_super_permission_from_table( source_lf_client, source_accountid, data['source']['database'], @@ -413,7 +391,7 @@ def share_table_with_target_account(cls, **data): ) time.sleep(1) - LakeFormation.grant_permissions_to_table( + LakeFormationClient.grant_permissions_to_table( source_lf_client, target_accountid, data['source']['database'], @@ -477,34 +455,15 @@ def revoke_external_account_access_on_source_account(self) -> [dict]: 'PermissionsWithGrantOption': ['DESCRIBE', 'SELECT'], } ) - LakeFormation.batch_revoke_permissions( + LakeFormationClient.batch_revoke_permissions( client, self.source_environment.AwsAccountId, revoke_entries ) return revoke_entries - def delete_ram_resource_shares(self, resource_arn: str) -> [dict]: - """ - Deletes resource share for the resource arn - Parameters - ---------- - resource_arn : glue table arn - - Returns - ------- - list of ram associations - """ - logger.info(f'Cleaning RAM resource shares for resource: {resource_arn} ...') - return Ram.delete_resource_shares( - SessionHelper.remote_session( - accountid=self.source_environment.AwsAccountId - ).client('ram', region_name=self.source_environment.region), - resource_arn, - ) - def handle_share_failure( self, - table: models.DatasetTable, - share_item: models.ShareObjectItem, + table: DatasetTable, + share_item: ShareObjectItem, error: Exception, ) -> bool: """ @@ -526,15 +485,15 @@ def handle_share_failure( f'due to: {error}' ) - AlarmService().trigger_table_sharing_failure_alarm( + DatasetAlarmService().trigger_table_sharing_failure_alarm( table, self.share, self.target_environment ) return True def handle_revoke_failure( self, - table: models.DatasetTable, - share_item: models.ShareObjectItem, + table: DatasetTable, + share_item: ShareObjectItem, error: Exception, ) -> bool: """ @@ -549,7 +508,14 @@ def handle_revoke_failure( f'with target account {self.target_environment.AwsAccountId}/{self.target_environment.region} ' f'due to: {error}' ) - AlarmService().trigger_revoke_table_sharing_failure_alarm( + DatasetAlarmService().trigger_revoke_table_sharing_failure_alarm( table, self.share, self.target_environment ) return True + + def glue_client(self): + return GlueClient( + account_id=self.target_environment.AwsAccountId, + region=self.target_environment.region, + database=self.shared_db_name, + ) diff --git a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py b/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py similarity index 79% rename from backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py rename to backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py index 0b1a15e87..5de4c01f3 100644 --- a/backend/dataall/tasks/data_sharing/share_managers/s3_share_manager.py +++ b/backend/dataall/modules/dataset_sharing/services/share_managers/s3_share_manager.py @@ -3,13 +3,17 @@ import json import time -from ....db import models, api, utils -from ....aws.handlers.sts import SessionHelper -from ....aws.handlers.s3 import S3 -from ....aws.handlers.kms import KMS -from ....aws.handlers.iam import IAM +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup 
+from dataall.base.db import utils +from dataall.base.aws.sts import SessionHelper +from dataall.modules.dataset_sharing.aws.s3_client import S3ControlClient, S3Client +from dataall.modules.dataset_sharing.aws.kms_client import KmsClient +from dataall.base.aws.iam import IAM +from dataall.modules.dataset_sharing.db.share_object_models import ShareObject +from dataall.modules.dataset_sharing.services.dataset_alarm_service import DatasetAlarmService +from dataall.modules.dataset_sharing.db.share_object_repositories import ShareObjectRepository -from ....utils.alarm_service import AlarmService +from dataall.modules.datasets_base.db.dataset_models import DatasetStorageLocation, Dataset logger = logging.getLogger(__name__) ACCESS_POINT_CREATION_TIME = 30 @@ -20,13 +24,13 @@ class S3ShareManager: def __init__( self, session, - dataset: models.Dataset, - share: models.ShareObject, - target_folder: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, - source_env_group: models.EnvironmentGroup, - env_group: models.EnvironmentGroup, + dataset: Dataset, + share: ShareObject, + target_folder: DatasetStorageLocation, + source_environment: Environment, + target_environment: Environment, + source_env_group: EnvironmentGroup, + env_group: EnvironmentGroup, ): self.session = session self.source_env_group = source_env_group @@ -36,10 +40,10 @@ def __init__( self.target_folder = target_folder self.source_environment = source_environment self.target_environment = target_environment - self.share_item = api.ShareObject.find_share_item_by_folder( + self.share_item = ShareObjectRepository.find_sharable_item( session, - share, - target_folder, + share.shareUri, + target_folder.locationUri, ) self.access_point_name = self.share_item.S3AccessPointName @@ -55,15 +59,15 @@ def __init__( @abc.abstractmethod def process_approved_shares(self, *kwargs) -> bool: - return NotImplementedError + raise NotImplementedError @abc.abstractmethod def process_revoked_shares(self, *kwargs) -> bool: - return NotImplementedError + raise NotImplementedError @abc.abstractmethod def clean_up_share(self, *kwargs): - return NotImplementedError + raise NotImplementedError @staticmethod def build_access_point_name(share): @@ -83,7 +87,9 @@ def manage_bucket_policy(self): logger.info( f'Manage Bucket policy for {self.bucket_name}' ) - bucket_policy = json.loads(S3.get_bucket_policy(self.source_account_id, self.source_environment.region, self.bucket_name)) + + s3_client = S3Client(self.source_account_id, self.source_environment.region) + bucket_policy = json.loads(s3_client.get_bucket_policy(self.bucket_name)) for statement in bucket_policy["Statement"]: if statement.get("Sid") in ["AllowAllToAdmin", "DelegateAccessToAccessPoint"]: return @@ -123,7 +129,8 @@ def manage_bucket_policy(self): } bucket_policy["Statement"].append(allow_owner_access) bucket_policy["Statement"].append(delegated_to_accesspoint) - S3.create_bucket_policy(self.source_account_id, self.source_environment.region, self.bucket_name, json.dumps(bucket_policy)) + s3_client = S3Client(self.source_account_id, self.source_environment.region) + s3_client.create_bucket_policy(self.bucket_name, json.dumps(bucket_policy)) def grant_target_role_access_policy(self): """ @@ -191,21 +198,22 @@ def manage_access_point_and_policy(self): :return: """ - access_point_arn = S3.get_bucket_access_point_arn(self.source_account_id, self.source_environment.region, self.access_point_name) + s3_client = 
S3ControlClient(self.source_account_id, self.source_environment.region) + access_point_arn = s3_client.get_bucket_access_point_arn(self.access_point_name) if not access_point_arn: logger.info( f'Access point {self.access_point_name} does not exists, creating...' ) - access_point_arn = S3.create_bucket_access_point(self.source_account_id, self.source_environment.region, self.bucket_name, self.access_point_name) + access_point_arn = s3_client.create_bucket_access_point(self.bucket_name, self.access_point_name) # Access point creation is slow retries = 1 - while not S3.get_bucket_access_point_arn(self.source_account_id, self.source_environment.region, self.access_point_name) and retries < ACCESS_POINT_CREATION_RETRIES: + while not s3_client.get_bucket_access_point_arn(self.access_point_name) and retries < ACCESS_POINT_CREATION_RETRIES: logger.info( 'Waiting 30s for access point creation to complete..' ) time.sleep(ACCESS_POINT_CREATION_TIME) retries += 1 - existing_policy = S3.get_access_point_policy(self.source_account_id, self.source_environment.region, self.access_point_name) + existing_policy = s3_client.get_access_point_policy(self.access_point_name) # requester will use this role to access resources target_requester_id = SessionHelper.get_role_id(self.target_account_id, self.target_requester_IAMRoleName) if existing_policy: @@ -230,7 +238,7 @@ def manage_access_point_and_policy(self): statements[f"{target_requester_id}1"]["Resource"] = resource_list existing_policy["Statement"] = list(statements.values()) else: - additional_policy = S3.generate_access_point_policy_template( + additional_policy = S3ControlClient.generate_access_point_policy_template( target_requester_id, access_point_arn, self.s3_prefix, @@ -242,7 +250,7 @@ def manage_access_point_and_policy(self): logger.info( f'Access point policy for access point {access_point_arn} does not exists, creating policy...' ) - access_point_policy = S3.generate_access_point_policy_template( + access_point_policy = S3ControlClient.generate_access_point_policy_template( target_requester_id, access_point_arn, self.s3_prefix, @@ -264,17 +272,18 @@ def manage_access_point_and_policy(self): } } access_point_policy["Statement"].append(admin_statement) - S3.attach_access_point_policy( - account_id=self.source_account_id, region=self.source_environment.region, - access_point_name=self.access_point_name, policy=json.dumps(access_point_policy)) + s3_client.attach_access_point_policy( + access_point_name=self.access_point_name, policy=json.dumps(access_point_policy) + ) def update_dataset_bucket_key_policy(self): logger.info( 'Updating dataset Bucket KMS key policy...' 
) key_alias = f"alias/{self.dataset.KmsAlias}" - kms_keyId = KMS.get_key_id(self.source_account_id, self.source_environment.region, key_alias) - existing_policy = KMS.get_key_policy(self.source_account_id, self.source_environment.region, kms_keyId, "default") + kms_client = KmsClient(self.source_account_id, self.source_environment.region) + kms_key_id = kms_client.get_key_id(key_alias) + existing_policy = kms_client.get_key_policy(kms_key_id) target_requester_id = SessionHelper.get_role_id(self.target_account_id, self.target_requester_IAMRoleName) if existing_policy and f'{target_requester_id}:*' not in existing_policy: policy = json.loads(existing_policy) @@ -294,20 +303,15 @@ def update_dataset_bucket_key_policy(self): } } ) - KMS.put_key_policy( - self.source_account_id, - self.source_environment.region, - kms_keyId, - "default", - json.dumps(policy) - ) + kms_client.put_key_policy(kms_key_id, json.dumps(policy)) def delete_access_point_policy(self): logger.info( f'Deleting access point policy for access point {self.access_point_name}...' ) - access_point_policy = json.loads(S3.get_access_point_policy(self.source_account_id, self.source_environment.region, self.access_point_name)) - access_point_arn = S3.get_bucket_access_point_arn(self.source_account_id, self.source_environment.region, self.access_point_name) + s3_client = S3ControlClient(self.source_account_id, self.source_environment.region) + access_point_policy = json.loads(s3_client.get_access_point_policy(self.access_point_name)) + access_point_arn = s3_client.get_bucket_access_point_arn(self.access_point_name) target_requester_id = SessionHelper.get_role_id(self.target_account_id, self.target_requester_IAMRoleName) statements = {item["Sid"]: item for item in access_point_policy["Statement"]} if f"{target_requester_id}0" in statements.keys(): @@ -319,30 +323,35 @@ def delete_access_point_policy(self): else: access_point_policy["Statement"].remove(statements[f"{target_requester_id}0"]) access_point_policy["Statement"].remove(statements[f"{target_requester_id}1"]) - S3.attach_access_point_policy(self.source_account_id, self.source_environment.region, self.access_point_name, json.dumps(access_point_policy)) + s3_client.attach_access_point_policy( + access_point_name=self.access_point_name, + policy=json.dumps(access_point_policy) + ) @staticmethod def delete_access_point( - share: models.ShareObject, - dataset: models.Dataset, + share: ShareObject, + dataset: Dataset, ): access_point_name = S3ShareManager.build_access_point_name(share) logger.info( f'Deleting access point {access_point_name}...' ) - access_point_policy = json.loads(S3.get_access_point_policy(dataset.AwsAccountId, dataset.region, access_point_name)) + + s3_client = S3ControlClient(dataset.AwsAccountId, dataset.region) + access_point_policy = json.loads(s3_client.get_access_point_policy(access_point_name)) if len(access_point_policy["Statement"]) <= 1: # At least we have the 'AllowAllToAdmin' statement - S3.delete_bucket_access_point(dataset.AwsAccountId, dataset.region, access_point_name) + s3_client.delete_bucket_access_point(access_point_name) return True else: return False @staticmethod def delete_target_role_access_policy( - share: models.ShareObject, - dataset: models.Dataset, - target_environment: models.Environment, + share: ShareObject, + dataset: Dataset, + target_environment: Environment, ): logger.info( 'Deleting target role IAM policy...' 
@@ -375,29 +384,24 @@ def delete_target_role_access_policy( @staticmethod def delete_dataset_bucket_key_policy( - share: models.ShareObject, - dataset: models.Dataset, - target_environment: models.Environment, + share: ShareObject, + dataset: Dataset, + target_environment: Environment, ): logger.info( 'Deleting dataset bucket KMS key policy...' ) key_alias = f"alias/{dataset.KmsAlias}" - kms_keyId = KMS.get_key_id(dataset.AwsAccountId, dataset.region, key_alias) - existing_policy = KMS.get_key_policy(dataset.AwsAccountId, dataset.region, kms_keyId, "default") + kms_client = KmsClient(dataset.AwsAccountId, dataset.region) + kms_key_id = kms_client.get_key_id(key_alias) + existing_policy = kms_client.get_key_policy(kms_key_id) target_requester_id = SessionHelper.get_role_id(target_environment.AwsAccountId, share.principalIAMRoleName) if existing_policy and f'{target_requester_id}:*' in existing_policy: policy = json.loads(existing_policy) policy["Statement"] = [item for item in policy["Statement"] if item.get("Sid", None) != f"{target_requester_id}"] - KMS.put_key_policy( - dataset.AwsAccountId, - dataset.region, - kms_keyId, - "default", - json.dumps(policy) - ) + kms_client.put_key_policy(kms_key_id, json.dumps(policy)) - def handle_share_failure(self, error: Exception) -> bool: + def handle_share_failure(self, error: Exception) -> None: """ Handles share failure by raising an alarm to alarmsTopic Returns @@ -410,12 +414,11 @@ def handle_share_failure(self, error: Exception) -> bool: f'with target account {self.target_environment.AwsAccountId}/{self.target_environment.region} ' f'due to: {error}' ) - AlarmService().trigger_folder_sharing_failure_alarm( + DatasetAlarmService().trigger_folder_sharing_failure_alarm( self.target_folder, self.share, self.target_environment ) - return True - def handle_revoke_failure(self, error: Exception) -> bool: + def handle_revoke_failure(self, error: Exception) -> None: """ Handles share failure by raising an alarm to alarmsTopic Returns @@ -428,7 +431,6 @@ def handle_revoke_failure(self, error: Exception) -> bool: f'with target account {self.target_environment.AwsAccountId}/{self.target_environment.region} ' f'due to: {error}' ) - AlarmService().trigger_revoke_folder_sharing_failure_alarm( + DatasetAlarmService().trigger_revoke_folder_sharing_failure_alarm( self.target_folder, self.share, self.target_environment ) - return True diff --git a/backend/dataall/modules/dataset_sharing/services/share_notification_service.py b/backend/dataall/modules/dataset_sharing/services/share_notification_service.py new file mode 100644 index 000000000..898a83cce --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/services/share_notification_service.py @@ -0,0 +1,98 @@ +from dataall.core.notifications.db.notification_repositories import Notification +from dataall.core.notifications.db.notification_models import NotificationType +from dataall.modules.dataset_sharing.db.share_object_models import ShareObject +from dataall.modules.datasets_base.db.dataset_models import Dataset + + +class ShareNotificationService: + @staticmethod + def notify_share_object_submission( + session, username: str, dataset: Dataset, share: ShareObject + ): + notifications = [Notification.create( + session=session, + username=dataset.owner, + notification_type=NotificationType.SHARE_OBJECT_SUBMITTED, + target_uri=f'{share.shareUri}|{dataset.datasetUri}', + message=f'User {username} submitted share request for dataset {dataset.label}', + )] + session.add_all(notifications) + return 
notifications + + @staticmethod + def notify_share_object_approval( + session, username: str, dataset: Dataset, share: ShareObject + ): + notifications = [] + targeted_users = ShareNotificationService._get_share_object_targeted_users( + session, dataset, share + ) + for user in targeted_users: + notifications.append( + Notification.create( + session=session, + username=user, + notification_type=NotificationType.SHARE_OBJECT_APPROVED, + target_uri=f'{share.shareUri}|{dataset.datasetUri}', + message=f'User {username} approved share request for dataset {dataset.label}', + ) + ) + session.add_all(notifications) + return notifications + + @staticmethod + def notify_share_object_rejection( + session, username: str, dataset: Dataset, share: ShareObject + ): + notifications = [] + targeted_users = ShareNotificationService._get_share_object_targeted_users( + session, dataset, share + ) + for user in targeted_users: + notifications.append( + Notification.create( + session=session, + username=user, + notification_type=NotificationType.SHARE_OBJECT_REJECTED, + target_uri=f'{share.shareUri}|{dataset.datasetUri}', + message=f'User {username} rejected share request for dataset {dataset.label}', + ) + ) + session.add_all(notifications) + return notifications + + @staticmethod + def notify_new_data_available_from_owners( + session, dataset: Dataset, share: ShareObject, s3_prefix + ): + notifications = [] + targeted_users = ShareNotificationService._get_share_object_targeted_users( + session, dataset, share + ) + for user in targeted_users: + notifications.append( + Notification.create( + session=session, + username=user, + notification_type=NotificationType.DATASET_VERSION, + target_uri=f'{share.shareUri}|{dataset.datasetUri}', + message=f'New data (at {s3_prefix}) is available from dataset {dataset.datasetUri} ' + f'shared by owner {dataset.owner}', + ) + ) + session.add_all(notifications) + return notifications + + @staticmethod + def _get_share_object_targeted_users(session, dataset, share): + targeted_users = ShareNotificationService._get_dataset_stewards(dataset) + targeted_users.append(dataset.owner) + targeted_users.append(share.owner) + return targeted_users + + @staticmethod + def _get_dataset_stewards(dataset): + stewards = list() + stewards.append(dataset.SamlAdminGroupName) + stewards.append(dataset.stewards) + return stewards diff --git a/backend/dataall/modules/dataset_sharing/services/share_object_service.py b/backend/dataall/modules/dataset_sharing/services/share_object_service.py new file mode 100644 index 000000000..69d1ff2f2 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/services/share_object_service.py @@ -0,0 +1,364 @@ +from dataall.core.tasks.service_handlers import Worker +from dataall.base.context import get_context +from dataall.core.activity.db.activity_models import Activity +from dataall.core.environment.db.environment_models import EnvironmentGroup, ConsumptionRole +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.permissions.permission_checker import has_resource_permission +from dataall.core.tasks.db.task_models import Task +from dataall.base.db import utils +from dataall.base.db.exceptions import UnauthorizedOperation +from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareableType, ShareItemStatus, \ + ShareObjectStatus, PrincipalType +from dataall.modules.dataset_sharing.db.share_object_models import 
ShareObjectItem, ShareObject +from dataall.modules.dataset_sharing.db.share_object_repositories import ShareObjectRepository, ShareObjectSM, ShareItemSM +from dataall.modules.dataset_sharing.services.share_exceptions import ShareItemsFound +from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService +from dataall.modules.dataset_sharing.services.share_permissions import REJECT_SHARE_OBJECT, APPROVE_SHARE_OBJECT, \ + SUBMIT_SHARE_OBJECT, SHARE_OBJECT_APPROVER, SHARE_OBJECT_REQUESTER, CREATE_SHARE_OBJECT, DELETE_SHARE_OBJECT, \ + GET_SHARE_OBJECT +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.db.dataset_models import DatasetTable, Dataset +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ + + +class ShareObjectService: + @staticmethod + @has_resource_permission(GET_SHARE_OBJECT) + def get_share_object(uri): + with get_context().db_engine.scoped_session() as session: + return ShareObjectRepository.get_share_by_uri(session, uri) + + @classmethod + @has_resource_permission(CREATE_SHARE_OBJECT) + def create_share_object( + cls, + uri: str, + dataset_uri: str, + item_uri: str, + item_type: str, + group_uri, + principal_id, + principal_type, + requestPurpose, + ): + context = get_context() + with context.db_engine.scoped_session() as session: + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, dataset_uri) + environment = EnvironmentService.get_environment_by_uri(session, uri) + + if environment.region != dataset.region: + raise UnauthorizedOperation( + action=CREATE_SHARE_OBJECT, + message=f'Requester Team {group_uri} works in region {environment.region} ' + f'and the requested dataset is stored in region {dataset.region}', + ) + + if principal_type == PrincipalType.ConsumptionRole.value: + consumption_role: ConsumptionRole = EnvironmentService.get_environment_consumption_role( + session, + principal_id, + environment.environmentUri + ) + principal_iam_role_name = consumption_role.IAMRoleName + else: + env_group: EnvironmentGroup = EnvironmentService.get_environment_group( + session, + group_uri, + environment.environmentUri + ) + principal_iam_role_name = env_group.environmentIAMRoleName + + if ( + dataset.stewards == group_uri or dataset.SamlAdminGroupName == group_uri + ) and environment.environmentUri == dataset.environmentUri and principal_type == PrincipalType.Group.value: + raise UnauthorizedOperation( + action=CREATE_SHARE_OBJECT, + message=f'Team: {group_uri} is managing the dataset {dataset.name}', + ) + + cls._validate_group_membership(session, group_uri, environment.environmentUri) + + share = ShareObjectRepository.find_share(session, dataset, environment, principal_id, group_uri) + if not share: + share = ShareObject( + datasetUri=dataset.datasetUri, + environmentUri=environment.environmentUri, + owner=context.username, + groupUri=group_uri, + principalId=principal_id, + principalType=principal_type, + principalIAMRoleName=principal_iam_role_name, + status=ShareObjectStatus.Draft.value, + requestPurpose=requestPurpose + ) + ShareObjectRepository.save_and_commit(session, share) + + if item_uri: + item = ShareObjectRepository.get_share_item(session, item_type, item_uri) + share_item = ShareObjectRepository.find_sharable_item(session, share.shareUri, item_uri) + + s3_access_point_name = utils.slugify( + share.datasetUri + '-' + share.principalId, + max_length=50, lowercase=True, regex_pattern='[^a-zA-Z0-9-]', separator='-' 
+ ) + + if not share_item and item: + new_share_item: ShareObjectItem = ShareObjectItem( + shareUri=share.shareUri, + itemUri=item_uri, + itemType=item_type, + itemName=item.name, + status=ShareItemStatus.PendingApproval.value, + owner=context.username, + GlueDatabaseName=dataset.GlueDatabaseName + if item_type == ShareableType.Table.value + else '', + GlueTableName=item.GlueTableName + if item_type == ShareableType.Table.value + else '', + S3AccessPointName=s3_access_point_name + if item_type == ShareableType.StorageLocation.value + else '', + ) + session.add(new_share_item) + + activity = Activity( + action='SHARE_OBJECT:CREATE', + label='SHARE_OBJECT:CREATE', + owner=context.username, + summary=f'{context.username} created a share object for the {dataset.name} in {environment.name} for the principal: {principal_id}', + targetUri=dataset.datasetUri, + targetType='dataset', + ) + session.add(activity) + + # Attaching REQUESTER permissions to: + # requester group (groupUri) + # environment.SamlGroupName (if not dataset admins) + ResourcePolicy.attach_resource_policy( + session=session, + group=group_uri, + permissions=SHARE_OBJECT_REQUESTER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + + # Attaching APPROVER permissions to: + # dataset.stewards (includes the dataset Admins) + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.SamlAdminGroupName, + permissions=SHARE_OBJECT_APPROVER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + if dataset.stewards != dataset.SamlAdminGroupName: + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.stewards, + permissions=SHARE_OBJECT_APPROVER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + return share + + @classmethod + @has_resource_permission(SUBMIT_SHARE_OBJECT) + def submit_share_object(cls, uri: str): + context = get_context() + with context.db_engine.scoped_session() as session: + share, dataset, states = cls._get_share_data(session, uri) + + valid_states = [ShareItemStatus.PendingApproval.value] + valid_share_items_states = [x for x in valid_states if x in states] + + if not valid_share_items_states: + raise ShareItemsFound( + action='Submit Share Object', + message='The request is empty of pending items. 
Add items to share request.', + ) + + cls._run_transitions(session, share, states, ShareObjectActions.Submit) + ShareNotificationService.notify_share_object_submission( + session, context.username, dataset, share + ) + return share + + @classmethod + @has_resource_permission(APPROVE_SHARE_OBJECT) + def approve_share_object(cls, uri: str): + context = get_context() + with context.db_engine.scoped_session() as session: + share, dataset, states = cls._get_share_data(session, uri) + cls._run_transitions(session, share, states, ShareObjectActions.Approve) + + # GET TABLES SHARED AND APPROVE SHARE FOR EACH TABLE + share_table_items = ShareObjectRepository.find_all_share_items(session, uri, ShareableType.Table.value) + for table in share_table_items: + ResourcePolicy.attach_resource_policy( + session=session, + group=share.principalId, + permissions=DATASET_TABLE_READ, + resource_uri=table.itemUri, + resource_type=DatasetTable.__name__, + ) + + share.rejectPurpose = "" + session.commit() + + ShareNotificationService.notify_share_object_approval(session, context.username, dataset, share) + + approve_share_task: Task = Task( + action='ecs.share.approve', + targetUri=uri, + payload={'environmentUri': share.environmentUri}, + ) + session.add(approve_share_task) + + Worker.queue(engine=context.db_engine, task_ids=[approve_share_task.taskUri]) + + return share + + @staticmethod + @has_resource_permission(SUBMIT_SHARE_OBJECT) + def update_share_request_purpose(uri: str, request_purpose) -> bool: + with get_context().db_engine.scoped_session() as session: + share = ShareObjectRepository.get_share_by_uri(session, uri) + share.requestPurpose = request_purpose + session.commit() + return True + + @staticmethod + @has_resource_permission(REJECT_SHARE_OBJECT) + def update_share_reject_purpose(uri: str, reject_purpose) -> bool: + with get_context().db_engine.scoped_session() as session: + share = ShareObjectRepository.get_share_by_uri(session, uri) + share.rejectPurpose = reject_purpose + session.commit() + return True + + @classmethod + @has_resource_permission(REJECT_SHARE_OBJECT) + def reject_share_object(cls, uri: str, reject_purpose: str): + context = get_context() + with context.db_engine.scoped_session() as session: + share, dataset, states = cls._get_share_data(session, uri) + cls._run_transitions(session, share, states, ShareObjectActions.Reject) + ResourcePolicy.delete_resource_policy( + session=session, + group=share.groupUri, + resource_uri=dataset.datasetUri, + ) + + # Update Reject Purpose + share.rejectPurpose = reject_purpose + session.commit() + + ShareNotificationService.notify_share_object_rejection(session, context.username, dataset, share) + return share + + @classmethod + @has_resource_permission(DELETE_SHARE_OBJECT) + def delete_share_object(cls, uri: str): + with get_context().db_engine.scoped_session() as session: + share, dataset, states = cls._get_share_data(session, uri) + shared_share_items_states = [x for x in ShareItemSM.get_share_item_shared_states() if x in states] + + new_state = cls._run_transitions(session, share, states, ShareObjectActions.Delete) + if shared_share_items_states: + raise ShareItemsFound( + action='Delete share object', + message='There are shared items in this request. 
' + 'Revoke access to these items before deleting the request.', + ) + + if new_state == ShareObjectStatus.Deleted.value: + session.delete(share) + + return True + + @staticmethod + def resolve_share_object_statistics(uri): + with get_context().db_engine.scoped_session() as session: + tables = ShareObjectRepository.count_sharable_items(session, uri, 'DatasetTable') + locations = ShareObjectRepository.count_sharable_items(session, uri, 'DatasetStorageLocation') + shared_items = ShareObjectRepository.count_items_in_states( + session, uri, ShareItemSM.get_share_item_shared_states() + ) + revoked_items = ShareObjectRepository.count_items_in_states( + session, uri, [ShareItemStatus.Revoke_Succeeded.value] + ) + failed_states = [ + ShareItemStatus.Share_Failed.value, + ShareItemStatus.Revoke_Failed.value + ] + failed_items = ShareObjectRepository.count_items_in_states( + session, uri, failed_states + ) + pending_items = ShareObjectRepository.count_items_in_states( + session, uri, [ShareItemStatus.PendingApproval.value] + ) + return {'tables': tables, 'locations': locations, 'sharedItems': shared_items, 'revokedItems': revoked_items, + 'failedItems': failed_items, 'pendingItems': pending_items} + + @staticmethod + def list_shares_in_my_inbox(filter: dict): + context = get_context() + with context.db_engine.scoped_session() as session: + return ShareObjectRepository.list_user_received_share_requests( + session=session, + username=context.username, + groups=context.groups, + data=filter, + ) + + @staticmethod + def list_shares_in_my_outbox(filter): + context = get_context() + with context.db_engine.scoped_session() as session: + return ShareObjectRepository.list_user_sent_share_requests( + session=session, + username=context.username, + groups=context.groups, + data=filter, + ) + + @staticmethod + def _run_transitions(session, share, share_items_states, action): + share_sm = ShareObjectSM(share.status) + new_share_state = share_sm.run_transition(action.value) + + for item_state in share_items_states: + item_sm = ShareItemSM(item_state) + new_state = item_sm.run_transition(action.value) + item_sm.update_state(session, share.shareUri, new_state) + + share_sm.update_state(session, share, new_share_state) + return new_share_state + + @staticmethod + def _get_share_data(session, uri): + share = ShareObjectRepository.get_share_by_uri(session, uri) + dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) + share_items_states = ShareObjectRepository.get_share_items_states(session, uri) + return share, dataset, share_items_states + + @staticmethod + def _validate_group_membership( + session, share_object_group, environment_uri + ): + context = get_context() + if share_object_group and share_object_group not in context.groups: + raise UnauthorizedOperation( + action=CREATE_SHARE_OBJECT, + message=f'User: {context.username} is not a member of the team {share_object_group}', + ) + if share_object_group not in EnvironmentService.list_environment_groups( + session=session, + uri=environment_uri, + ): + raise UnauthorizedOperation( + action=CREATE_SHARE_OBJECT, + message=f'Team: {share_object_group} is not a member of the environment {environment_uri}', + ) diff --git a/backend/dataall/modules/dataset_sharing/services/share_permissions.py b/backend/dataall/modules/dataset_sharing/services/share_permissions.py new file mode 100644 index 000000000..6e539f6bb --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/services/share_permissions.py @@ -0,0 +1,58 @@ +""" +SHARE OBJECT +""" +from 
dataall.core.permissions.permissions import ENVIRONMENT_INVITED, ENVIRONMENT_INVITATION_REQUEST, ENVIRONMENT_ALL, RESOURCES_ALL, \ + RESOURCES_ALL_WITH_DESC + +ADD_ITEM = 'ADD_ITEM' +REMOVE_ITEM = 'REMOVE_ITEM' +SUBMIT_SHARE_OBJECT = 'SUBMIT_SHARE_OBJECT' +APPROVE_SHARE_OBJECT = 'APPROVE_SHARE_OBJECT' +REJECT_SHARE_OBJECT = 'REJECT_SHARE_OBJECT' +DELETE_SHARE_OBJECT = 'DELETE_SHARE_OBJECT' +GET_SHARE_OBJECT = 'GET_SHARE_OBJECT' +LIST_SHARED_ITEMS = 'LIST_SHARED_ITEMS' +SHARE_OBJECT_REQUESTER = [ + ADD_ITEM, + REMOVE_ITEM, + SUBMIT_SHARE_OBJECT, + GET_SHARE_OBJECT, + LIST_SHARED_ITEMS, + DELETE_SHARE_OBJECT, +] +SHARE_OBJECT_APPROVER = [ + ADD_ITEM, + REMOVE_ITEM, + APPROVE_SHARE_OBJECT, + REJECT_SHARE_OBJECT, + DELETE_SHARE_OBJECT, + GET_SHARE_OBJECT, + LIST_SHARED_ITEMS, +] +SHARE_OBJECT_ALL = [ + ADD_ITEM, + REMOVE_ITEM, + SUBMIT_SHARE_OBJECT, + APPROVE_SHARE_OBJECT, + REJECT_SHARE_OBJECT, + DELETE_SHARE_OBJECT, + GET_SHARE_OBJECT, + LIST_SHARED_ITEMS, +] + +CREATE_SHARE_OBJECT = 'CREATE_SHARE_OBJECT' +LIST_ENVIRONMENT_SHARED_WITH_OBJECTS = 'LIST_ENVIRONMENT_SHARED_WITH_OBJECTS' + +ENVIRONMENT_INVITED.append(CREATE_SHARE_OBJECT) +ENVIRONMENT_INVITED.append(LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) +ENVIRONMENT_INVITATION_REQUEST.append(CREATE_SHARE_OBJECT) +ENVIRONMENT_INVITATION_REQUEST.append(LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) +ENVIRONMENT_ALL.append(CREATE_SHARE_OBJECT) +ENVIRONMENT_ALL.append(LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) + +RESOURCES_ALL.extend(SHARE_OBJECT_ALL) +for perm in SHARE_OBJECT_ALL: + RESOURCES_ALL_WITH_DESC[perm] = perm + +RESOURCES_ALL_WITH_DESC[CREATE_SHARE_OBJECT] = 'Request datasets access for this environment' +RESOURCES_ALL_WITH_DESC[LIST_ENVIRONMENT_SHARED_WITH_OBJECTS] = "List datasets shared with this environments" diff --git a/backend/dataall/modules/dataset_sharing/services/share_processors/__init__.py b/backend/dataall/modules/dataset_sharing/services/share_processors/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py similarity index 75% rename from backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py rename to backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py index ffdf7d487..d28340cd6 100644 --- a/backend/dataall/tasks/data_sharing/share_processors/lf_process_cross_account_share.py +++ b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_cross_account_share.py @@ -1,9 +1,12 @@ import logging - +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup +from dataall.modules.dataset_sharing.db.enums import ShareItemStatus, ShareObjectActions, ShareItemActions from ..share_managers import LFShareManager -from ....aws.handlers.ram import Ram -from ....db import models, api +from dataall.modules.dataset_sharing.aws.ram_client import RamClient +from dataall.modules.datasets_base.db.dataset_models import DatasetTable, Dataset +from dataall.modules.dataset_sharing.db.share_object_models import ShareObject +from dataall.modules.dataset_sharing.db.share_object_repositories import ShareObjectRepository, ShareItemSM log = logging.getLogger(__name__) @@ -12,13 +15,13 @@ class ProcessLFCrossAccountShare(LFShareManager): def __init__( self, session, - dataset: models.Dataset, - share: models.ShareObject, - shared_tables: 
[models.DatasetTable], - revoked_tables: [models.DatasetTable], - source_environment: models.Environment, - target_environment: models.Environment, - env_group: models.EnvironmentGroup, + dataset: Dataset, + share: ShareObject, + shared_tables: [DatasetTable], + revoked_tables: [DatasetTable], + source_environment: Environment, + target_environment: Environment, + env_group: EnvironmentGroup, ): super().__init__( session, @@ -71,8 +74,8 @@ def process_approved_shares(self) -> bool: for table in self.shared_tables: log.info(f"Sharing table {table.GlueTableName}...") - share_item = api.ShareObject.find_share_item_by_table( - self.session, self.share, table + share_item = ShareObjectRepository.find_sharable_item( + self.session, self.share.shareUri, table.tableUri ) if not share_item: @@ -82,8 +85,8 @@ def process_approved_shares(self) -> bool: ) continue - shared_item_SM = api.ShareItemSM(models.ShareItemStatus.Share_Approved.value) - new_state = shared_item_SM.run_transition(models.Enums.ShareObjectActions.Start.value) + shared_item_SM = ShareItemSM(ShareItemStatus.Share_Approved.value) + new_state = shared_item_SM.run_transition(ShareObjectActions.Start.value) shared_item_SM.update_state_single_item(self.session, share_item, new_state) try: @@ -96,20 +99,20 @@ def process_approved_shares(self) -> bool: ( retry_share_table, failed_invitations, - ) = Ram.accept_ram_invitation(**data) + ) = RamClient.accept_ram_invitation(**data) if retry_share_table: self.share_table_with_target_account(**data) - Ram.accept_ram_invitation(**data) + RamClient.accept_ram_invitation(**data) self.create_resource_link(**data) - new_state = shared_item_SM.run_transition(models.Enums.ShareItemActions.Success.value) + new_state = shared_item_SM.run_transition(ShareItemActions.Success.value) shared_item_SM.update_state_single_item(self.session, share_item, new_state) except Exception as e: self.handle_share_failure(table=table, share_item=share_item, error=e) - new_state = shared_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) + new_state = shared_item_SM.run_transition(ShareItemActions.Failure.value) shared_item_SM.update_state_single_item(self.session, share_item, new_state) success = False @@ -137,12 +140,12 @@ def process_revoked_shares(self) -> bool: shared_db_name = self.build_shared_db_name() principals = self.get_share_principals() for table in self.revoked_tables: - share_item = api.ShareObject.find_share_item_by_table( - self.session, self.share, table + share_item = ShareObjectRepository.find_sharable_item( + self.session, self.share.shareUri, table.tableUri ) - revoked_item_SM = api.ShareItemSM(models.ShareItemStatus.Revoke_Approved.value) - new_state = revoked_item_SM.run_transition(models.Enums.ShareObjectActions.Start.value) + revoked_item_SM = ShareItemSM(ShareItemStatus.Revoke_Approved.value) + new_state = revoked_item_SM.run_transition(ShareObjectActions.Start.value) revoked_item_SM.update_state_single_item(self.session, share_item, new_state) try: @@ -158,12 +161,12 @@ def process_revoked_shares(self) -> bool: self.delete_resource_link_table(table) - new_state = revoked_item_SM.run_transition(models.Enums.ShareItemActions.Success.value) + new_state = revoked_item_SM.run_transition(ShareItemActions.Success.value) revoked_item_SM.update_state_single_item(self.session, share_item, new_state) except Exception as e: self.handle_revoke_failure(share_item=share_item, table=table, error=e) - new_state = revoked_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) + 
new_state = revoked_item_SM.run_transition(ShareItemActions.Failure.value) revoked_item_SM.update_state_single_item(self.session, share_item, new_state) success = False @@ -181,7 +184,7 @@ def clean_up_share(self) -> bool: self.delete_shared_database() - if not api.ShareObject.other_approved_share_object_exists( + if not ShareObjectRepository.other_approved_share_object_exists( self.session, self.target_environment.environmentUri, self.dataset.datasetUri, diff --git a/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py similarity index 75% rename from backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py rename to backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py index 4b5ad4096..270538a0b 100644 --- a/backend/dataall/tasks/data_sharing/share_processors/lf_process_same_account_share.py +++ b/backend/dataall/modules/dataset_sharing/services/share_processors/lf_process_same_account_share.py @@ -1,7 +1,11 @@ import logging +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup +from dataall.modules.dataset_sharing.db.enums import ShareItemStatus, ShareObjectActions, ShareItemActions +from dataall.modules.dataset_sharing.db.share_object_models import ShareObject +from dataall.modules.dataset_sharing.db.share_object_repositories import ShareObjectRepository, ShareItemSM from ..share_managers import LFShareManager -from ....db import models, api +from dataall.modules.datasets_base.db.dataset_models import DatasetTable, Dataset log = logging.getLogger(__name__) @@ -10,13 +14,13 @@ class ProcessLFSameAccountShare(LFShareManager): def __init__( self, session, - dataset: models.Dataset, - share: models.ShareObject, - shared_tables: [models.DatasetTable], - revoked_tables: [models.DatasetTable], - source_environment: models.Environment, - target_environment: models.Environment, - env_group: models.EnvironmentGroup, + dataset: Dataset, + share: ShareObject, + shared_tables: [DatasetTable], + revoked_tables: [DatasetTable], + source_environment: Environment, + target_environment: Environment, + env_group: EnvironmentGroup, ): super().__init__( session, @@ -66,8 +70,8 @@ def process_approved_shares(self) -> bool: for table in self.shared_tables: - share_item = api.ShareObject.find_share_item_by_table( - self.session, self.share, table + share_item = ShareObjectRepository.find_sharable_item( + self.session, self.share.shareUri, table.tableUri ) if not share_item: @@ -76,8 +80,8 @@ def process_approved_shares(self) -> bool: f'and Dataset Table {table.GlueTableName} continuing loop...' 
) continue - shared_item_SM = api.ShareItemSM(models.ShareItemStatus.Share_Approved.value) - new_state = shared_item_SM.run_transition(models.Enums.ShareObjectActions.Start.value) + shared_item_SM = ShareItemSM(ShareItemStatus.Share_Approved.value) + new_state = shared_item_SM.run_transition(ShareObjectActions.Start.value) shared_item_SM.update_state_single_item(self.session, share_item, new_state) try: @@ -87,12 +91,12 @@ def process_approved_shares(self) -> bool: data = self.build_share_data(table) self.create_resource_link(**data) - new_state = shared_item_SM.run_transition(models.Enums.ShareItemActions.Success.value) + new_state = shared_item_SM.run_transition(ShareItemActions.Success.value) shared_item_SM.update_state_single_item(self.session, share_item, new_state) except Exception as e: self.handle_share_failure(table, share_item, e) - new_state = shared_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) + new_state = shared_item_SM.run_transition(ShareItemActions.Failure.value) shared_item_SM.update_state_single_item(self.session, share_item, new_state) success = False @@ -117,8 +121,8 @@ def process_revoked_shares(self) -> bool: shared_db_name = self.build_shared_db_name() principals = self.get_share_principals() for table in self.revoked_tables: - share_item = api.ShareObject.find_share_item_by_table( - self.session, self.share, table + share_item = ShareObjectRepository.find_sharable_item( + self.session, self.share.shareUri, table.tableUri ) if not share_item: log.info( @@ -127,8 +131,8 @@ def process_revoked_shares(self) -> bool: ) continue - revoked_item_SM = api.ShareItemSM(models.ShareItemStatus.Revoke_Approved.value) - new_state = revoked_item_SM.run_transition(models.Enums.ShareObjectActions.Start.value) + revoked_item_SM = ShareItemSM(ShareItemStatus.Revoke_Approved.value) + new_state = revoked_item_SM.run_transition(ShareObjectActions.Start.value) revoked_item_SM.update_state_single_item(self.session, share_item, new_state) try: @@ -143,12 +147,12 @@ def process_revoked_shares(self) -> bool: self.delete_resource_link_table(table) - new_state = revoked_item_SM.run_transition(models.Enums.ShareItemActions.Success.value) + new_state = revoked_item_SM.run_transition(ShareItemActions.Success.value) revoked_item_SM.update_state_single_item(self.session, share_item, new_state) except Exception as e: self.handle_revoke_failure(share_item, table, e) - new_state = revoked_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) + new_state = revoked_item_SM.run_transition(ShareItemActions.Failure.value) revoked_item_SM.update_state_single_item(self.session, share_item, new_state) success = False diff --git a/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py b/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py new file mode 100644 index 000000000..8e2f6cf38 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/services/share_processors/s3_process_share.py @@ -0,0 +1,199 @@ +import logging + +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup +from ..share_managers import S3ShareManager +from dataall.modules.datasets_base.db.dataset_models import DatasetStorageLocation, Dataset +from dataall.modules.dataset_sharing.db.enums import ShareItemStatus, ShareObjectActions, ShareItemActions +from dataall.modules.dataset_sharing.db.share_object_models import ShareObject +from dataall.modules.dataset_sharing.db.share_object_repositories import 
ShareObjectRepository, ShareItemSM + +log = logging.getLogger(__name__) + + +class ProcessS3Share(S3ShareManager): + def __init__( + self, + session, + dataset: Dataset, + share: ShareObject, + share_folder: DatasetStorageLocation, + source_environment: Environment, + target_environment: Environment, + source_env_group: EnvironmentGroup, + env_group: EnvironmentGroup, + ): + + super().__init__( + session, + dataset, + share, + share_folder, + source_environment, + target_environment, + source_env_group, + env_group, + ) + + @classmethod + def process_approved_shares( + cls, + session, + dataset: Dataset, + share: ShareObject, + share_folders: [DatasetStorageLocation], + source_environment: Environment, + target_environment: Environment, + source_env_group: EnvironmentGroup, + env_group: EnvironmentGroup + ) -> bool: + """ + 1) update_share_item_status with Start action + 2) (one time only) manage_bucket_policy - grants permission in the bucket policy + 3) grant_target_role_access_policy + 4) manage_access_point_and_policy + 5) update_dataset_bucket_key_policy + 6) update_share_item_status with Finish action + + Returns + ------- + True if share is granted successfully + """ + log.info( + '##### Starting Sharing folders #######' + ) + success = True + for folder in share_folders: + log.info(f'sharing folder: {folder}') + sharing_item = ShareObjectRepository.find_sharable_item( + session, + share.shareUri, + folder.locationUri, + ) + shared_item_SM = ShareItemSM(ShareItemStatus.Share_Approved.value) + new_state = shared_item_SM.run_transition(ShareObjectActions.Start.value) + shared_item_SM.update_state_single_item(session, sharing_item, new_state) + + sharing_folder = cls( + session, + dataset, + share, + folder, + source_environment, + target_environment, + source_env_group, + env_group, + ) + + try: + sharing_folder.manage_bucket_policy() + sharing_folder.grant_target_role_access_policy() + sharing_folder.manage_access_point_and_policy() + sharing_folder.update_dataset_bucket_key_policy() + + new_state = shared_item_SM.run_transition(ShareItemActions.Success.value) + shared_item_SM.update_state_single_item(session, sharing_item, new_state) + + except Exception as e: + sharing_folder.handle_share_failure(e) + new_state = shared_item_SM.run_transition(ShareItemActions.Failure.value) + shared_item_SM.update_state_single_item(session, sharing_item, new_state) + success = False + + return success + + @classmethod + def process_revoked_shares( + cls, + session, + dataset: Dataset, + share: ShareObject, + revoke_folders: [DatasetStorageLocation], + source_environment: Environment, + target_environment: Environment, + source_env_group: EnvironmentGroup, + env_group: EnvironmentGroup + ) -> bool: + """ + 1) update_share_item_status with Start action + 2) delete_access_point_policy for folder + 3) update_share_item_status with Finish action + + Returns + ------- + True if share is revoked successfully + """ + + log.info( + '##### Starting Revoking folders #######' + ) + success = True + for folder in revoke_folders: + log.info(f'revoking access to folder: {folder}') + removing_item = ShareObjectRepository.find_sharable_item( + session, + share.shareUri, + folder.locationUri, + ) + + revoked_item_SM = ShareItemSM(ShareItemStatus.Revoke_Approved.value) + new_state = revoked_item_SM.run_transition(ShareObjectActions.Start.value) + revoked_item_SM.update_state_single_item(session, removing_item, new_state) + + removing_folder = cls( + session, + dataset, + share, + folder, + source_environment, + 
target_environment, + source_env_group, + env_group, + ) + + try: + removing_folder.delete_access_point_policy() + + new_state = revoked_item_SM.run_transition(ShareItemActions.Success.value) + revoked_item_SM.update_state_single_item(session, removing_item, new_state) + + except Exception as e: + removing_folder.handle_revoke_failure(e) + new_state = revoked_item_SM.run_transition(ShareItemActions.Failure.value) + revoked_item_SM.update_state_single_item(session, removing_item, new_state) + success = False + + return success + + @staticmethod + def clean_up_share( + dataset: Dataset, + share: ShareObject, + target_environment: Environment + ): + """ + 1) deletes S3 access point for this share in this Dataset S3 Bucket + 2) delete_target_role_access_policy to access the above deleted access point + 3) delete_dataset_bucket_key_policy to remove access to the requester IAM role + + Returns + ------- + True if share is cleaned-up successfully + """ + + clean_up = S3ShareManager.delete_access_point( + share=share, + dataset=dataset + ) + if clean_up: + S3ShareManager.delete_target_role_access_policy( + share=share, + dataset=dataset, + target_environment=target_environment + ) + S3ShareManager.delete_dataset_bucket_key_policy( + share=share, + dataset=dataset, + target_environment=target_environment + ) + + return True diff --git a/backend/dataall/modules/dataset_sharing/tasks/__init__.py b/backend/dataall/modules/dataset_sharing/tasks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/dataset_sharing/tasks/share_manager_task.py b/backend/dataall/modules/dataset_sharing/tasks/share_manager_task.py new file mode 100644 index 000000000..f884d1425 --- /dev/null +++ b/backend/dataall/modules/dataset_sharing/tasks/share_manager_task.py @@ -0,0 +1,37 @@ +import logging +import os +import sys + +from dataall.modules.dataset_sharing.services.data_sharing_service import DataSharingService +from dataall.base.db import get_engine + +root = logging.getLogger() +root.setLevel(logging.INFO) +if not root.hasHandlers(): + root.addHandler(logging.StreamHandler(sys.stdout)) +log = logging.getLogger(__name__) + + +if __name__ == '__main__': + + try: + ENVNAME = os.environ.get('envname', 'local') + ENGINE = get_engine(envname=ENVNAME) + + share_uri = os.getenv('shareUri') + share_item_uri = os.getenv('shareItemUri') + handler = os.getenv('handler') + + if handler == 'approve_share': + log.info(f'Starting processing task for share : {share_uri}...') + DataSharingService.approve_share(engine=ENGINE, share_uri=share_uri) + + elif handler == 'revoke_share': + log.info(f'Starting revoking task for share : {share_uri}...') + DataSharingService.revoke_share(engine=ENGINE, share_uri=share_uri) + + log.info('Sharing task finished successfully') + + except Exception as e: + log.error(f'Sharing task failed due to: {e}') + raise e diff --git a/backend/dataall/modules/datasets/__init__.py b/backend/dataall/modules/datasets/__init__.py new file mode 100644 index 000000000..5c710cf3a --- /dev/null +++ b/backend/dataall/modules/datasets/__init__.py @@ -0,0 +1,157 @@ +"""Contains the code related to datasets""" +import logging +from typing import List, Type, Set + +from dataall.base.loader import ModuleInterface, ImportMode +from dataall.modules.datasets.services.dataset_permissions import GET_DATASET, UPDATE_DATASET +from dataall.modules.datasets_base import DatasetBaseModuleInterface +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from 
dataall.modules.datasets_base.db.dataset_models import DatasetTableColumn, DatasetStorageLocation, DatasetTable, Dataset + +log = logging.getLogger(__name__) + + +class DatasetApiModuleInterface(ModuleInterface): +    """Implements ModuleInterface for the dataset GraphQL lambda""" + +    @staticmethod +    def is_supported(modes): +        return ImportMode.API in modes + +    @staticmethod +    def depends_on() -> List[Type['ModuleInterface']]: +        from dataall.modules.dataset_sharing import SharingApiModuleInterface +        from dataall.modules.catalog import CatalogApiModuleInterface +        from dataall.modules.feed import FeedApiModuleInterface +        from dataall.modules.vote import VoteApiModuleInterface + +        return [ +            SharingApiModuleInterface, DatasetBaseModuleInterface, CatalogApiModuleInterface, +            FeedApiModuleInterface, VoteApiModuleInterface +        ] + +    def __init__(self): +        # these imports are placed inside the method because they are only related to the GraphQL API. +        from dataall.core.stacks.db.target_type_repositories import TargetType +        from dataall.modules.vote.api.resolvers import add_vote_type +        from dataall.modules.feed.api.registry import FeedRegistry, FeedDefinition +        from dataall.modules.catalog.api.registry import GlossaryRegistry, GlossaryDefinition +        from dataall.core.environment.services.environment_resource_manager import EnvironmentResourceManager +        from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer +        from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer +        from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer + +        import dataall.modules.datasets.api + +        FeedRegistry.register(FeedDefinition("DatasetTableColumn", DatasetTableColumn)) +        FeedRegistry.register(FeedDefinition("DatasetStorageLocation", DatasetStorageLocation)) +        FeedRegistry.register(FeedDefinition("DatasetTable", DatasetTable)) +        FeedRegistry.register(FeedDefinition("Dataset", Dataset)) + +        GlossaryRegistry.register(GlossaryDefinition("Column", "DatasetTableColumn", DatasetTableColumn)) +        GlossaryRegistry.register(GlossaryDefinition( +            target_type="Folder", +            object_type="DatasetStorageLocation", +            model=DatasetStorageLocation, +            reindexer=DatasetLocationIndexer +        )) + +        GlossaryRegistry.register(GlossaryDefinition( +            target_type="Dataset", +            object_type="Dataset", +            model=Dataset, +            reindexer=DatasetIndexer +        )) + +        GlossaryRegistry.register(GlossaryDefinition( +            target_type="DatasetTable", +            object_type="DatasetTable", +            model=DatasetTable, +            reindexer=DatasetTableIndexer +        )) + +        add_vote_type("dataset", DatasetIndexer) + +        TargetType("dataset", GET_DATASET, UPDATE_DATASET) + +        EnvironmentResourceManager.register(DatasetRepository()) + +        log.info("API of datasets has been imported") + + +class DatasetAsyncHandlersModuleInterface(ModuleInterface): +    """Implements ModuleInterface for the dataset async lambda""" + +    @staticmethod +    def is_supported(modes: Set[ImportMode]): +        return ImportMode.HANDLERS in modes + +    def __init__(self): +        import dataall.modules.datasets.handlers +        log.info("Dataset handlers have been imported") + +    @staticmethod +    def depends_on() -> List[Type['ModuleInterface']]: +        from dataall.modules.dataset_sharing import SharingAsyncHandlersModuleInterface + +        return [SharingAsyncHandlersModuleInterface, DatasetBaseModuleInterface] + + +class DatasetCdkModuleInterface(ModuleInterface): +    """Loads dataset CDK stacks""" + +    @staticmethod +    def is_supported(modes: Set[ImportMode]): +        return ImportMode.CDK in modes + +    @staticmethod +    def depends_on() ->
List[Type['ModuleInterface']]: + from dataall.modules.dataset_sharing import DataSharingCdkModuleInterface + return [DatasetBaseModuleInterface, DataSharingCdkModuleInterface] + + def __init__(self): + import dataall.modules.datasets.cdk + from dataall.core.environment.cdk.environment_stack import EnvironmentSetup + from dataall.modules.datasets.cdk.dataset_glue_profiler_extension import DatasetGlueProfilerExtension + from dataall.modules.datasets.cdk.dataset_custom_resources_extension import DatasetCustomResourcesExtension + + EnvironmentSetup.register(DatasetGlueProfilerExtension) + EnvironmentSetup.register(DatasetCustomResourcesExtension) + + log.info("Dataset stacks have been imported") + + +class DatasetStackUpdaterModuleInterface(ModuleInterface): + + @staticmethod + def is_supported(modes: Set[ImportMode]) -> bool: + return ImportMode.STACK_UPDATER_TASK in modes + + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [DatasetBaseModuleInterface] + + def __init__(self): + from dataall.modules.datasets.tasks.dataset_stack_finder import DatasetStackFinder + + DatasetStackFinder() + log.info("Dataset stack updater task has been loaded") + + +class DatasetCatalogIndexerModuleInterface(ModuleInterface): + + @staticmethod + def is_supported(modes: Set[ImportMode]) -> bool: + return ImportMode.CATALOG_INDEXER_TASK in modes + + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + from dataall.modules.catalog import CatalogIndexerModuleInterface + + return [DatasetBaseModuleInterface, CatalogIndexerModuleInterface] + + def __init__(self): + from dataall.modules.datasets.indexers.dataset_catalog_indexer import DatasetCatalogIndexer + + DatasetCatalogIndexer() + log.info("Dataset catalog indexer task has been loaded") diff --git a/backend/dataall/modules/datasets/api/__init__.py b/backend/dataall/modules/datasets/api/__init__.py new file mode 100644 index 000000000..cd34a2ac4 --- /dev/null +++ b/backend/dataall/modules/datasets/api/__init__.py @@ -0,0 +1,10 @@ +"""The GraphQL schema of datasets and related functionality""" +from dataall.modules.datasets.api import ( + table_column, + profiling, + storage_location, + table, + dataset +) + +__all__ = ["table_column", "profiling", "storage_location", "table", "dataset"] diff --git a/backend/dataall/modules/datasets/api/dataset/__init__.py b/backend/dataall/modules/datasets/api/dataset/__init__.py new file mode 100644 index 000000000..c0e99b6f6 --- /dev/null +++ b/backend/dataall/modules/datasets/api/dataset/__init__.py @@ -0,0 +1,10 @@ +from dataall.modules.datasets.api.dataset import ( + input_types, + mutations, + queries, + resolvers, + types, + enums +) + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations', 'enums'] diff --git a/backend/dataall/modules/datasets/api/dataset/enums.py b/backend/dataall/modules/datasets/api/dataset/enums.py new file mode 100644 index 000000000..523ec3906 --- /dev/null +++ b/backend/dataall/modules/datasets/api/dataset/enums.py @@ -0,0 +1,44 @@ +from dataall.base.api.constants import GraphQLEnumMapper + + +class DatasetRole(GraphQLEnumMapper): + # Permissions on a dataset + BusinessOwner = '999' + DataSteward = '998' + Creator = '950' + Admin = '900' + Shared = '300' + NoPermission = '000' + + +class DatasetSortField(GraphQLEnumMapper): + label = 'label' + created = 'created' + updated = 'updated' + + +class ConfidentialityClassification(GraphQLEnumMapper): + Unclassified = 'Unclassified' + Official = 'Official' + Secret = 'Secret' + + +class 
Language(GraphQLEnumMapper): + English = 'English' + French = 'French' + German = 'German' + + +class Topic(GraphQLEnumMapper): + Finances = 'Finances' + HumanResources = 'HumanResources' + Products = 'Products' + Services = 'Services' + Operations = 'Operations' + Research = 'Research' + Sales = 'Sales' + Orders = 'Orders' + Sites = 'Sites' + Energy = 'Energy' + Customers = 'Customers' + Misc = 'Misc' diff --git a/backend/dataall/modules/datasets/api/dataset/input_types.py b/backend/dataall/modules/datasets/api/dataset/input_types.py new file mode 100644 index 000000000..d238a8103 --- /dev/null +++ b/backend/dataall/modules/datasets/api/dataset/input_types.py @@ -0,0 +1,107 @@ +from dataall.base.api import gql +from dataall.base.api.constants import SortDirection +from dataall.modules.datasets.api.dataset.enums import DatasetSortField + + +NewDatasetInput = gql.InputType( + name='NewDatasetInput', + arguments=[ + gql.Argument('label', gql.NonNullableType(gql.String)), + gql.Argument('organizationUri', gql.NonNullableType(gql.String)), + gql.Argument('environmentUri', gql.NonNullableType(gql.String)), + gql.Argument('description', gql.String), + gql.Argument('tags', gql.ArrayType(gql.String)), + gql.Argument('owner', gql.String), + gql.Argument('language', gql.Ref('Language')), + gql.Argument('topics', gql.ArrayType(gql.Ref('Topic'))), + gql.Argument(name='SamlAdminGroupName', type=gql.NonNullableType(gql.String)), + gql.Argument(name='businessOwnerEmail', type=gql.String), + gql.Argument( + name='businessOwnerDelegationEmails', type=gql.ArrayType(gql.String) + ), + gql.Argument('confidentiality', gql.Ref('ConfidentialityClassification')), + gql.Argument(name='stewards', type=gql.String), + ], +) + +ModifyDatasetInput = gql.InputType( + name='ModifyDatasetInput', + arguments=[ + gql.Argument('label', gql.String), + gql.Argument('description', gql.String), + gql.Argument('tags', gql.ArrayType(gql.String)), + gql.Argument('topics', gql.ArrayType(gql.Ref('Topic'))), + gql.Argument('terms', gql.ArrayType(gql.String)), + gql.Argument('businessOwnerDelegationEmails', gql.ArrayType(gql.String)), + gql.Argument('businessOwnerEmail', gql.String), + gql.Argument('language', gql.Ref('Language')), + gql.Argument('confidentiality', gql.Ref('ConfidentialityClassification')), + gql.Argument(name='stewards', type=gql.String), + gql.Argument('KmsAlias', gql.NonNullableType(gql.String)), + ], +) + +DatasetSortCriteria = gql.InputType( + name='DatasetSortCriteria', + arguments=[ + gql.Argument( + name='field', type=gql.NonNullableType(DatasetSortField.toGraphQLEnum()) + ), + gql.Argument(name='direction', type=SortDirection.toGraphQLEnum()), + ], +) + + +DatasetFilter = gql.InputType( + name='DatasetFilter', + arguments=[ + gql.Argument('term', gql.String), + gql.Argument('roles', gql.ArrayType(gql.Ref('DatasetRole'))), + gql.Argument('InProject', gql.String), + gql.Argument('notInProject', gql.String), + gql.Argument('displayArchived', gql.Boolean), + # gql.Argument("organization", gql.String), + # gql.Argument("environment", gql.String), + gql.Argument('sort', gql.ArrayType(DatasetSortCriteria)), + gql.Argument('page', gql.Integer), + gql.Argument('pageSize', gql.Integer), + ], +) + +DatasetPresignedUrlInput = gql.InputType( + name='DatasetPresignedUrlInput', + arguments=[ + gql.Field(name='fileName', type=gql.String), + gql.Field(name='prefix', type=gql.String), + ], +) + + +CrawlerInput = gql.InputType( + name='CrawlerInput', arguments=[gql.Argument(name='prefix', type=gql.String)] +) + 
+ImportDatasetInput = gql.InputType( + name='ImportDatasetInput', + arguments=[ + gql.Argument('label', gql.NonNullableType(gql.String)), + gql.Argument('organizationUri', gql.NonNullableType(gql.String)), + gql.Argument('environmentUri', gql.NonNullableType(gql.String)), + gql.Argument('description', gql.String), + gql.Argument('bucketName', gql.NonNullableType(gql.String)), + gql.Argument('glueDatabaseName', gql.String), + gql.Argument('KmsKeyAlias', gql.NonNullableType(gql.String)), + gql.Argument('adminRoleName', gql.String), + gql.Argument('tags', gql.ArrayType(gql.String)), + gql.Argument('owner', gql.NonNullableType(gql.String)), + gql.Argument('language', gql.Ref('Language')), + gql.Argument('topics', gql.ArrayType(gql.Ref('Topic'))), + gql.Argument(name='SamlAdminGroupName', type=gql.NonNullableType(gql.String)), + gql.Argument(name='businessOwnerEmail', type=gql.String), + gql.Argument( + name='businessOwnerDelegationEmails', type=gql.ArrayType(gql.String) + ), + gql.Argument('confidentiality', gql.Ref('ConfidentialityClassification')), + gql.Argument(name='stewards', type=gql.String), + ], +) diff --git a/backend/dataall/modules/datasets/api/dataset/mutations.py b/backend/dataall/modules/datasets/api/dataset/mutations.py new file mode 100644 index 000000000..56dcb7581 --- /dev/null +++ b/backend/dataall/modules/datasets/api/dataset/mutations.py @@ -0,0 +1,70 @@ +from dataall.base.api import gql +from dataall.modules.datasets.api.dataset.input_types import ( + ModifyDatasetInput, + NewDatasetInput, + ImportDatasetInput, +) +from dataall.modules.datasets.api.dataset.resolvers import ( + create_dataset, + update_dataset, + generate_dataset_access_token, + delete_dataset, + import_dataset, + start_crawler +) + +createDataset = gql.MutationField( + name='createDataset', + args=[gql.Argument(name='input', type=gql.NonNullableType(NewDatasetInput))], + type=gql.Ref('Dataset'), + resolver=create_dataset, + test_scope='Dataset', +) + +updateDataset = gql.MutationField( + name='updateDataset', + args=[ + gql.Argument(name='datasetUri', type=gql.String), + gql.Argument(name='input', type=ModifyDatasetInput), + ], + type=gql.Ref('Dataset'), + resolver=update_dataset, + test_scope='Dataset', +) + +generateDatasetAccessToken = gql.MutationField( + name='generateDatasetAccessToken', + args=[gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String))], + type=gql.String, + resolver=generate_dataset_access_token, +) + + +deleteDataset = gql.MutationField( + name='deleteDataset', + args=[ + gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='deleteFromAWS', type=gql.Boolean), + ], + resolver=delete_dataset, + type=gql.Boolean, +) + + +importDataset = gql.MutationField( + name='importDataset', + args=[gql.Argument(name='input', type=ImportDatasetInput)], + type=gql.Ref('Dataset'), + resolver=import_dataset, + test_scope='Dataset', +) + +StartGlueCrawler = gql.MutationField( + name='startGlueCrawler', + args=[ + gql.Argument(name='datasetUri', type=gql.String), + gql.Argument(name='input', type=gql.Ref('CrawlerInput')), + ], + resolver=start_crawler, + type=gql.Ref('GlueCrawler'), +) diff --git a/backend/dataall/modules/datasets/api/dataset/queries.py b/backend/dataall/modules/datasets/api/dataset/queries.py new file mode 100644 index 000000000..517a6366c --- /dev/null +++ b/backend/dataall/modules/datasets/api/dataset/queries.py @@ -0,0 +1,83 @@ +from dataall.base.api import gql +from dataall.modules.datasets.api.dataset.input_types import 
DatasetFilter +from dataall.modules.datasets.api.dataset.resolvers import ( + get_dataset, + list_datasets, + get_dataset_assume_role_url, + get_file_upload_presigned_url, + list_dataset_share_objects, + list_datasets_owned_by_env_group, + list_datasets_created_in_environment, +) +from dataall.modules.datasets.api.dataset.types import DatasetSearchResult + +getDataset = gql.QueryField( + name='getDataset', + args=[gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String))], + type=gql.Ref('Dataset'), + resolver=get_dataset, + test_scope='Dataset', +) + + +listDatasets = gql.QueryField( + name='listDatasets', + args=[gql.Argument('filter', DatasetFilter)], + type=DatasetSearchResult, + resolver=list_datasets, + test_scope='Dataset', +) + + +getDatasetAssumeRoleUrl = gql.QueryField( + name='getDatasetAssumeRoleUrl', + args=[gql.Argument(name='datasetUri', type=gql.String)], + type=gql.String, + resolver=get_dataset_assume_role_url, + test_scope='Dataset', +) + + +getDatasetPresignedUrl = gql.QueryField( + name='getDatasetPresignedUrl', + args=[ + gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='input', type=gql.Ref('DatasetPresignedUrlInput')), + ], + type=gql.String, + resolver=get_file_upload_presigned_url, +) + +listShareObjects = gql.QueryField( + name='listDatasetShareObjects', + resolver=list_dataset_share_objects, + args=[ + gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='environmentUri', type=gql.String), + gql.Argument(name='page', type=gql.Integer), + ], + type=gql.Ref('ShareSearchResult'), +) + +listDatasetsOwnedByEnvGroup = gql.QueryField( + name='listDatasetsOwnedByEnvGroup', + type=gql.Ref('DatasetSearchResult'), + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='groupUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('DatasetFilter')), + ], + resolver=list_datasets_owned_by_env_group, + test_scope='Dataset', +) + +listDatasetsCreatedInEnvironment = gql.QueryField( + name='listDatasetsCreatedInEnvironment', + type=gql.Ref('DatasetSearchResult'), + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', type=gql.Ref('DatasetFilter')), + ], + resolver=list_datasets_created_in_environment, + test_scope='Dataset', +) diff --git a/backend/dataall/modules/datasets/api/dataset/resolvers.py b/backend/dataall/modules/datasets/api/dataset/resolvers.py new file mode 100644 index 000000000..792cf224d --- /dev/null +++ b/backend/dataall/modules/datasets/api/dataset/resolvers.py @@ -0,0 +1,206 @@ +import logging + +from dataall.core.stacks.api import stack_helper +from dataall.base.api.context import Context +from dataall.core.feature_toggle_checker import is_feature_enabled +from dataall.modules.catalog.db.glossary_repositories import Glossary +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.organizations.db.organization_repositories import Organization +from dataall.base.db.exceptions import RequiredParameter, InvalidInput +from dataall.modules.dataset_sharing.db.share_object_models import ShareObject +from dataall.modules.datasets import Dataset +from dataall.modules.datasets.api.dataset.enums import DatasetRole +from dataall.modules.datasets.services.dataset_service import DatasetService + +log = logging.getLogger(__name__) + + +def create_dataset(context: Context, source, input=None): + 
RequestValidator.validate_creation_request(input) + + admin_group = input['SamlAdminGroupName'] + uri = input['environmentUri'] + return DatasetService.create_dataset(uri=uri, admin_group=admin_group, data=input) + + +def import_dataset(context: Context, source, input=None): + RequestValidator.validate_import_request(input) + + admin_group = input['SamlAdminGroupName'] + uri = input['environmentUri'] + return DatasetService.import_dataset(uri=uri, admin_group=admin_group, data=input) + + +def get_dataset(context, source, datasetUri=None): + return DatasetService.get_dataset(uri=datasetUri) + + +def resolve_user_role(context: Context, source: Dataset, **kwargs): + if not source: + return None + if source.owner == context.username: + return DatasetRole.Creator.value + elif source.SamlAdminGroupName in context.groups: + return DatasetRole.Admin.value + elif source.stewards in context.groups: + return DatasetRole.DataSteward.value + else: + with context.engine.scoped_session() as session: + share = ( + session.query(ShareObject) + .filter(ShareObject.datasetUri == source.datasetUri) + .first() + ) + if share and ( + share.owner == context.username or share.principalId in context.groups + ): + return DatasetRole.Shared.value + return DatasetRole.NoPermission.value + + +@is_feature_enabled('modules.datasets.features.file_uploads') +def get_file_upload_presigned_url( + context, source, datasetUri: str = None, input: dict = None +): + return DatasetService.get_file_upload_presigned_url(uri=datasetUri, data=input) + + +def list_datasets(context: Context, source, filter: dict = None): + if not filter: + filter = {'page': 1, 'pageSize': 5} + return DatasetService.list_datasets(filter) + + +def list_locations(context, source: Dataset, filter: dict = None): + if not source: + return None + if not filter: + filter = {'page': 1, 'pageSize': 5} + return DatasetService.list_locations(source.datasetUri, filter) + + +def list_tables(context, source: Dataset, filter: dict = None): + if not source: + return None + if not filter: + filter = {'page': 1, 'pageSize': 5} + return DatasetService.list_tables(source.datasetUri, filter) + + +def get_dataset_organization(context, source: Dataset, **kwargs): + if not source: + return None + with context.engine.scoped_session() as session: + return Organization.get_organization_by_uri(session, source.organizationUri) + + +def get_dataset_environment(context, source: Dataset, **kwargs): + if not source: + return None + with context.engine.scoped_session() as session: + return EnvironmentService.get_environment_by_uri(session, source.environmentUri) + + +def get_dataset_owners_group(context, source: Dataset, **kwargs): + if not source: + return None + return source.SamlAdminGroupName + + +def get_dataset_stewards_group(context, source: Dataset, **kwargs): + if not source: + return None + return source.stewards + + +def update_dataset(context, source, datasetUri: str = None, input: dict = None): + return DatasetService.update_dataset(uri=datasetUri, data=input) + + +def get_dataset_statistics(context: Context, source: Dataset, **kwargs): + if not source: + return None + return DatasetService.get_dataset_statistics(source) + + +@is_feature_enabled('modules.datasets.features.aws_actions') +def get_dataset_assume_role_url(context: Context, source, datasetUri: str = None): + return DatasetService.get_dataset_assume_role_url(uri=datasetUri) + + +def start_crawler(context: Context, source, datasetUri: str, input: dict = None): + return DatasetService.start_crawler(uri=datasetUri, 
data=input) + + +def list_dataset_share_objects(context, source, filter: dict = None): + if not source: + return None + if not filter: + filter = {'page': 1, 'pageSize': 5} + return DatasetService.list_dataset_share_objects(source, filter) + + +@is_feature_enabled('modules.datasets.features.aws_actions') +def generate_dataset_access_token(context, source, datasetUri: str = None): + return DatasetService.generate_dataset_access_token(uri=datasetUri) + + +def get_dataset_stack(context: Context, source: Dataset, **kwargs): + if not source: + return None + return stack_helper.get_stack_with_cfn_resources( + targetUri=source.datasetUri, + environmentUri=source.environmentUri, + ) + + +def delete_dataset( + context: Context, source, datasetUri: str = None, deleteFromAWS: bool = False +): + return DatasetService.delete_dataset(uri=datasetUri, delete_from_aws=deleteFromAWS) + + +def get_dataset_glossary_terms(context: Context, source: Dataset, **kwargs): + if not source: + return None + with context.engine.scoped_session() as session: + return Glossary.get_glossary_terms_links(session, source.datasetUri, 'Dataset') + + +def list_datasets_created_in_environment( + context: Context, source, environmentUri: str = None, filter: dict = None +): + if not filter: + filter = {} + return DatasetService.list_datasets_created_in_environment(uri=environmentUri, data=filter) + + +def list_datasets_owned_by_env_group( + context, source, environmentUri: str = None, groupUri: str = None, filter: dict = None +): + if not filter: + filter = {} + return DatasetService.list_datasets_owned_by_env_group(environmentUri, groupUri, filter) + + +class RequestValidator: + @staticmethod + def validate_creation_request(data): + if not data: + raise RequiredParameter(data) + if not data.get('environmentUri'): + raise RequiredParameter('environmentUri') + if not data.get('SamlAdminGroupName'): + raise RequiredParameter('group') + if not data.get('label'): + raise RequiredParameter('label') + if len(data['label']) > 52: + raise InvalidInput( + 'Dataset name', data['label'], 'less than 52 characters' + ) + + @staticmethod + def validate_import_request(data): + RequestValidator.validate_creation_request(data) + if not data.get('bucketName'): + raise RequiredParameter('bucketName') diff --git a/backend/dataall/modules/datasets/api/dataset/types.py b/backend/dataall/modules/datasets/api/dataset/types.py new file mode 100644 index 000000000..ff02bc932 --- /dev/null +++ b/backend/dataall/modules/datasets/api/dataset/types.py @@ -0,0 +1,181 @@ +from dataall.base.api import gql +from dataall.modules.datasets.api.dataset.enums import DatasetRole +from dataall.modules.datasets.api.dataset.resolvers import ( + get_dataset_environment, + get_dataset_organization, + get_dataset_owners_group, + get_dataset_stewards_group, + list_tables, + list_locations, + resolve_user_role, + get_dataset_statistics, + list_dataset_share_objects, + get_dataset_glossary_terms, + get_dataset_stack +) +from dataall.core.environment.api.enums import EnvironmentPermission + + +DatasetStatistics = gql.ObjectType( + name='DatasetStatistics', + fields=[ + gql.Field(name='tables', type=gql.Integer), + gql.Field(name='locations', type=gql.Integer), + gql.Field(name='upvotes', type=gql.Integer), + ], +) + +Dataset = gql.ObjectType( + name='Dataset', + fields=[ + gql.Field(name='datasetUri', type=gql.ID), + gql.Field(name='label', type=gql.String), + gql.Field(name='name', type=gql.String), + gql.Field(name='description', type=gql.String), + gql.Field(name='tags', 
type=gql.ArrayType(gql.String)), + gql.Field(name='owner', type=gql.String), + gql.Field(name='created', type=gql.String), + gql.Field(name='updated', type=gql.String), + gql.Field(name='admins', type=gql.ArrayType(gql.String)), + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='region', type=gql.String), + gql.Field(name='S3BucketName', type=gql.String), + gql.Field(name='GlueDatabaseName', type=gql.String), + gql.Field(name='GlueCrawlerName', type=gql.String), + gql.Field(name='GlueCrawlerSchedule', type=gql.String), + gql.Field(name='GlueProfilingJobName', type=gql.String), + gql.Field(name='GlueProfilingTriggerSchedule', type=gql.String), + gql.Field(name='IAMDatasetAdminRoleArn', type=gql.String), + gql.Field(name='KmsAlias', type=gql.String), + gql.Field(name='bucketCreated', type=gql.Boolean), + gql.Field(name='glueDatabaseCreated', type=gql.Boolean), + gql.Field(name='iamAdminRoleCreated', type=gql.Boolean), + gql.Field(name='lakeformationLocationCreated', type=gql.Boolean), + gql.Field(name='bucketPolicyCreated', type=gql.Boolean), + gql.Field(name='SamlAdminGroupName', type=gql.String), + gql.Field(name='businessOwnerEmail', type=gql.String), + gql.Field(name='businessOwnerDelegationEmails', type=gql.ArrayType(gql.String)), + gql.Field(name='importedS3Bucket', type=gql.Boolean), + gql.Field(name='importedGlueDatabase', type=gql.Boolean), + gql.Field(name='importedKmsKey', type=gql.Boolean), + gql.Field(name='importedAdminRole', type=gql.Boolean), + gql.Field(name='imported', type=gql.Boolean), + gql.Field( + name='environment', + type=gql.Ref('Environment'), + resolver=get_dataset_environment, + ), + gql.Field( + name='organization', + type=gql.Ref('Organization'), + resolver=get_dataset_organization, + ), + gql.Field( + name='owners', + type=gql.String, + resolver=get_dataset_owners_group, + ), + gql.Field( + name='stewards', + type=gql.String, + resolver=get_dataset_stewards_group, + ), + gql.Field( + name='tables', + type=gql.Ref('DatasetTableSearchResult'), + args=[gql.Argument(name='filter', type=gql.Ref('DatasetTableFilter'))], + resolver=list_tables, + test_scope='Dataset', + ), + gql.Field( + name='locations', + type=gql.Ref('DatasetStorageLocationSearchResult'), + args=[ + gql.Argument( + name='filter', type=gql.Ref('DatasetStorageLocationFilter') + ) + ], + resolver=list_locations, + test_scope='Dataset', + ), + gql.Field( + name='userRoleForDataset', + type=DatasetRole.toGraphQLEnum(), + resolver=resolve_user_role, + ), + gql.Field( + name='userRoleInEnvironment', type=EnvironmentPermission.toGraphQLEnum() + ), + gql.Field( + name='statistics', type=DatasetStatistics, resolver=get_dataset_statistics + ), + gql.Field( + name='shares', + args=[gql.Argument(name='filter', type=gql.Ref('ShareObjectFilter'))], + type=gql.Ref('ShareSearchResult'), + resolver=list_dataset_share_objects, + test_scope='ShareObject', + test_cases=[ + 'anonymous', + 'businessowner', + 'admins', + 'stewards', + 'unauthorized', + ], + ), + gql.Field( + name='terms', + resolver=get_dataset_glossary_terms, + type=gql.Ref('TermSearchResult'), + ), + gql.Field(name='topics', type=gql.ArrayType(gql.Ref('Topic'))), + gql.Field( + name='confidentiality', type=gql.Ref('ConfidentialityClassification') + ), + gql.Field(name='language', type=gql.Ref('Language')), + gql.Field( + name='projectPermission', + args=[ + gql.Argument(name='projectUri', type=gql.NonNullableType(gql.String)) + ], + type=gql.Ref('DatasetRole'), + ), + gql.Field( + name='isPublishedInEnvironment', + args=[ + 
gql.Argument( + name='environmentUri', type=gql.NonNullableType(gql.String) + ) + ], + type=gql.Boolean, + ), + gql.Field(name='stack', type=gql.Ref('Stack'), resolver=get_dataset_stack), + ], +) + + +DatasetSearchResult = gql.ObjectType( + name='DatasetSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='nodes', type=gql.ArrayType(Dataset)), + gql.Field(name='pageSize', type=gql.Integer), + gql.Field(name='nextPage', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='previousPage', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + ], +) + + +GlueCrawler = gql.ObjectType( + name='GlueCrawler', + fields=[ + gql.Field(name='Name', type=gql.ID), + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='region', type=gql.String), + gql.Field(name='status', type=gql.String), + ], +) diff --git a/backend/dataall/modules/datasets/api/profiling/__init__.py b/backend/dataall/modules/datasets/api/profiling/__init__.py new file mode 100644 index 000000000..3706f9a1e --- /dev/null +++ b/backend/dataall/modules/datasets/api/profiling/__init__.py @@ -0,0 +1,9 @@ +from dataall.modules.datasets.api.profiling import ( + input_types, + mutations, + queries, + resolvers, + types, +) + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/modules/datasets/api/profiling/input_types.py b/backend/dataall/modules/datasets/api/profiling/input_types.py new file mode 100644 index 000000000..52d31e832 --- /dev/null +++ b/backend/dataall/modules/datasets/api/profiling/input_types.py @@ -0,0 +1,20 @@ +from dataall.base.api import gql + +StartDatasetProfilingRunInput = gql.InputType( + name='StartDatasetProfilingRunInput', + arguments=[ + gql.Argument('datasetUri', gql.NonNullableType(gql.String)), + gql.Argument('GlueTableName', gql.String), + gql.Argument('tableUri', gql.String), + ], +) + + +DatasetProfilingRunFilter = gql.InputType( + name='DatasetProfilingRunFilter', + arguments=[ + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + gql.Argument(name='term', type=gql.String), + ], +) diff --git a/backend/dataall/modules/datasets/api/profiling/mutations.py b/backend/dataall/modules/datasets/api/profiling/mutations.py new file mode 100644 index 000000000..861f238cb --- /dev/null +++ b/backend/dataall/modules/datasets/api/profiling/mutations.py @@ -0,0 +1,9 @@ +from dataall.base.api import gql +from dataall.modules.datasets.api.profiling.resolvers import start_profiling_run + +startDatasetProfilingRun = gql.MutationField( + name='startDatasetProfilingRun', + args=[gql.Argument(name='input', type=gql.Ref('StartDatasetProfilingRunInput'))], + type=gql.Ref('DatasetProfilingRun'), + resolver=start_profiling_run, +) diff --git a/backend/dataall/modules/datasets/api/profiling/queries.py b/backend/dataall/modules/datasets/api/profiling/queries.py new file mode 100644 index 000000000..3e20a335e --- /dev/null +++ b/backend/dataall/modules/datasets/api/profiling/queries.py @@ -0,0 +1,19 @@ +from dataall.base.api import gql +from dataall.modules.datasets.api.profiling.resolvers import ( + list_table_profiling_runs, + get_dataset_table_profiling_run +) + +listDatasetTableProfilingRuns = gql.QueryField( + name='listDatasetTableProfilingRuns', + args=[gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String))], + 
type=gql.Ref('DatasetProfilingRunSearchResults'), + resolver=list_table_profiling_runs, +) + +getDatasetTableLastProfilingRun = gql.QueryField( + name='getDatasetTableProfilingRun', + args=[gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String))], + type=gql.Ref('DatasetProfilingRun'), + resolver=get_dataset_table_profiling_run, +) diff --git a/backend/dataall/modules/datasets/api/profiling/resolvers.py b/backend/dataall/modules/datasets/api/profiling/resolvers.py new file mode 100644 index 000000000..84e1df164 --- /dev/null +++ b/backend/dataall/modules/datasets/api/profiling/resolvers.py @@ -0,0 +1,49 @@ +import json +import logging + +from dataall.base.api.context import Context +from dataall.base.db.exceptions import RequiredParameter +from dataall.modules.datasets.services.dataset_profiling_service import DatasetProfilingService +from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets_base.db.dataset_models import DatasetProfilingRun + +log = logging.getLogger(__name__) + + +def resolve_dataset(context, source: DatasetProfilingRun): + if not source: + return None + return DatasetService.get_dataset(uri=source.datasetUri) + + +def start_profiling_run(context: Context, source, input: dict = None): + if 'datasetUri' not in input: + raise RequiredParameter('datasetUri') + + return DatasetProfilingService.start_profiling_run( + uri=input['datasetUri'], + table_uri=input.get('tableUri'), + glue_table_name=input.get('GlueTableName') + ) + + +def resolve_profiling_run_status(context: Context, source: DatasetProfilingRun): + if not source: + return None + DatasetProfilingService.resolve_profiling_run_status(source.profilingRunUri) + return source.status + + +def resolve_profiling_results(context: Context, source: DatasetProfilingRun): + if not source or source.results == {}: + return None + else: + return json.dumps(source.results) + + +def get_dataset_table_profiling_run(context: Context, source, tableUri=None): + return DatasetProfilingService.get_dataset_table_profiling_run(uri=tableUri) + + +def list_table_profiling_runs(context: Context, source, tableUri=None): + return DatasetProfilingService.list_table_profiling_runs(uri=tableUri) diff --git a/backend/dataall/modules/datasets/api/profiling/types.py b/backend/dataall/modules/datasets/api/profiling/types.py new file mode 100644 index 000000000..bc942eb7b --- /dev/null +++ b/backend/dataall/modules/datasets/api/profiling/types.py @@ -0,0 +1,38 @@ +from dataall.base.api import gql +from dataall.modules.datasets.api.profiling.resolvers import ( + resolve_dataset, + resolve_profiling_run_status, + resolve_profiling_results, +) + +DatasetProfilingRun = gql.ObjectType( + name='DatasetProfilingRun', + fields=[ + gql.Field(name='profilingRunUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='datasetUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='GlueJobName', type=gql.String), + gql.Field(name='GlueJobRunId', type=gql.String), + gql.Field(name='GlueTriggerSchedule', type=gql.String), + gql.Field(name='GlueTriggerName', type=gql.String), + gql.Field(name='GlueTableName', type=gql.String), + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='results', type=gql.String, resolver=resolve_profiling_results), + gql.Field(name='created', type=gql.String), + gql.Field(name='updated', type=gql.String), + gql.Field(name='owner', type=gql.String), + gql.Field('status', type=gql.String, resolver=resolve_profiling_run_status), + gql.Field(name='dataset', 
type=gql.Ref('Dataset'), resolver=resolve_dataset), + ], +) + +DatasetProfilingRunSearchResults = gql.ObjectType( + name='DatasetProfilingRunSearchResults', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(DatasetProfilingRun)), + ], +) diff --git a/backend/dataall/modules/datasets/api/storage_location/__init__.py b/backend/dataall/modules/datasets/api/storage_location/__init__.py new file mode 100644 index 000000000..a2d3ea55f --- /dev/null +++ b/backend/dataall/modules/datasets/api/storage_location/__init__.py @@ -0,0 +1,9 @@ +from dataall.modules.datasets.api.storage_location import ( + input_types, + mutations, + queries, + resolvers, + types, +) + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/modules/datasets/api/storage_location/input_types.py b/backend/dataall/modules/datasets/api/storage_location/input_types.py new file mode 100644 index 000000000..99eb89686 --- /dev/null +++ b/backend/dataall/modules/datasets/api/storage_location/input_types.py @@ -0,0 +1,41 @@ +from dataall.base.api import gql + +NewDatasetStorageLocationInput = gql.InputType( + name='NewDatasetStorageLocationInput', + arguments=[ + gql.Argument('label', gql.NonNullableType(gql.String)), + gql.Argument('description', gql.String), + gql.Argument('tags', gql.ArrayType(gql.String)), + gql.Argument('terms', gql.ArrayType(gql.String)), + gql.Argument('prefix', gql.NonNullableType(gql.String)), + ], +) + +ModifyDatasetFolderInput = gql.InputType( + name='ModifyDatasetStorageLocationInput', + arguments=[ + gql.Argument('locationUri', gql.String), + gql.Argument('label', gql.String), + gql.Argument('description', gql.String), + gql.Argument('tags', gql.ArrayType(gql.String)), + gql.Argument('terms', gql.ArrayType(gql.String)), + ], +) + +DatasetStorageLocationFilter = gql.InputType( + name='DatasetStorageLocationFilter', + arguments=[ + gql.Argument('term', gql.String), + gql.Argument('page', gql.Integer), + gql.Argument('pageSize', gql.Integer), + ], +) + + +DatasetAccessPointFilter = gql.InputType( + name='DatasetAccessPointFilter', + arguments=[ + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + ], +) diff --git a/backend/dataall/modules/datasets/api/storage_location/mutations.py b/backend/dataall/modules/datasets/api/storage_location/mutations.py new file mode 100644 index 000000000..1185efabb --- /dev/null +++ b/backend/dataall/modules/datasets/api/storage_location/mutations.py @@ -0,0 +1,39 @@ +from dataall.base.api import gql +from dataall.modules.datasets.api.storage_location.input_types import ( + ModifyDatasetFolderInput, + NewDatasetStorageLocationInput, +) +from dataall.modules.datasets.api.storage_location.resolvers import ( + create_storage_location, + update_storage_location, + remove_storage_location, +) +from dataall.modules.datasets.api.storage_location.types import DatasetStorageLocation + +createDatasetStorageLocation = gql.MutationField( + name='createDatasetStorageLocation', + args=[ + gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='input', type=NewDatasetStorageLocationInput), + ], + type=gql.Thunk(lambda: DatasetStorageLocation), + resolver=create_storage_location, +) + +updateDatasetStorageLocation = 
gql.MutationField( + name='updateDatasetStorageLocation', + args=[ + gql.Argument(name='locationUri', type=gql.String), + gql.Argument(name='input', type=ModifyDatasetFolderInput), + ], + type=gql.Thunk(lambda: DatasetStorageLocation), + resolver=update_storage_location, +) + + +deleteDatasetStorageLocation = gql.MutationField( + name='deleteDatasetStorageLocation', + args=[gql.Argument(name='locationUri', type=gql.NonNullableType(gql.String))], + resolver=remove_storage_location, + type=gql.Boolean, +) diff --git a/backend/dataall/modules/datasets/api/storage_location/queries.py b/backend/dataall/modules/datasets/api/storage_location/queries.py new file mode 100644 index 000000000..e56630101 --- /dev/null +++ b/backend/dataall/modules/datasets/api/storage_location/queries.py @@ -0,0 +1,9 @@ +from dataall.base.api import gql +from dataall.modules.datasets.api.storage_location.resolvers import get_storage_location + +getDatasetStorageLocation = gql.QueryField( + name='getDatasetStorageLocation', + args=[gql.Argument(name='locationUri', type=gql.NonNullableType(gql.String))], + type=gql.Ref('DatasetStorageLocation'), + resolver=get_storage_location, +) diff --git a/backend/dataall/modules/datasets/api/storage_location/resolvers.py b/backend/dataall/modules/datasets/api/storage_location/resolvers.py new file mode 100644 index 000000000..cf7b0a661 --- /dev/null +++ b/backend/dataall/modules/datasets/api/storage_location/resolvers.py @@ -0,0 +1,63 @@ +from dataall.base.api.context import Context +from dataall.modules.catalog.db.glossary_repositories import Glossary +from dataall.base.db.exceptions import RequiredParameter +from dataall.core.feature_toggle_checker import is_feature_enabled +from dataall.modules.datasets.services.dataset_location_service import DatasetLocationService +from dataall.modules.datasets_base.db.dataset_models import DatasetStorageLocation, Dataset + + +@is_feature_enabled('modules.datasets.features.file_actions') +def create_storage_location( + context, source, datasetUri: str = None, input: dict = None +): + if 'prefix' not in input: + raise RequiredParameter('prefix') + if 'label' not in input: + raise RequiredParameter('label') + + return DatasetLocationService.create_storage_location(uri=datasetUri, data=input) + + +@is_feature_enabled('modules.datasets.features.file_actions') +def list_dataset_locations(context, source, filter: dict = None): + if not source: + return None + if not filter: + filter = {} + return DatasetLocationService.list_dataset_locations(uri=source.datasetUri, filter=filter) + + +@is_feature_enabled('modules.datasets.features.file_actions') +def get_storage_location(context, source, locationUri=None): + return DatasetLocationService.get_storage_location(uri=locationUri) + + +@is_feature_enabled('modules.datasets.features.file_actions') +def update_storage_location( + context, source, locationUri: str = None, input: dict = None +): + return DatasetLocationService.update_storage_location(uri=locationUri, data=input) + + +@is_feature_enabled('modules.datasets.features.file_actions') +def remove_storage_location(context, source, locationUri: str = None): + return DatasetLocationService.remove_storage_location(uri=locationUri) + + +def resolve_dataset(context, source: DatasetStorageLocation, **kwargs): + if not source: + return None + with context.engine.scoped_session() as session: + d = session.query(Dataset).get(source.datasetUri) + return d + + +def resolve_glossary_terms( + context: Context, source: DatasetStorageLocation, **kwargs +): + if not 
source: + return None + with context.engine.scoped_session() as session: + return Glossary.get_glossary_terms_links( + session, source.locationUri, 'DatasetStorageLocation' + ) diff --git a/backend/dataall/modules/datasets/api/storage_location/types.py b/backend/dataall/modules/datasets/api/storage_location/types.py new file mode 100644 index 000000000..5c67c9d0e --- /dev/null +++ b/backend/dataall/modules/datasets/api/storage_location/types.py @@ -0,0 +1,79 @@ +from dataall.base.api import gql +from dataall.modules.datasets.api.storage_location.resolvers import ( + resolve_glossary_terms, + resolve_dataset +) + +DatasetStorageLocation = gql.ObjectType( + name='DatasetStorageLocation', + fields=[ + gql.Field(name='locationUri', type=gql.ID), + gql.Field(name='label', type=gql.String), + gql.Field(name='name', type=gql.String), + gql.Field(name='description', type=gql.String), + gql.Field(name='owner', type=gql.String), + gql.Field(name='created', type=gql.String), + gql.Field(name='updated', type=gql.String), + gql.Field(name='region', type=gql.String), + gql.Field(name='tags', type=gql.ArrayType(gql.String)), + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='S3BucketName', type=gql.String), + gql.Field(name='S3Prefix', type=gql.String), + gql.Field(name='locationCreated', type=gql.Boolean), + gql.Field(name='dataset', type=gql.Ref('Dataset'), resolver=resolve_dataset), + gql.Field(name='userRoleForStorageLocation', type=gql.Ref('DatasetRole')), + gql.Field(name='environmentEndPoint', type=gql.String), + gql.Field( + name='terms', + type=gql.Ref('TermSearchResult'), + resolver=resolve_glossary_terms, + ), + ], +) + + +DatasetStorageLocationSearchResult = gql.ObjectType( + name='DatasetStorageLocationSearchResult', + fields=[ + gql.Field(name='nodes', type=gql.ArrayType(DatasetStorageLocation)), + gql.Field(name='count', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + ], +) + + +DatasetAccessPoint = gql.ObjectType( + name='DatasetAccessPoint', + fields=[ + gql.Field(name='accessPointUri', type=gql.ID), + gql.Field(name='location', type=DatasetStorageLocation), + gql.Field(name='dataset', type=gql.Ref('Dataset')), + gql.Field(name='name', type=gql.String), + gql.Field(name='description', type=gql.String), + gql.Field(name='owner', type=gql.String), + gql.Field(name='created', type=gql.String), + gql.Field(name='updated', type=gql.String), + gql.Field(name='region', type=gql.String), + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='S3BucketName', type=gql.String), + gql.Field(name='S3Prefix', type=gql.String), + gql.Field(name='S3AccessPointName', type=gql.String), + ], +) + + +DatasetAccessPointSearchResult = gql.ObjectType( + name='DatasetAccessPointSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pageSize', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Integer), + gql.Field(name='hasPrevious', type=gql.Integer), + gql.Field(name='nodes', type=gql.ArrayType(DatasetAccessPoint)), + ], +) diff --git a/backend/dataall/modules/datasets/api/table/__init__.py b/backend/dataall/modules/datasets/api/table/__init__.py new file mode 100644 index 000000000..e4c8469c2 --- /dev/null +++ b/backend/dataall/modules/datasets/api/table/__init__.py @@ -0,0 +1,10 @@ +from 
dataall.modules.datasets.api.table import ( + input_types, + mutations, + queries, + resolvers, + types, + enums, +) + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations', 'enums'] diff --git a/backend/dataall/modules/datasets/api/table/enums.py b/backend/dataall/modules/datasets/api/table/enums.py new file mode 100644 index 000000000..025414468 --- /dev/null +++ b/backend/dataall/modules/datasets/api/table/enums.py @@ -0,0 +1,15 @@ +from dataall.base.api.constants import GraphQLEnumMapper + + +class DatasetSortField(GraphQLEnumMapper): + created = 'created' + updated = 'updated' + label = 'label' + + +class DatasetTablePreviewStatus(GraphQLEnumMapper): + QUEUED = 'QUEUED' + RUNNING = 'RUNNING' + SUCCEEDED = 'SUCCEEDED' + FAILED = 'FAILED' + CANCELLED = 'CANCELLED' diff --git a/backend/dataall/modules/datasets/api/table/input_types.py b/backend/dataall/modules/datasets/api/table/input_types.py new file mode 100644 index 000000000..fb90403bc --- /dev/null +++ b/backend/dataall/modules/datasets/api/table/input_types.py @@ -0,0 +1,34 @@ +from dataall.base.api import gql +from dataall.base.api.constants import SortDirection +from dataall.modules.datasets.api.table.enums import DatasetSortField + + +ModifyDatasetTableInput = gql.InputType( + name='ModifyDatasetTableInput', + arguments=[ + gql.Argument('label', gql.String), + gql.Argument('prefix', gql.String), + gql.Argument('description', gql.String), + gql.Argument('tags', gql.ArrayType(gql.String)), + gql.Argument('terms', gql.ArrayType(gql.String)), + gql.Argument('topics', gql.ArrayType(gql.String)), + ], +) + +DatasetSortCriteria = gql.InputType( + name='DatasetSortCriteria', + arguments=[ + gql.Argument(name='field', type=DatasetSortField.toGraphQLEnum()), + gql.Argument(name='direction', type=SortDirection.toGraphQLEnum()), + ], +) + +DatasetTableFilter = gql.InputType( + name='DatasetTableFilter', + arguments=[ + gql.Argument('term', gql.String), + gql.Argument('sort', gql.ArrayType(DatasetSortCriteria)), + gql.Argument('page', gql.Integer), + gql.Argument('pageSize', gql.Integer), + ], +) diff --git a/backend/dataall/modules/datasets/api/table/mutations.py b/backend/dataall/modules/datasets/api/table/mutations.py new file mode 100644 index 000000000..245bdc0b4 --- /dev/null +++ b/backend/dataall/modules/datasets/api/table/mutations.py @@ -0,0 +1,29 @@ +from dataall.base.api import gql +from dataall.modules.datasets.api.table.input_types import ModifyDatasetTableInput +from dataall.modules.datasets.api.table.resolvers import ( + update_table, delete_table, sync_tables, +) + +updateDatasetTable = gql.MutationField( + name='updateDatasetTable', + args=[ + gql.Argument(name='tableUri', type=gql.String), + gql.Argument(name='input', type=ModifyDatasetTableInput), + ], + type=gql.Ref('DatasetTable'), + resolver=update_table, +) + +deleteDatasetTable = gql.MutationField( + name='deleteDatasetTable', + args=[gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String))], + type=gql.Boolean, + resolver=delete_table, +) + +syncTables = gql.MutationField( + name='syncTables', + args=[gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String))], + type=gql.Ref('DatasetTableSearchResult'), + resolver=sync_tables, +) diff --git a/backend/dataall/modules/datasets/api/table/queries.py b/backend/dataall/modules/datasets/api/table/queries.py new file mode 100644 index 000000000..ace8aa065 --- /dev/null +++ b/backend/dataall/modules/datasets/api/table/queries.py @@ -0,0 +1,52 @@ +from dataall.base.api import gql +from 
dataall.modules.datasets.api.table.input_types import DatasetTableFilter +from dataall.modules.datasets.api.table.resolvers import ( + get_table, + list_shared_tables_by_env_dataset, + preview +) +from dataall.modules.datasets.api.table.types import ( + DatasetTable, + DatasetTableSearchResult, +) + +getDatasetTable = gql.QueryField( + name='getDatasetTable', + args=[gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String))], + type=gql.Thunk(lambda: DatasetTable), + resolver=get_table, +) + + +listDatasetTables = gql.QueryField( + name='listDatasetTables', + args=[gql.Argument('filter', DatasetTableFilter)], + type=DatasetTableSearchResult, + resolver=lambda *_, **__: None, +) + + +QueryPreviewResult = gql.ObjectType( + name='QueryPreviewResult', + fields=[ + gql.Field(name='fields', type=gql.ArrayType(gql.String)), + gql.Field(name='rows', type=gql.ArrayType(gql.String)), + ], +) + +previewTable = gql.QueryField( + name='previewTable', + args=[gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String))], + resolver=preview, + type=gql.Ref('QueryPreviewResult'), +) + +getSharedDatasetTables = gql.QueryField( + name='getSharedDatasetTables', + args=[ + gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='envUri', type=gql.NonNullableType(gql.String)) + ], + type=gql.ArrayType(gql.Ref('SharedDatasetTableItem')), + resolver=list_shared_tables_by_env_dataset, +) diff --git a/backend/dataall/modules/datasets/api/table/resolvers.py b/backend/dataall/modules/datasets/api/table/resolvers.py new file mode 100644 index 000000000..f1638c161 --- /dev/null +++ b/backend/dataall/modules/datasets/api/table/resolvers.py @@ -0,0 +1,62 @@ +import logging + +from dataall.modules.catalog.db.glossary_repositories import Glossary +from dataall.modules.datasets.api.dataset.resolvers import get_dataset +from dataall.base.api.context import Context +from dataall.modules.datasets.services.dataset_table_service import DatasetTableService +from dataall.modules.datasets_base.db.dataset_models import DatasetTable, Dataset + +log = logging.getLogger(__name__) + + +def get_table(context, source: Dataset, tableUri: str = None): + return DatasetTableService.get_table(uri=tableUri) + + +def update_table(context, source, tableUri: str = None, input: dict = None): + return DatasetTableService.update_table(uri=tableUri, table_data=input) + + +def delete_table(context, source, tableUri: str = None): + if not tableUri: + return False + return DatasetTableService.delete_table(uri=tableUri) + + +def preview(context, source, tableUri: str = None): + if not tableUri: + return None + return DatasetTableService.preview(table_uri=tableUri) + + +def get_glue_table_properties(context: Context, source: DatasetTable, **kwargs): + if not source: + return None + return DatasetTableService.get_glue_table_properties(uri=source.tableUri) + + +def sync_tables(context: Context, source, datasetUri: str = None): + return DatasetTableService.sync_tables_for_dataset(uri=datasetUri) + + +def resolve_dataset(context, source: DatasetTable, **kwargs): + if not source: + return None + + dataset_with_role = get_dataset(context, source=None, datasetUri=source.datasetUri) + if not dataset_with_role: + return None + return dataset_with_role + + +def resolve_glossary_terms(context: Context, source: DatasetTable, **kwargs): + if not source: + return None + with context.engine.scoped_session() as session: + return Glossary.get_glossary_terms_links( + session, source.tableUri, 'DatasetTable' + ) + + +def 
list_shared_tables_by_env_dataset(context: Context, source, datasetUri: str, envUri: str): + return DatasetTableService.list_shared_tables_by_env_dataset(datasetUri, envUri) diff --git a/backend/dataall/modules/datasets/api/table/types.py b/backend/dataall/modules/datasets/api/table/types.py new file mode 100644 index 000000000..d7e1fabb4 --- /dev/null +++ b/backend/dataall/modules/datasets/api/table/types.py @@ -0,0 +1,120 @@ +from dataall.modules.datasets.api.table_column.resolvers import list_table_columns +from dataall.base.api import gql +from dataall.modules.datasets.api.table.resolvers import ( + resolve_dataset, + get_glue_table_properties, + resolve_glossary_terms +) +from dataall.modules.datasets.api.table.enums import DatasetTablePreviewStatus + + +TablePermission = gql.ObjectType( + name='TablePermission', + fields=[ + gql.Field(name='userName', type=gql.String), + gql.Field(name='created', type=gql.String), + ], +) + +TablePermissionSearchResult = gql.ObjectType( + name='TablePermissionSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='nodes', type=gql.ArrayType(TablePermission)), + ], +) + + +DatasetTable = gql.ObjectType( + name='DatasetTable', + fields=[ + gql.Field(name='tableUri', type=gql.ID), + gql.Field(name='datasetUri', type=gql.String), + gql.Field(name='dataset', type=gql.Ref('Dataset'), resolver=resolve_dataset), + gql.Field(name='label', type=gql.String), + gql.Field(name='name', type=gql.String), + gql.Field(name='description', type=gql.String), + gql.Field(name='owner', type=gql.String), + gql.Field(name='created', type=gql.String), + gql.Field(name='updated', type=gql.String), + gql.Field(name='admins', type=gql.ArrayType(gql.String)), + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='GlueDatabaseName', type=gql.String), + gql.Field(name='GlueTableName', type=gql.String), + gql.Field(name='LastGlueTableStatus', type=gql.String), + gql.Field(name='S3Prefix', type=gql.String), + gql.Field(name='GlueTableConfig', type=gql.String), + gql.Field( + name='GlueTableProperties', + type=gql.String, + resolver=get_glue_table_properties, + ), + gql.Field(name='region', type=gql.String), + gql.Field(name='tags', type=gql.ArrayType(gql.String)), + gql.Field(name='userRoleForTable', type=gql.Ref('DatasetRole')), + gql.Field(name='stage', type=gql.String), + gql.Field( + name='columns', + resolver=list_table_columns, + type=gql.Ref('DatasetTableColumnSearchResult'), + ), + gql.Field( + name='terms', + type=gql.Ref('TermSearchResult'), + resolver=resolve_glossary_terms, + ), + ], +) + + +DatasetTableSearchResult = gql.ObjectType( + name='DatasetTableSearchResult', + fields=[ + gql.Field(name='nodes', type=gql.ArrayType(DatasetTable)), + gql.Field(name='count', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + ], +) + + +DatasetTableRecord = gql.ObjectType( + name='DatasetTableRecord', fields=[gql.Field(name='data', type=gql.String)] +) + +DatasetTableMetadataItem = gql.ObjectType( + name='DatasetTableMetadataItem', + fields=[ + gql.Field(name='Name', type=gql.String), + gql.Field(name='Type', type=gql.String), + ], +) + + +DatasetTablePreviewResult = gql.ObjectType( + name='DatasetTablePreviewResult', + fields=[ + gql.Field(name='queryExecutionId', type=gql.ID), + gql.Field(name='status', type=DatasetTablePreviewStatus.toGraphQLEnum()), + gql.Field(name='count', 
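The previewTable query defined above passes the tableUri to the preview resolver, which delegates to DatasetTableService.preview. A sketch of a client document for it, written as a Python string (the query actually used by the data.all frontend may differ):

PREVIEW_TABLE_QUERY = """
query previewTable($tableUri: String!) {
  previewTable(tableUri: $tableUri) {
    fields
    rows
  }
}
"""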
type=gql.Integer), + gql.Field(name='nodes', type=gql.ArrayType(DatasetTableRecord)), + gql.Field(name='schema', type=gql.ArrayType(DatasetTableMetadataItem)), + gql.Field(name='pageSize', type=gql.Integer), + gql.Field(name='nextPage', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='previousPage', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + ], +) + +SharedDatasetTableItem = gql.ObjectType( + name='SharedDatasetTableItem', + fields=[ + gql.Field(name='tableUri', type=gql.String), + gql.Field(name='GlueTableName', type=gql.String), + ] +) diff --git a/backend/dataall/modules/datasets/api/table_column/__init__.py b/backend/dataall/modules/datasets/api/table_column/__init__.py new file mode 100644 index 000000000..214930e65 --- /dev/null +++ b/backend/dataall/modules/datasets/api/table_column/__init__.py @@ -0,0 +1,9 @@ +from dataall.modules.datasets.api.table_column import ( + input_types, + mutations, + queries, + resolvers, + types, +) + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/modules/datasets/api/table_column/input_types.py b/backend/dataall/modules/datasets/api/table_column/input_types.py new file mode 100644 index 000000000..ca32c83f9 --- /dev/null +++ b/backend/dataall/modules/datasets/api/table_column/input_types.py @@ -0,0 +1,20 @@ +from dataall.base.api import gql + +DatasetTableColumnFilter = gql.InputType( + name='DatasetTableColumnFilter', + arguments=[ + gql.Argument('term', gql.String), + gql.Argument('page', gql.Integer), + gql.Argument('pageSize', gql.Integer), + ], +) + +DatasetTableColumnInput = gql.InputType( + name='DatasetTableColumnInput', + arguments=[ + gql.Argument('description', gql.String), + gql.Argument('classification', gql.Integer), + gql.Argument('tags', gql.Integer), + gql.Argument('topics', gql.Integer), + ], +) diff --git a/backend/dataall/modules/datasets/api/table_column/mutations.py b/backend/dataall/modules/datasets/api/table_column/mutations.py new file mode 100644 index 000000000..10c4ec488 --- /dev/null +++ b/backend/dataall/modules/datasets/api/table_column/mutations.py @@ -0,0 +1,23 @@ +from dataall.base.api import gql +from dataall.modules.datasets.api.table_column.resolvers import ( + sync_table_columns, + update_table_column +) + +syncDatasetTableColumns = gql.MutationField( + name='syncDatasetTableColumns', + args=[gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String))], + type=gql.Ref('DatasetTableColumnSearchResult'), + resolver=sync_table_columns, +) + + +updateDatasetTableColumn = gql.MutationField( + name='updateDatasetTableColumn', + args=[ + gql.Argument(name='columnUri', type=gql.String), + gql.Argument(name='input', type=gql.Ref('DatasetTableColumnInput')), + ], + type=gql.Ref('DatasetTableColumn'), + resolver=update_table_column, +) diff --git a/backend/dataall/modules/datasets/api/table_column/queries.py b/backend/dataall/modules/datasets/api/table_column/queries.py new file mode 100644 index 000000000..5e2358cb7 --- /dev/null +++ b/backend/dataall/modules/datasets/api/table_column/queries.py @@ -0,0 +1,12 @@ +from dataall.base.api import gql +from dataall.modules.datasets.api.table_column.resolvers import list_table_columns + +listDatasetTableColumns = gql.QueryField( + name='listDatasetTableColumns', + args=[ + gql.Argument(name='tableUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='filter', 
type=gql.Ref('DatasetTableColumnFilter')), + ], + type=gql.Ref('DatasetTableColumnSearchResult'), + resolver=list_table_columns, +) diff --git a/backend/dataall/modules/datasets/api/table_column/resolvers.py b/backend/dataall/modules/datasets/api/table_column/resolvers.py new file mode 100644 index 000000000..abd021a35 --- /dev/null +++ b/backend/dataall/modules/datasets/api/table_column/resolvers.py @@ -0,0 +1,47 @@ +from dataall.base.api.context import Context +from dataall.modules.catalog.db.glossary_models import TermLink +from dataall.base.db import paginate +from dataall.modules.datasets.services.dataset_column_service import DatasetColumnService +from dataall.modules.datasets_base.db.dataset_models import DatasetTableColumn, DatasetTable + + +def list_table_columns( + context: Context, + source: DatasetTable, + tableUri: str = None, + filter: dict = None, +): + if source: + tableUri = source.tableUri + if not filter: + filter = {} + return DatasetColumnService.paginate_active_columns_for_table(uri=tableUri, filter=filter) + + +def sync_table_columns(context: Context, source, tableUri: str = None): + if tableUri is None: + return None + return DatasetColumnService.sync_table_columns(table_uri=tableUri) + + +def resolve_terms(context, source: DatasetTableColumn, **kwargs): + if not source: + return None + with context.engine.scoped_session() as session: + q = session.query(TermLink).filter( + TermLink.targetUri == source.columnUri + ) + return paginate(q, page=1, page_size=15).to_dict() + + +def update_table_column( + context: Context, source, columnUri: str = None, input: dict = None +): + if columnUri is None: + return None + + if input is None: + input = {} + + description = input.get('description', 'No description provided') + return DatasetColumnService.update_table_column_description(column_uri=columnUri, description=description) diff --git a/backend/dataall/modules/datasets/api/table_column/types.py b/backend/dataall/modules/datasets/api/table_column/types.py new file mode 100644 index 000000000..692bbc6d3 --- /dev/null +++ b/backend/dataall/modules/datasets/api/table_column/types.py @@ -0,0 +1,42 @@ +from dataall.base.api import gql +from dataall.modules.datasets.api.table_column.resolvers import resolve_terms + + +DatasetTableColumn = gql.ObjectType( + name='DatasetTableColumn', + fields=[ + gql.Field(name='tableUri', type=gql.ID), + gql.Field(name='columnUri', type=gql.ID), + gql.Field(name='label', type=gql.String), + gql.Field(name='name', type=gql.String), + gql.Field(name='description', type=gql.String), + gql.Field(name='owner', type=gql.String), + gql.Field(name='created', type=gql.String), + gql.Field(name='updated', type=gql.String), + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='GlueDatabaseName', type=gql.String), + gql.Field(name='GlueTableName', type=gql.String), + gql.Field(name='typeName', type=gql.String), + gql.Field(name='columnType', type=gql.String), + gql.Field(name='region', type=gql.String), + gql.Field(name='classification', type=gql.String), + gql.Field(name='topics', type=gql.ArrayType(gql.String)), + gql.Field(name='tags', type=gql.ArrayType(gql.String)), + gql.Field( + name='terms', type=gql.Ref('TermLinkSearchResults'), resolver=resolve_terms + ), + ], +) + + +DatasetTableColumnSearchResult = gql.ObjectType( + name='DatasetTableColumnSearchResult', + fields=[ + gql.Field(name='nodes', type=gql.ArrayType(DatasetTableColumn)), + gql.Field(name='count', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + 
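The column API above is consumed as an ordinary paginated list query. A minimal sketch of such a client document, again as a Python string with a placeholder selection set (the production frontend query may differ):

LIST_TABLE_COLUMNS_QUERY = """
query listDatasetTableColumns($tableUri: String!, $filter: DatasetTableColumnFilter) {
  listDatasetTableColumns(tableUri: $tableUri, filter: $filter) {
    count
    page
    pages
    nodes {
      columnUri
      name
      columnType
      description
    }
  }
}
"""

Variables are supplied at execution time, for example {'tableUri': '<table-uri>', 'filter': {'page': 1, 'pageSize': 25}}.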
gql.Field(name='page', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + ], +) diff --git a/backend/dataall/modules/datasets/aws/__init__.py b/backend/dataall/modules/datasets/aws/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/datasets/aws/athena_table_client.py b/backend/dataall/modules/datasets/aws/athena_table_client.py new file mode 100644 index 000000000..fe55a48f7 --- /dev/null +++ b/backend/dataall/modules/datasets/aws/athena_table_client.py @@ -0,0 +1,59 @@ +import json +import logging +from pyathena import connect + +from botocore.exceptions import ClientError + +from dataall.base.aws.sts import SessionHelper +from dataall.core.environment.db.environment_models import Environment +from dataall.modules.datasets_base.db.dataset_models import DatasetTable +from dataall.base.utils import json_utils, sql_utils + +log = logging.getLogger(__name__) + + +class AthenaTableClient: + + def __init__(self, env: Environment, table: DatasetTable): + session = SessionHelper.remote_session(accountid=table.AWSAccountId) + self._client = session.client('athena', region_name=env.region) + self._creds = session.get_credentials() + self._env = env + self._table = table + + def get_table(self, dataset_uri): + env = self._env + table = self._table + creds = self._creds + + env_workgroup = {} + try: + env_workgroup = self._client.get_work_group(WorkGroup=env.EnvironmentDefaultAthenaWorkGroup) + except ClientError as e: + log.info( + f'Workgroup {env.EnvironmentDefaultAthenaWorkGroup} can not be found' + f'due to: {e}' + ) + + connection = connect( + aws_access_key_id=creds.access_key, + aws_secret_access_key=creds.secret_key, + aws_session_token=creds.token, + work_group=env_workgroup.get('WorkGroup', {}).get('Name', 'primary'), + s3_staging_dir=f's3://{env.EnvironmentDefaultBucketName}/preview/{dataset_uri}/{table.tableUri}', + region_name=table.region, + ) + cursor = connection.cursor() + + sql = 'select * from {table_identifier} limit 50'.format( + table_identifier=sql_utils.Identifier(table.GlueDatabaseName, table.GlueTableName) + ) + cursor.execute(sql) + fields = [] + for f in cursor.description: + fields.append(json.dumps({'name': f[0]})) + rows = [] + for row in cursor: + rows.append(json.dumps(json_utils.to_json(list(row)))) + + return {'rows': rows, 'fields': fields} diff --git a/backend/dataall/modules/datasets/aws/glue_dataset_client.py b/backend/dataall/modules/datasets/aws/glue_dataset_client.py new file mode 100644 index 000000000..aecd5b555 --- /dev/null +++ b/backend/dataall/modules/datasets/aws/glue_dataset_client.py @@ -0,0 +1,104 @@ +import logging +from botocore.exceptions import ClientError + +from dataall.base.aws.sts import SessionHelper +from dataall.modules.datasets_base.db.dataset_models import Dataset + +log = logging.getLogger(__name__) + + +class DatasetCrawler: + def __init__(self, dataset: Dataset): + session = SessionHelper.remote_session(accountid=dataset.AwsAccountId) + region = dataset.region if dataset.region else 'eu-west-1' + self._client = session.client('glue', region_name=region) + self._dataset = dataset + + def get_crawler(self, crawler_name=None): + crawler = None + if not crawler_name: + crawler_name = self._dataset.GlueCrawlerName + + try: + crawler = self._client.get_crawler(Name=crawler_name) + except ClientError as e: + if e.response['Error']['Code'] == 'EntityNotFoundException': + log.debug(f'Crawler does not exists {crawler_name} %s', e) 
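As a rough usage sketch for the Athena client above (the call is normally driven by the table service layer rather than invoked directly), where env and table stand in for an Environment and DatasetTable loaded from the metadata database:

from dataall.modules.datasets.aws.athena_table_client import AthenaTableClient


def preview_table(env, table):
    # Runs the 'select * ... limit 50' preview in the environment workgroup and returns
    # the JSON-encoded field names and rows produced by AthenaTableClient.get_table.
    result = AthenaTableClient(env=env, table=table).get_table(dataset_uri=table.datasetUri)
    return result['fields'], result['rows']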
+ else: + raise e + return crawler.get('Crawler') if crawler else None + + def start_crawler(self): + crawler_name = self._dataset.GlueCrawlerName + try: + crawler = self.get_crawler() + self._client.start_crawler(Name=crawler_name) + log.info('Crawler %s started ', crawler_name) + return crawler + except ClientError as e: + log.error('Failed to start Crawler due to %s', e) + raise e + + def update_crawler(self, targets): + dataset = self._dataset + crawler_name = dataset.GlueCrawlerName + try: + self._client.stop_crawler(Name=crawler_name) + except ClientError as e: + if ( + e.response['Error']['Code'] == 'CrawlerStoppingException' + or e.response['Error']['Code'] == 'CrawlerNotRunningException' + ): + log.error('Failed to stop crawler %s', e) + try: + self._client.update_crawler( + Name=crawler_name, + Role=self._dataset.IAMDatasetAdminRoleArn, + DatabaseName=dataset.GlueDatabaseName, + Targets=targets, + ) + log.info('Crawler %s updated ', crawler_name) + except ClientError as e: + log.debug('Failed to stop and update crawler %s', e) + if e.response['Error']['Code'] != 'CrawlerRunningException': + log.error('Failed to update crawler %s', e) + else: + raise e + + def list_glue_database_tables(self): + dataset = self._dataset + database = dataset.GlueDatabaseName + account_id = dataset.AwsAccountId + found_tables = [] + try: + log.debug(f'Looking for {database} tables') + + if not self.database_exists(): + return found_tables + + paginator = self._client.get_paginator('get_tables') + + pages = paginator.paginate( + DatabaseName=database, + CatalogId=account_id, + ) + for page in pages: + found_tables.extend(page['TableList']) + + log.debug(f'Retrieved all database {database} tables: {found_tables}') + + except ClientError as e: + log.error( + f'Failed to retrieve tables for database {account_id}|{database}: {e}', + exc_info=True, + ) + return found_tables + + def database_exists(self): + dataset = self._dataset + try: + self._client.get_database(CatalogId=dataset.AwsAccountId, Name=dataset.GlueDatabaseName) + return True + except ClientError: + log.info(f'Database {dataset.GlueDatabaseName} does not exist on account {dataset.AwsAccountId}...') + return False diff --git a/backend/dataall/modules/datasets/aws/glue_profiler_client.py b/backend/dataall/modules/datasets/aws/glue_profiler_client.py new file mode 100644 index 000000000..c35868aac --- /dev/null +++ b/backend/dataall/modules/datasets/aws/glue_profiler_client.py @@ -0,0 +1,40 @@ +import logging +from botocore.exceptions import ClientError + +from dataall.base.aws.sts import SessionHelper +from dataall.modules.datasets import Dataset +from dataall.modules.datasets_base.db.dataset_models import DatasetProfilingRun + +log = logging.getLogger(__name__) + + +class GlueDatasetProfilerClient: + """Controls glue profiling jobs in AWS""" + + def __init__(self, dataset: Dataset): + session = SessionHelper.remote_session(accountid=dataset.AwsAccountId) + self._client = session.client('glue', region_name=dataset.region) + self._name = dataset.GlueProfilingJobName + + def get_job_status(self, profiling: DatasetProfilingRun): + """Returns a status of a glue job""" + run_id = profiling.GlueJobRunId + try: + response = self._client.get_job_run(JobName=self._name, RunId=run_id) + return response['JobRun']['JobRunState'] + except ClientError as e: + log.error(f'Failed to get job run {run_id} due to: {e}') + raise e + + def run_job(self, profiling: DatasetProfilingRun): + """Run glue job. 
Returns id of the job""" + args = {'--table': profiling.GlueTableName} if profiling.GlueTableName else {} + try: + response = self._client.start_job_run( + JobName=self._name, Arguments=args + ) + + return response['JobRunId'] + except ClientError as e: + log.error(f'Failed to start profiling job {self._name} due to: {e}') + raise e diff --git a/backend/dataall/modules/datasets/aws/glue_table_client.py b/backend/dataall/modules/datasets/aws/glue_table_client.py new file mode 100644 index 000000000..497c51527 --- /dev/null +++ b/backend/dataall/modules/datasets/aws/glue_table_client.py @@ -0,0 +1,47 @@ +import logging + +from botocore.exceptions import ClientError + +from dataall.modules.datasets_base.db.dataset_models import DatasetTable + +log = logging.getLogger(__name__) + + +class GlueTableClient: + """Makes requests to AWS Glue API""" + def __init__(self, aws_session, table: DatasetTable): + self._client = aws_session.client('glue', region_name=table.region) + self._table = table + + def get_table(self): + dataset_table = self._table + try: + glue_table = self._client.get_table( + CatalogId=dataset_table.AWSAccountId, + DatabaseName=dataset_table.GlueDatabaseName, + Name=dataset_table.name, + ) + return glue_table + except ClientError as e: + log.error( + f'Failed to get table aws://{dataset_table.AWSAccountId}' + f'//{dataset_table.GlueDatabaseName}' + f'//{dataset_table.name} due to: ' + f'{e}' + ) + return {} + + def update_table_for_column(self, column_name, table_input): + try: + response = self._client.update_table( + DatabaseName=self._table.name, + TableInput=table_input, + ) + log.info( + f'Column {column_name} updated successfully: {response}' + ) + except ClientError as e: + log.error( + f'Failed to update table column {column_name} description: {e}' + ) + raise e diff --git a/backend/dataall/modules/datasets/aws/lf_dataset_client.py b/backend/dataall/modules/datasets/aws/lf_dataset_client.py new file mode 100644 index 000000000..fc8195fbc --- /dev/null +++ b/backend/dataall/modules/datasets/aws/lf_dataset_client.py @@ -0,0 +1,45 @@ +import logging +from botocore.exceptions import ClientError + +from dataall.base.aws.sts import SessionHelper +from dataall.core.environment.db.environment_models import Environment +from dataall.modules.datasets_base.db.dataset_models import Dataset + +log = logging.getLogger(__name__) +PIVOT_ROLE_NAME_PREFIX = "dataallPivotRole" + + +class LakeFormationDatasetClient: + + def __init__(self, env: Environment, dataset: Dataset): + session = SessionHelper.remote_session(env.AwsAccountId) + self._client = session.client('lakeformation', region_name=env.region) + self._dataset = dataset + self._env = env + + def check_existing_lf_registered_location(self): + """ + Checks if there is a non-dataall-created registered location for the Dataset + Returns False is already existing location else return the resource info + """ + + resource_arn = f'arn:aws:s3:::{self._dataset.S3BucketName}' + try: + + response = self._client.describe_resource(ResourceArn=resource_arn) + registered_role_name = response['ResourceInfo']['RoleArn'].lstrip(f"arn:aws:iam::{self._env}:role/") + log.info(f'LF data location already registered: {response}, registered with role {registered_role_name}') + if ( + registered_role_name.startswith(PIVOT_ROLE_NAME_PREFIX) + or response['ResourceInfo']['RoleArn'] == self._dataset.IAMDatasetAdminRoleArn + ): + log.info( + 'The existing data location was created as part of the dataset stack. ' + 'There was no pre-existing data location.' 
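A rough sketch of driving a profiling run with the client above; dataset and profiling_run stand in for a Dataset and DatasetProfilingRun loaded elsewhere, and the real orchestration lives in the profiling service and handlers rather than in a snippet like this:

from dataall.modules.datasets.aws.glue_profiler_client import GlueDatasetProfilerClient

profiler = GlueDatasetProfilerClient(dataset)
run_id = profiler.run_job(profiling_run)          # starts the Glue profiling job
profiling_run.GlueJobRunId = run_id               # the run id is what status checks key on
status = profiler.get_job_status(profiling_run)   # e.g. 'RUNNING' or 'SUCCEEDED'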
+ ) + return False + return response['ResourceInfo'] + + except ClientError as e: + log.info(f'LF data location for resource {resource_arn} not found due to {e}') + return False diff --git a/backend/dataall/modules/datasets/aws/lf_table_client.py b/backend/dataall/modules/datasets/aws/lf_table_client.py new file mode 100644 index 000000000..a248d004c --- /dev/null +++ b/backend/dataall/modules/datasets/aws/lf_table_client.py @@ -0,0 +1,67 @@ +import logging +from botocore.exceptions import ClientError + +from dataall.base.aws.sts import SessionHelper +from dataall.modules.datasets_base.db.dataset_models import DatasetTable + +log = logging.getLogger(__name__) + + +class LakeFormationTableClient: + """Requests to AWS LakeFormation""" + + def __init__(self, table: DatasetTable, aws_session=None): + if not aws_session: + aws_session = SessionHelper.remote_session(table.AWSAccountId) + self._client = aws_session.client('lakeformation', region_name=table.region) + self._table = table + + def grant_pivot_role_all_table_permissions(self): + """ + Pivot role needs to have all permissions + for tables managed inside dataall + """ + table = self._table + principal = SessionHelper.get_delegation_role_arn(table.AWSAccountId) + self._grant_permissions_to_table(principal, ['SELECT', 'ALTER', 'DROP', 'INSERT']) + + def grant_principals_all_table_permissions(self, principals: [str]): + """ + Update the table permissions on Lake Formation + for tables managed by data.all + :param principals: + :return: + """ + + for principal in principals: + try: + self._grant_permissions_to_table(principal, ['ALL']) + except ClientError: + pass # ignore the error to continue with other requests + + def _grant_permissions_to_table(self, principal, permissions): + table = self._table + try: + grant_dict = dict( + Principal={'DataLakePrincipalIdentifier': principal}, + Resource={ + 'Table': { + 'DatabaseName': table.GlueDatabaseName, + 'Name': table.name, + } + }, + Permissions=permissions, + ) + response = self._client.grant_permissions(**grant_dict) + log.error( + f'Successfully granted pivot role all table ' + f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' + f'access: {response}' + ) + except ClientError as e: + log.error( + f'Failed to grant pivot role all table ' + f'aws://{table.AWSAccountId}/{table.GlueDatabaseName}/{table.name} ' + f'access: {e}' + ) + raise e diff --git a/backend/dataall/modules/datasets/aws/s3_dataset_client.py b/backend/dataall/modules/datasets/aws/s3_dataset_client.py new file mode 100644 index 000000000..f3618df38 --- /dev/null +++ b/backend/dataall/modules/datasets/aws/s3_dataset_client.py @@ -0,0 +1,114 @@ +import json +import logging + +from botocore.config import Config +from botocore.exceptions import ClientError + +from dataall.base.aws.sts import SessionHelper +from dataall.modules.datasets_base.db.dataset_models import Dataset + +log = logging.getLogger(__name__) + + +class S3DatasetClient: + + def __init__(self, dataset: Dataset): + self._client = SessionHelper.remote_session(dataset.AwsAccountId).client( + 's3', + region_name=dataset.region, + config=Config(signature_version='s3v4', s3={'addressing_style': 'virtual'}), + ) + self._dataset = dataset + + def get_file_upload_presigned_url(self, data): + dataset = self._dataset + try: + self._client.get_bucket_acl( + Bucket=dataset.S3BucketName, ExpectedBucketOwner=dataset.AwsAccountId + ) + response = self._client.generate_presigned_post( + Bucket=dataset.S3BucketName, + Key=data.get('prefix', 'uploads') + '/' + 
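A short sketch of how the Lake Formation table client above can be used; table stands in for a DatasetTable row and the consumer role ARN is a placeholder:

from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient

lf_client = LakeFormationTableClient(table)
lf_client.grant_pivot_role_all_table_permissions()
lf_client.grant_principals_all_table_permissions(
    principals=['arn:aws:iam::111122223333:role/example-consumer-role']
)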
data.get('fileName'), + ExpiresIn=15 * 60, + ) + + return json.dumps(response) + except ClientError as e: + raise e + + +class S3DatasetBucketPolicyClient: + def __init__(self, dataset: Dataset): + session = SessionHelper.remote_session(accountid=dataset.AwsAccountId) + self._client = session.client('s3') + self._dataset = dataset + + def get_bucket_policy(self): + dataset = self._dataset + try: + policy = self._client.get_bucket_policy(Bucket=dataset.S3BucketName)['Policy'] + log.info(f'Current bucket policy---->:{policy}') + policy = json.loads(policy) + except ClientError as err: + if err.response['Error']['Code'] == 'NoSuchBucketPolicy': + log.info(f"No policy attached to '{dataset.S3BucketName}'") + + elif err.response['Error']['Code'] == 'NoSuchBucket': + log.error(f'Bucket deleted {dataset.S3BucketName}') + + elif err.response['Error']['Code'] == 'AccessDenied': + log.error( + f'Access denied in {dataset.AwsAccountId} ' + f'(s3:{err.operation_name}, ' + f"resource='{dataset.S3BucketName}')" + ) + else: + log.exception( + f"Failed to get '{dataset.S3BucketName}' policy in {dataset.AwsAccountId}" + ) + policy = { + 'Version': '2012-10-17', + 'Statement': [ + { + 'Sid': 'OwnerAccount', + 'Effect': 'Allow', + 'Action': ['s3:*'], + 'Resource': [ + f'arn:aws:s3:::{dataset.S3BucketName}', + f'arn:aws:s3:::{dataset.S3BucketName}/*', + ], + 'Principal': { + 'AWS': f'arn:aws:iam::{dataset.AwsAccountId}:root' + }, + } + ], + } + + return policy + + def put_bucket_policy(self, policy): + dataset = self._dataset + update_policy_report = { + 'datasetUri': dataset.datasetUri, + 'bucketName': dataset.S3BucketName, + 'accountId': dataset.AwsAccountId, + } + try: + policy_json = json.dumps(policy) if isinstance(policy, dict) else policy + log.info( + f"Putting new bucket policy on '{dataset.S3BucketName}' policy {policy_json}" + ) + response = self._client.put_bucket_policy( + Bucket=dataset.S3BucketName, Policy=policy_json + ) + log.info(f'Bucket Policy updated: {response}') + update_policy_report.update({'status': 'SUCCEEDED'}) + except ClientError as e: + log.error( + f'Failed to update bucket policy ' + f"on '{dataset.S3BucketName}' policy {policy} " + f'due to {e} ' + ) + update_policy_report.update({'status': 'FAILED'}) + + return update_policy_report diff --git a/backend/dataall/modules/datasets/aws/s3_location_client.py b/backend/dataall/modules/datasets/aws/s3_location_client.py new file mode 100644 index 000000000..bf307cc50 --- /dev/null +++ b/backend/dataall/modules/datasets/aws/s3_location_client.py @@ -0,0 +1,31 @@ +import logging + +from dataall.base.aws.sts import SessionHelper +from dataall.modules.datasets_base.db.dataset_models import DatasetStorageLocation + +log = logging.getLogger(__name__) + + +class S3LocationClient: + + def __init__(self, location: DatasetStorageLocation): + session = SessionHelper.remote_session(accountid=location.AWSAccountId) + self._client = session.client('s3', region_name=location.region) + self._location = location + + def create_bucket_prefix(self): + location = self._location + try: + response = self._client.put_object( + Bucket=location.S3BucketName, Body='', Key=location.S3Prefix + '/' + ) + log.info( + 'Creating S3 Prefix `{}`({}) on AWS #{}'.format( + location.S3BucketName, location.AWSAccountId, response + ) + ) + except Exception as e: + log.error( + f'Dataset storage location creation failed on S3 for dataset location {location.locationUri} : {e}' + ) + raise e diff --git a/backend/dataall/modules/datasets/aws/s3_profiler_client.py 
b/backend/dataall/modules/datasets/aws/s3_profiler_client.py new file mode 100644 index 000000000..d61659f8e --- /dev/null +++ b/backend/dataall/modules/datasets/aws/s3_profiler_client.py @@ -0,0 +1,30 @@ +import logging + +from dataall.base.aws.sts import SessionHelper +from dataall.core.environment.db.environment_models import Environment + +log = logging.getLogger(__name__) + + +class S3ProfilerClient: + def __init__(self, env: Environment): + self._client = SessionHelper.remote_session(env.AwsAccountId).client( + 's3', region_name=env.region + ) + self._env = env + + def get_profiling_results_from_s3(self, dataset, table, run): + s3 = self._client + try: + key = f'profiling/results/{dataset.datasetUri}/{table.GlueTableName}/{run.GlueJobRunId}/results.json' + s3.head_object(Bucket=self._env.EnvironmentDefaultBucketName, Key=key) + response = s3.get_object( + Bucket=self._env.EnvironmentDefaultBucketName, Key=key + ) + content = str(response['Body'].read().decode('utf-8')) + return content + except Exception as e: + log.error( + f'Failed to retrieve S3 results for table profiling job ' + f'{table.GlueTableName}//{run.GlueJobRunId} due to {e}' + ) diff --git a/backend/dataall/modules/datasets/aws/sns_dataset_client.py b/backend/dataall/modules/datasets/aws/sns_dataset_client.py new file mode 100644 index 000000000..5c17d3697 --- /dev/null +++ b/backend/dataall/modules/datasets/aws/sns_dataset_client.py @@ -0,0 +1,39 @@ +import json +import logging + +from botocore.exceptions import ClientError + +from dataall.base.aws.sts import SessionHelper +from dataall.core.environment.db.environment_models import Environment +from dataall.modules.datasets import Dataset + +log = logging.getLogger(__name__) + + +class SnsDatasetClient: + + def __init__(self, environment: Environment, dataset: Dataset): + aws_session = SessionHelper.remote_session( + accountid=environment.AwsAccountId + ) + + self._client = aws_session.client('sns', region_name=environment.region) + self._topic = f'arn:aws:sns:{environment.region}:{environment.AwsAccountId}:{environment.subscriptionsConsumersTopicName}' + self._dataset = dataset + + def publish_dataset_message(self, message: dict): + + try: + response = self._client.publish( + TopicArn=self._topic, + Message=json.dumps(message), + ) + return response + except ClientError as e: + log.error( + f'Failed to deliver dataset ' + f'{self._dataset.datasetUri}|{message} ' + f'update message for consumers ' + f'due to: {e} ' + ) + raise e diff --git a/backend/dataall/modules/datasets/cdk/__init__.py b/backend/dataall/modules/datasets/cdk/__init__.py new file mode 100644 index 000000000..7353346ad --- /dev/null +++ b/backend/dataall/modules/datasets/cdk/__init__.py @@ -0,0 +1,10 @@ +from dataall.modules.datasets.cdk import dataset_stack, env_role_dataset_databrew_policy, env_role_dataset_glue_policy, \ + env_role_dataset_s3_policy, pivot_role_datasets_policy + +__all__ = [ + "dataset_stack", + "env_role_dataset_databrew_policy", + "env_role_dataset_glue_policy", + "env_role_dataset_s3_policy", + "pivot_role_datasets_policy" +] diff --git a/backend/dataall/cdkproxy/assets/gluedatabasecustomresource/__init__.py b/backend/dataall/modules/datasets/cdk/assets/gluedatabasecustomresource/__init__.py similarity index 100% rename from backend/dataall/cdkproxy/assets/gluedatabasecustomresource/__init__.py rename to backend/dataall/modules/datasets/cdk/assets/gluedatabasecustomresource/__init__.py diff --git a/backend/dataall/cdkproxy/assets/gluedatabasecustomresource/index.py 
b/backend/dataall/modules/datasets/cdk/assets/gluedatabasecustomresource/index.py similarity index 100% rename from backend/dataall/cdkproxy/assets/gluedatabasecustomresource/index.py rename to backend/dataall/modules/datasets/cdk/assets/gluedatabasecustomresource/index.py diff --git a/backend/dataall/cdkproxy/assets/glueprofilingjob/glue_script.py b/backend/dataall/modules/datasets/cdk/assets/glueprofilingjob/glue_script.py similarity index 100% rename from backend/dataall/cdkproxy/assets/glueprofilingjob/glue_script.py rename to backend/dataall/modules/datasets/cdk/assets/glueprofilingjob/glue_script.py diff --git a/backend/dataall/cdkproxy/assets/lakeformationdefaultsettings/__init__.py b/backend/dataall/modules/datasets/cdk/assets/lakeformationdefaultsettings/__init__.py similarity index 100% rename from backend/dataall/cdkproxy/assets/lakeformationdefaultsettings/__init__.py rename to backend/dataall/modules/datasets/cdk/assets/lakeformationdefaultsettings/__init__.py diff --git a/backend/dataall/cdkproxy/assets/lakeformationdefaultsettings/index.py b/backend/dataall/modules/datasets/cdk/assets/lakeformationdefaultsettings/index.py similarity index 100% rename from backend/dataall/cdkproxy/assets/lakeformationdefaultsettings/index.py rename to backend/dataall/modules/datasets/cdk/assets/lakeformationdefaultsettings/index.py diff --git a/backend/dataall/modules/datasets/cdk/dataset_custom_resources_extension.py b/backend/dataall/modules/datasets/cdk/dataset_custom_resources_extension.py new file mode 100644 index 000000000..a9e4efd1e --- /dev/null +++ b/backend/dataall/modules/datasets/cdk/dataset_custom_resources_extension.py @@ -0,0 +1,239 @@ +import os +import logging +import pathlib +from aws_cdk import ( + custom_resources as cr, + aws_iam as iam, + aws_lambda as _lambda, + aws_lambda_destinations as lambda_destination, + aws_ssm as ssm, + aws_sqs as sqs, + aws_kms as kms, + RemovalPolicy, + Duration, + CustomResource, +) + +from dataall.core.environment.cdk.environment_stack import EnvironmentSetup, EnvironmentStackExtension + +log = logging.getLogger(__name__) + + +class DatasetCustomResourcesExtension(EnvironmentStackExtension): + """Extends an environment stack for LakeFormation settings custom resource and Glue database custom resource""" + + @staticmethod + def extent(setup: EnvironmentSetup): + kms_key = DatasetCustomResourcesExtension.set_cr_kms_key( + setup=setup, + environment=setup.environment(), + group_roles=setup.group_roles, + default_role=setup.default_role + ) + + # Lakeformation default settings custom resource + # Set PivotRole as Lake Formation data lake admin + entry_point = str( + pathlib.PosixPath(os.path.dirname(__file__), + './assets/lakeformationdefaultsettings').resolve() + ) + + lakeformation_cr_dlq = DatasetCustomResourcesExtension.set_dlq( + setup=setup, + queue_name=f'{setup.environment().resourcePrefix}-lfcr-{setup.environment().environmentUri}', + kms_key=kms_key + ) + lf_default_settings_custom_resource = _lambda.Function( + setup, + 'LakeformationDefaultSettingsHandler', + function_name=f'{setup.environment().resourcePrefix}-lf-settings-handler-{setup.environment().environmentUri}', + role=setup.pivot_role, + handler='index.on_event', + code=_lambda.Code.from_asset(entry_point), + memory_size=1664, + description='This Lambda function is a cloudformation custom resource provider for Lakeformation default settings', + timeout=Duration.seconds(5 * 60), + environment={ + 'envname': setup.environment().name, + 'LOG_LEVEL': 'DEBUG', + 'AWS_ACCOUNT': 
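DatasetCustomResourcesExtension here, like DatasetGlueProfilerExtension further below, follows the EnvironmentStackExtension contract: a static extent(setup) hook that receives the environment stack (EnvironmentSetup) and attaches extra constructs to it. A minimal illustrative extension, not one that this module ships, could look roughly like this:

from aws_cdk import aws_ssm as ssm

from dataall.core.environment.cdk.environment_stack import EnvironmentSetup, EnvironmentStackExtension


class ExampleDatasetExtension(EnvironmentStackExtension):
    @staticmethod
    def extent(setup: EnvironmentSetup):
        env = setup.environment()
        # Any CDK construct can be attached to the environment stack through `setup`.
        ssm.StringParameter(
            setup,
            'ExampleDatasetParameter',
            string_value='example',
            parameter_name=f'/dataall/{env.environmentUri}/example/parameter',
        )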
setup.environment().AwsAccountId, + 'DEFAULT_ENV_ROLE_ARN': setup.environment().EnvironmentDefaultIAMRoleArn, + 'DEFAULT_CDK_ROLE_ARN': setup.environment().CDKRoleArn, + }, + dead_letter_queue_enabled=True, + dead_letter_queue=lakeformation_cr_dlq, + on_failure=lambda_destination.SqsDestination(lakeformation_cr_dlq), + runtime=_lambda.Runtime.PYTHON_3_9, + ) + LakeformationDefaultSettingsProvider = cr.Provider( + setup, + f'{setup.environment().resourcePrefix}LakeformationDefaultSettingsProvider', + on_event_handler=lf_default_settings_custom_resource, + ) + + default_lf_settings = CustomResource( + setup, + f'{setup.environment().resourcePrefix}DefaultLakeFormationSettings', + service_token=LakeformationDefaultSettingsProvider.service_token, + resource_type='Custom::LakeformationDefaultSettings', + properties={ + 'DataLakeAdmins': [ + f'arn:aws:iam::{setup.environment().AwsAccountId}:role/{setup.pivot_role_name}', + ] + }, + ) + + ssm.StringParameter( + setup, + 'LakeformationDefaultSettingsCustomeResourceFunctionArn', + string_value=lf_default_settings_custom_resource.function_arn, + parameter_name=f'/dataall/{setup.environment().environmentUri}/cfn/lf/defaultsettings/lambda/arn', + ) + + ssm.StringParameter( + setup, + 'LakeformationDefaultSettingsCustomeResourceFunctionName', + string_value=lf_default_settings_custom_resource.function_name, + parameter_name=f'/dataall/{setup.environment().environmentUri}/cfn/lf/defaultsettings/lambda/name', + ) + # Glue database custom resource + # This Lambda is triggered with the creation of each dataset, it is not executed when the environment is created + entry_point = str( + pathlib.PosixPath(os.path.dirname(__file__), './assets/gluedatabasecustomresource').resolve() + ) + + gluedb_lf_cr_dlq = DatasetCustomResourcesExtension.set_dlq( + setup=setup, + queue_name=f'{setup.environment().resourcePrefix}-gluedb-lf-cr-{setup.environment().environmentUri}', + kms_key=kms_key + ) + gluedb_lf_custom_resource = _lambda.Function( + setup, + 'GlueDatabaseLFCustomResourceHandler', + function_name=f'{setup.environment().resourcePrefix}-gluedb-lf-handler-{setup.environment().environmentUri}', + role=setup.pivot_role, + handler='index.on_event', + code=_lambda.Code.from_asset(entry_point), + memory_size=1664, + description='This Lambda function is a cloudformation custom resource provider for Glue database ' + 'as Cfn currently does not support the CreateTableDefaultPermissions parameter', + timeout=Duration.seconds(5 * 60), + environment={ + 'envname': setup.environment().name, + 'LOG_LEVEL': 'DEBUG', + 'AWS_ACCOUNT': setup.environment().AwsAccountId, + 'DEFAULT_ENV_ROLE_ARN': setup.environment().EnvironmentDefaultIAMRoleArn, + 'DEFAULT_CDK_ROLE_ARN': setup.environment().CDKRoleArn, + }, + dead_letter_queue_enabled=True, + dead_letter_queue=gluedb_lf_cr_dlq, + on_failure=lambda_destination.SqsDestination(gluedb_lf_cr_dlq), + tracing=_lambda.Tracing.ACTIVE, + runtime=_lambda.Runtime.PYTHON_3_9, + ) + + glue_db_provider = cr.Provider( + setup, + f'{setup.environment().resourcePrefix}GlueDbCustomResourceProvider', + on_event_handler=gluedb_lf_custom_resource + ) + ssm.StringParameter( + setup, + 'GlueLFCustomResourceFunctionArn', + string_value=gluedb_lf_custom_resource.function_arn, + parameter_name=f'/dataall/{setup.environment().environmentUri}/cfn/custom-resources/gluehandler/lambda/arn', + ) + + ssm.StringParameter( + setup, + 'GlueLFCustomResourceFunctionName', + string_value=gluedb_lf_custom_resource.function_name, + 
parameter_name=f'/dataall/{setup.environment().environmentUri}/cfn/custom-resources/gluehandler/lambda/name', + ) + + ssm.StringParameter( + setup, + 'GlueLFCustomResourceProviderServiceToken', + string_value=glue_db_provider.service_token, + parameter_name=f'/dataall/{setup.environment().environmentUri}/cfn/custom-resources/gluehandler/provider/servicetoken', + ) + + @staticmethod + def set_cr_kms_key(setup, environment, group_roles, default_role) -> kms.Key: + key_policy = iam.PolicyDocument( + assign_sids=True, + statements=[ + iam.PolicyStatement( + actions=[ + "kms:Encrypt", + "kms:Decrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*", + ], + effect=iam.Effect.ALLOW, + principals=[ + default_role, + ] + group_roles, + resources=["*"], + conditions={ + "StringEquals": {"kms:ViaService": f"sqs.{environment.region}.amazonaws.com"} + } + ), + iam.PolicyStatement( + actions=[ + "kms:DescribeKey", + "kms:List*", + "kms:GetKeyPolicy", + ], + effect=iam.Effect.ALLOW, + principals=[ + default_role, + ] + group_roles, + resources=["*"], + ) + ] + ) + + kms_key = kms.Key( + setup, + f'dataall-environment-{environment.environmentUri}-cr-key', + removal_policy=RemovalPolicy.DESTROY, + alias=f'dataall-environment-{environment.environmentUri}-cr-key', + enable_key_rotation=True, + admins=[ + iam.ArnPrincipal(environment.CDKRoleArn), + ], + policy=key_policy + ) + return kms_key + + @staticmethod + def set_dlq(setup, queue_name, kms_key) -> sqs.Queue: + dlq = sqs.Queue( + setup, + f'{queue_name}-queue', + queue_name=f'{queue_name}', + retention_period=Duration.days(14), + encryption=sqs.QueueEncryption.KMS, + encryption_master_key=kms_key, + data_key_reuse=Duration.days(1), + removal_policy=RemovalPolicy.DESTROY, + ) + + enforce_tls_statement = iam.PolicyStatement( + sid='Enforce TLS for all principals', + effect=iam.Effect.DENY, + principals=[ + iam.AnyPrincipal(), + ], + actions=[ + 'sqs:*', + ], + resources=[dlq.queue_arn], + conditions={ + 'Bool': {'aws:SecureTransport': 'false'}, + }, + ) + + dlq.add_to_resource_policy(enforce_tls_statement) + return dlq diff --git a/backend/dataall/modules/datasets/cdk/dataset_glue_profiler_extension.py b/backend/dataall/modules/datasets/cdk/dataset_glue_profiler_extension.py new file mode 100644 index 000000000..a4218b710 --- /dev/null +++ b/backend/dataall/modules/datasets/cdk/dataset_glue_profiler_extension.py @@ -0,0 +1,41 @@ +import os +import logging +import shutil +from aws_cdk import aws_s3_deployment + +from dataall.core.environment.cdk.environment_stack import EnvironmentSetup, EnvironmentStackExtension + +log = logging.getLogger(__name__) + + +class DatasetGlueProfilerExtension(EnvironmentStackExtension): + """Extends an environment stack for glue profiler """ + + @staticmethod + def extent(setup: EnvironmentSetup): + asset_path = DatasetGlueProfilerExtension.get_path_to_asset() + profiling_assetspath = DatasetGlueProfilerExtension.zip_code(asset_path) + + aws_s3_deployment.BucketDeployment( + setup, + f'{setup.environment().resourcePrefix}GlueProflingJobDeployment', + sources=[aws_s3_deployment.Source.asset(profiling_assetspath)], + destination_bucket=setup.default_environment_bucket, + destination_key_prefix='profiling/code', + ) + + @staticmethod + def get_path_to_asset(): + return os.path.realpath( + os.path.abspath( + os.path.join(__file__, '..', 'assets', 'glueprofilingjob') + ) + ) + + @staticmethod + def zip_code(assets_path, s3_key='profiler'): + log.info('Zipping code') + shutil.make_archive( + base_name=f'{assets_path}/{s3_key}', 
format='zip', root_dir=f'{assets_path}' + ) + return assets_path diff --git a/backend/dataall/modules/datasets/cdk/dataset_stack.py b/backend/dataall/modules/datasets/cdk/dataset_stack.py new file mode 100644 index 000000000..45b163b7f --- /dev/null +++ b/backend/dataall/modules/datasets/cdk/dataset_stack.py @@ -0,0 +1,541 @@ +import logging +import os + +from aws_cdk import ( + aws_s3 as s3, + aws_kms as kms, + aws_iam as iam, + aws_ssm as ssm, + aws_glue as glue, + Stack, + Duration, + CfnResource, + CustomResource, + Tags, +) +from aws_cdk.aws_glue import CfnCrawler + +from dataall.base import db +from dataall.base.aws.quicksight import QuicksightClient +from dataall.base.aws.sts import SessionHelper +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.base.cdkproxy.stacks.manager import stack +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup +from dataall.core.stacks.services.runtime_stacks_tagging import TagsUtil +from dataall.modules.datasets.aws.lf_dataset_client import LakeFormationDatasetClient +from dataall.modules.datasets_base.db.dataset_models import Dataset +from dataall.base.utils.cdk_nag_utils import CDKNagUtil + +logger = logging.getLogger(__name__) + + +@stack(stack='dataset') +class DatasetStack(Stack): + """Deploy common dataset resources: + - dataset S3 Bucket + KMS key (If S3 Bucket not imported) + - dataset IAM role + - custom resource to create glue database and grant permissions + - custom resource to register S3 location in LF + - Glue crawler + - Glue profiling job + """ + module_name = __file__ + + def get_engine(self) -> db.Engine: + envname = os.environ.get('envname', 'local') + engine = db.get_engine(envname=envname) + return engine + + def get_env(self, dataset) -> Environment: + engine = self.get_engine() + with engine.scoped_session() as session: + env = session.query(Environment).get(dataset.environmentUri) + return env + + def get_env_group(self, dataset) -> EnvironmentGroup: + engine = self.get_engine() + with engine.scoped_session() as session: + env = EnvironmentService.get_environment_group( + session, dataset.SamlAdminGroupName, dataset.environmentUri + ) + return env + + def get_target_with_uri(self, target_uri) -> Dataset: + engine = self.get_engine() + with engine.scoped_session() as session: + dataset = session.query(Dataset).get(target_uri) + if not dataset: + raise Exception('ObjectNotFound') + return dataset + + def get_target(self) -> Dataset: + engine = self.get_engine() + with engine.scoped_session() as session: + dataset = session.query(Dataset).get(self.target_uri) + if not dataset: + raise Exception('ObjectNotFound') + return dataset + + def has_quicksight_enabled(self, env) -> bool: + with self.get_engine().scoped_session() as session: + return EnvironmentService.get_boolean_env_param(session, env, "dashboardsEnabled") + + def __init__(self, scope, id, target_uri: str = None, **kwargs): + super().__init__( + scope, + id, + description="Cloud formation stack of DATASET: {}; URI: {}; DESCRIPTION: {}".format( + self.get_target_with_uri(target_uri=target_uri).label, + target_uri, + self.get_target_with_uri(target_uri=target_uri).description, + )[:1024], + **kwargs) + + # Read input + self.target_uri = target_uri + self.pivot_role_name = SessionHelper.get_delegation_role_name() + dataset = self.get_target() + env = self.get_env(dataset) + env_group = self.get_env_group(dataset) + + quicksight_default_group_arn = None + if self.has_quicksight_enabled(env): 
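For context, the @stack(stack='dataset') decorator above registers this class with the cdkproxy stack manager, which is how one CloudFormation stack per dataset gets synthesized. Very roughly, the proxy-side call amounts to the sketch below; it needs a reachable metadata database plus AWS credentials, and the actual wiring in dataall.base.cdkproxy may differ:

from aws_cdk import App, Environment as CdkEnvironment

app = App()
DatasetStack(
    app,
    'dataset-stack-example',
    target_uri='example-dataset-uri',
    env=CdkEnvironment(account='111122223333', region='eu-west-1'),
)
app.synth()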
+ quicksight_default_group_arn = f"arn:aws:quicksight:{dataset.region}:{dataset.AwsAccountId}:group/default/{QuicksightClient.DEFAULT_GROUP_NAME}" + + # Dataset S3 Bucket and KMS key + dataset_key = False + if dataset.imported and dataset.importedS3Bucket: + dataset_bucket = s3.Bucket.from_bucket_name( + self, f'ImportedBucket{dataset.datasetUri}', dataset.S3BucketName + ) + if dataset.importedKmsKey: + dataset_key = kms.Key.from_lookup( + self, f'ImportedKey{dataset.datasetUri}', alias_name=f"alias/{dataset.KmsAlias}" + ) + else: + dataset_key = kms.Key( + self, + 'DatasetKmsKey', + alias=dataset.KmsAlias, + enable_key_rotation=True, + policy=iam.PolicyDocument( + statements=[ + iam.PolicyStatement( + sid="EnableDatasetOwnerKeyUsage", + resources=['*'], + effect=iam.Effect.ALLOW, + principals=[ + iam.ArnPrincipal(env_group.environmentIAMRoleArn), + ], + actions=[ + "kms:Encrypt", + "kms:Decrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*", + "kms:DescribeKey", + "kms:List*", + "kms:GetKeyPolicy", + ], + ), + iam.PolicyStatement( + sid='KMSPivotRolePermissions', + effect=iam.Effect.ALLOW, + actions=[ + 'kms:Decrypt', + 'kms:Encrypt', + 'kms:GenerateDataKey*', + 'kms:PutKeyPolicy', + "kms:GetKeyPolicy", + 'kms:ReEncrypt*', + 'kms:TagResource', + 'kms:UntagResource', + 'kms:DeleteAlias', + 'kms:DescribeKey', + 'kms:CreateAlias', + 'kms:List*', + ], + resources=['*'], + principals=[ + iam.ArnPrincipal(f'arn:aws:iam::{env.AwsAccountId}:role/{self.pivot_role_name}') + ], + ) + ] + ), + admins=[ + iam.ArnPrincipal(env.CDKRoleArn), + ] + ) + + dataset_bucket = s3.Bucket( + self, + 'DatasetBucket', + bucket_name=dataset.S3BucketName, + encryption=s3.BucketEncryption.KMS, + encryption_key=dataset_key, + cors=[ + s3.CorsRule( + allowed_methods=[ + s3.HttpMethods.HEAD, + s3.HttpMethods.POST, + s3.HttpMethods.PUT, + s3.HttpMethods.DELETE, + s3.HttpMethods.GET, + ], + allowed_origins=['*'], + allowed_headers=['*'], + exposed_headers=[], + ) + ], + block_public_access=s3.BlockPublicAccess.BLOCK_ALL, + server_access_logs_bucket=s3.Bucket.from_bucket_name( + self, + 'EnvAccessLogsBucket', + f'{env.EnvironmentDefaultBucketName}', + ), + server_access_logs_prefix=f'access_logs/{dataset.S3BucketName}/', + enforce_ssl=True, + versioned=True, + bucket_key_enabled=True, + ) + + dataset_bucket.add_lifecycle_rule( + abort_incomplete_multipart_upload_after=Duration.days(7), + noncurrent_version_transitions=[ + s3.NoncurrentVersionTransition( + storage_class=s3.StorageClass.INFREQUENT_ACCESS, + transition_after=Duration.days(30), + ), + s3.NoncurrentVersionTransition( + storage_class=s3.StorageClass.GLACIER, + transition_after=Duration.days(60), + ), + ], + transitions=[ + s3.Transition( + storage_class=s3.StorageClass.INTELLIGENT_TIERING, + transition_after=Duration.days(90), + ), + s3.Transition( + storage_class=s3.StorageClass.GLACIER, + transition_after=Duration.days(360), + ), + ], + enabled=True, + ) + + # Dataset IAM role - ETL policies + dataset_admin_policy = iam.Policy( + self, + 'DatasetAdminPolicy', + policy_name=dataset.S3BucketName, + statements=[ + iam.PolicyStatement( + sid="ListAll", + actions=[ + "s3:ListAllMyBuckets", + "s3:ListAccessPoints", + ], + resources=["*"], + effect=iam.Effect.ALLOW + ), + iam.PolicyStatement( + sid="ListDatasetBucket", + actions=[ + "s3:ListBucket", + "s3:GetBucketLocation" + ], + resources=[dataset_bucket.bucket_arn], + effect=iam.Effect.ALLOW, + ), + iam.PolicyStatement( + sid="ReadWriteDatasetBucket", + actions=[ + "s3:PutObject", + "s3:PutObjectAcl", + 
"s3:GetObject", + "s3:GetObjectAcl", + "s3:GetObjectVersion", + "s3:DeleteObject" + ], + effect=iam.Effect.ALLOW, + resources=[dataset_bucket.bucket_arn + '/*'], + ), + iam.PolicyStatement( + sid="ReadAccessPointsDatasetBucket", + actions=[ + 's3:GetAccessPoint', + 's3:GetAccessPointPolicy', + 's3:GetAccessPointPolicyStatus', + ], + effect=iam.Effect.ALLOW, + resources=[ + f'arn:aws:s3:{dataset.region}:{dataset.AwsAccountId}:accesspoint/{dataset.datasetUri}*', + ], + ), + iam.PolicyStatement( + sid="GlueAccessCrawler", + actions=[ + "glue:Get*", + "glue:BatchGet*", + "glue:CreateTable", + "glue:UpdateTable", + "glue:DeleteTableVersion", + "glue:DeleteTable", + 'glue:BatchCreatePartition', + 'glue:BatchDeleteConnection', + 'glue:BatchDeletePartition', + 'glue:BatchDeleteTable', + 'glue:BatchDeleteTableVersion', + ], + effect=iam.Effect.ALLOW, + resources=[ + f"arn:aws:glue:*:{dataset.AwsAccountId}:catalog", + f"arn:aws:glue:{dataset.region}:{dataset.AwsAccountId}:database/{dataset.GlueDatabaseName}", + f"arn:aws:glue:{dataset.region}:{dataset.AwsAccountId}:table/{dataset.GlueDatabaseName}/*" + ] + ), + iam.PolicyStatement( + sid="GlueAccessDefault", + actions=[ + "glue:GetDatabase", + ], + effect=iam.Effect.ALLOW, + resources=[ + f"arn:aws:glue:{dataset.region}:{dataset.AwsAccountId}:database/default", + ] + ), + iam.PolicyStatement( + sid="CreateLoggingGlue", + actions=[ + 'logs:CreateLogGroup', + 'logs:CreateLogStream', + ], + effect=iam.Effect.ALLOW, + resources=[ + f'arn:aws:logs:{dataset.region}:{dataset.AwsAccountId}:log-group:/aws-glue/crawlers*', + f'arn:aws:logs:{dataset.region}:{dataset.AwsAccountId}:log-group:/aws-glue/jobs/*', + ], + ), + iam.PolicyStatement( + sid="LoggingGlue", + actions=[ + 'logs:PutLogEvents', + ], + effect=iam.Effect.ALLOW, + resources=[ + f'arn:aws:logs:{dataset.region}:{dataset.AwsAccountId}:log-group:/aws-glue/crawlers:log-stream:{dataset.GlueCrawlerName}', + f'arn:aws:logs:{dataset.region}:{dataset.AwsAccountId}:log-group:/aws-glue/jobs/*', + ], + ), + iam.PolicyStatement( + actions=['s3:ListBucket'], + resources=[f'arn:aws:s3:::{env.EnvironmentDefaultBucketName}'], + effect=iam.Effect.ALLOW + ), + iam.PolicyStatement( + sid="ReadEnvironmentBucketProfiling", + actions=[ + "s3:GetObject", + "s3:GetObjectAcl", + "s3:GetObjectVersion" + ], + effect=iam.Effect.ALLOW, + resources=[f'arn:aws:s3:::{env.EnvironmentDefaultBucketName}/profiling/code/*'], + ), + iam.PolicyStatement( + sid="ReadWriteEnvironmentBucketProfiling", + actions=[ + "s3:PutObject", + "s3:PutObjectAcl", + "s3:GetObject", + "s3:GetObjectAcl", + "s3:GetObjectVersion", + "s3:DeleteObject" + ], + resources=[f'arn:aws:s3:::{env.EnvironmentDefaultBucketName}/profiling/results/{dataset.datasetUri}/*'], + effect=iam.Effect.ALLOW, + ), + ], + ) + if dataset_key: + dataset_admin_policy.add_statements( + iam.PolicyStatement( + sid="KMSAccess", + actions=[ + "kms:Decrypt", + "kms:Encrypt", + "kms:GenerateDataKey" + ], + effect=iam.Effect.ALLOW, + resources=[dataset_key.key_arn], + ) + ) + dataset_admin_policy.node.add_dependency(dataset_bucket) + + dataset_admin_role = iam.Role( + self, + 'DatasetAdminRole', + role_name=dataset.IAMDatasetAdminRoleArn.split('/')[-1], + assumed_by=iam.CompositePrincipal( + iam.ArnPrincipal( + f'arn:aws:iam::{dataset.AwsAccountId}:role/{self.pivot_role_name}' + ), + iam.ServicePrincipal('glue.amazonaws.com'), + ), + ) + dataset_admin_policy.attach_to_role(dataset_admin_role) + + # Add Key Policy For Users + if not dataset.imported: + dataset_key.add_to_resource_policy( + 
iam.PolicyStatement( + sid="EnableDatasetIAMRoleKeyUsage", + resources=['*'], + effect=iam.Effect.ALLOW, + principals=[dataset_admin_role], + actions=[ + "kms:Encrypt", + "kms:Decrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*", + "kms:DescribeKey" + ], + ) + ) + + # Datalake location custom resource: registers the S3 location in LakeFormation + registered_location = LakeFormationDatasetClient(env, dataset).check_existing_lf_registered_location() + + if not registered_location: + storage_location = CfnResource( + self, + 'DatasetStorageLocation', + type='AWS::LakeFormation::Resource', + properties={ + 'ResourceArn': f'arn:aws:s3:::{dataset.S3BucketName}', + 'RoleArn': f'arn:aws:iam::{env.AwsAccountId}:role/{self.pivot_role_name}', + 'UseServiceLinkedRole': False, + }, + ) + + # Define dataset admin groups (those with data access grant) + dataset_admins = [ + dataset_admin_role.role_arn, + f'arn:aws:iam::{env.AwsAccountId}:role/{self.pivot_role_name}', + env_group.environmentIAMRoleArn, + ] + if quicksight_default_group_arn: + dataset_admins.append(quicksight_default_group_arn) + + # Get the Provider service token from SSM, the Lambda and Provider are created as part of the environment stack + glue_db_provider_service_token = ssm.StringParameter.from_string_parameter_name( + self, + 'GlueDatabaseProviderServiceToken', + string_parameter_name=f'/dataall/{dataset.environmentUri}/cfn/custom-resources/gluehandler/provider/servicetoken', + ) + + glue_db = CustomResource( + self, + f'{env.resourcePrefix}GlueDatabaseCustomResource', + service_token=glue_db_provider_service_token.string_value, + resource_type='Custom::GlueDatabase', + properties={ + 'CatalogId': dataset.AwsAccountId, + 'DatabaseInput': { + 'Description': 'dataall database {} '.format( + dataset.GlueDatabaseName + ), + 'LocationUri': f's3://{dataset.S3BucketName}/', + 'Name': f'{dataset.GlueDatabaseName}', + 'CreateTableDefaultPermissions': [], + 'Imported': 'IMPORTED-' if dataset.imported else 'CREATED-' + }, + 'DatabaseAdministrators': dataset_admins, + 'TriggerUpdate': True + }, + ) + + # Support resources: GlueCrawler for the dataset, Profiling Job and Trigger + crawler = glue.CfnCrawler( + self, + dataset.GlueCrawlerName, + description=f'dataall Glue Crawler for S3 Bucket {dataset.S3BucketName}', + name=dataset.GlueCrawlerName, + database_name=dataset.GlueDatabaseName, + schedule={'scheduleExpression': f'{dataset.GlueCrawlerSchedule}'} + if dataset.GlueCrawlerSchedule + else None, + role=dataset_admin_role.role_arn, + targets=CfnCrawler.TargetsProperty( + s3_targets=[ + CfnCrawler.S3TargetProperty(path=f's3://{dataset.S3BucketName}') + ] + ), + ) + crawler.node.add_dependency(dataset_bucket) + + job_args = { + '--additional-python-modules': 'urllib3<2,pydeequ', + '--datasetUri': dataset.datasetUri, + '--database': dataset.GlueDatabaseName, + '--datasetRegion': dataset.region, + '--dataallRegion': os.getenv('AWS_REGION', 'eu-west-1'), + '--environmentUri': env.environmentUri, + '--environmentBucket': env.EnvironmentDefaultBucketName, + '--datasetBucket': dataset.S3BucketName, + '--apiUrl': 'None', + '--snsTopicArn': 'None', + '--extra-jars': ( + f's3://{env.EnvironmentDefaultBucketName}' + f'/profiling/code/jars/deequ-2.0.0-spark-3.1.jar' + ), + '--enable-metrics': 'true', + '--enable-continuous-cloudwatch-log': 'true', + '--enable-glue-datacatalog': 'true', + '--SPARK_VERSION': '3.1', + } + + job = glue.CfnJob( + self, + 'DatasetGlueProfilingJob', + name=dataset.GlueProfilingJobName, + description=f'dataall Glue Profiling job
for dataset {dataset.label}', + role=dataset_admin_role.role_arn, + allocated_capacity=10, + execution_property=glue.CfnJob.ExecutionPropertyProperty( + max_concurrent_runs=100 + ), + command=glue.CfnJob.JobCommandProperty( + name='glueetl', + python_version='3', + script_location=( + f's3://{env.EnvironmentDefaultBucketName}' + f'/profiling/code/glue_script.py' + ), + ), + default_arguments=job_args, + glue_version='3.0', + tags={'Application': 'dataall'}, + ) + if dataset.GlueProfilingTriggerSchedule: + trigger = glue.CfnTrigger( + self, + 'DatasetGlueProfilingTrigger', + name=dataset.GlueProfilingTriggerName, + description=f'dataall Glue Profiling trigger schedule for dataset {dataset.label}', + type='SCHEDULED', + schedule=dataset.GlueProfilingTriggerSchedule, + start_on_creation=True, + actions=[ + glue.CfnTrigger.ActionProperty( + job_name=dataset.GlueProfilingJobName, arguments=job_args + ) + ], + ) + trigger.node.add_dependency(job) + + Tags.of(self).add('Classification', dataset.confidentiality) + + TagsUtil.add_tags(stack=self, model=Dataset, target_type="dataset") + + CDKNagUtil.check_rules(self) diff --git a/backend/dataall/modules/datasets/cdk/env_role_dataset_databrew_policy.py b/backend/dataall/modules/datasets/cdk/env_role_dataset_databrew_policy.py new file mode 100644 index 000000000..d7cf679c6 --- /dev/null +++ b/backend/dataall/modules/datasets/cdk/env_role_dataset_databrew_policy.py @@ -0,0 +1,61 @@ +from dataall.core.environment.cdk.env_role_core_policies.service_policy import ServicePolicy +from aws_cdk import aws_iam as iam + +from dataall.modules.datasets.services.dataset_permissions import CREATE_DATASET + + +class DatasetDatabrewServicePolicy(ServicePolicy): + """ + Class including all permissions needed to work with AWS DataBrew.
+ """ + def get_statements(self, group_permissions, **kwargs): + if CREATE_DATASET not in group_permissions: + return [] + + statements = [ + iam.PolicyStatement( + # sid="DataBrewGeneric", + actions=['databrew:List*'], + resources=['*'] + ), + iam.PolicyStatement( + # sid="DataBrewRecipes", + actions=[ + 'databrew:BatchDeleteRecipeVersion', + 'databrew:*Recipe', + ], + resources=[ + f'arn:aws:databrew:{self.region}:{self.account}:recipe/{self.resource_prefix}*' + ], + ), + iam.PolicyStatement( + # sid="DataBrewManageTeamResources", + not_actions=[ + 'databrew:Create*', + 'databrew:TagResource', + 'databrew:UntagResource', + ], + resources=[ + f'arn:aws:databrew:{self.region}:{self.account}:*/{self.resource_prefix}*' + ], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_value] + } + }, + ), + iam.PolicyStatement( + # sid="DataBrewCreateTeamResources", + actions=[ + 'databrew:Create*', + 'databrew:TagResource', + ], + resources=[ + f'arn:aws:databrew:{self.region}:{self.account}:*/{self.resource_prefix}*' + ], + conditions={ + 'StringEquals': {f'aws:RequestTag/{self.tag_key}': [self.tag_value]} + }, + ), + ] + return statements diff --git a/backend/dataall/modules/datasets/cdk/env_role_dataset_glue_policy.py b/backend/dataall/modules/datasets/cdk/env_role_dataset_glue_policy.py new file mode 100644 index 000000000..625cedc06 --- /dev/null +++ b/backend/dataall/modules/datasets/cdk/env_role_dataset_glue_policy.py @@ -0,0 +1,199 @@ +from dataall.core.environment.cdk.env_role_core_policies.service_policy import ServicePolicy +from aws_cdk import aws_iam as iam + +from dataall.modules.datasets.services.dataset_permissions import CREATE_DATASET + + +class DatasetGlueCatalogServicePolicy(ServicePolicy): + """ + Class including all permissions needed to work with AWS Glue Catalog. 
+ """ + def get_statements(self, group_permissions, **kwargs): + if CREATE_DATASET not in group_permissions: + return [] + + statements = [ + iam.PolicyStatement( + # sid="GlueLFReadData", + effect=iam.Effect.ALLOW, + actions=[ + "lakeformation:GetDataAccess", + "glue:GetTable", + "glue:GetTables", + "glue:SearchTables", + "glue:GetDatabase", + "glue:GetDatabases", + "glue:GetPartitions", + "lakeformation:GetResourceLFTags", + "lakeformation:ListLFTags", + "lakeformation:GetLFTag", + "lakeformation:SearchTablesByLFTags", + "lakeformation:SearchDatabasesByLFTags" + ], + resources=["*"], + ), + iam.PolicyStatement( + # sid="GlueManageCatalog", + actions=[ + 'glue:CreateConnection', + 'glue:CreateDatabase', + 'glue:CreatePartition', + 'glue:CreateTable', + 'glue:CreateUserDefinedFunction', + 'glue:DeleteConnection', + 'glue:DeleteDatabase', + 'glue:DeleteTable', + 'glue:DeleteTableVersion', + 'glue:DeleteUserDefinedFunction', + 'glue:UpdateConnection', + 'glue:UpdateDatabase', + 'glue:UpdatePartition', + 'glue:UpdateTable', + 'glue:UpdateUserDefinedFunction', + 'glue:BatchCreatePartition', + 'glue:BatchDeleteConnection', + 'glue:BatchDeletePartition', + 'glue:BatchDeleteTable', + 'glue:BatchDeleteTableVersion', + 'glue:BatchGetPartition', + ], + resources=[ + f'arn:aws:glue:{self.region}:{self.account}:userDefinedFunction/{self.resource_prefix}*/*', + f'arn:aws:glue:{self.region}:{self.account}:database/{self.resource_prefix}*', + f'arn:aws:glue:{self.region}:{self.account}:catalog', + f'arn:aws:glue:{self.region}:{self.account}:table/{self.resource_prefix}*/*', + f'arn:aws:glue:{self.region}:{self.account}:connection/{self.resource_prefix}*', + ], + ) + ] + return statements + + +class DatasetGlueEtlServicePolicy(ServicePolicy): + """ + Class including all permissions needed to work with AWS Glue ETL. 
+ """ + def get_statements(self, group_permissions, **kwargs): + statements = [ + iam.PolicyStatement( + # sid="ListBucketProfilingGlue", + actions=[ + "s3:ListBucket", + ], + effect=iam.Effect.ALLOW, + resources=[f'arn:aws:s3:::{self.environment.EnvironmentDefaultBucketName}'], + conditions={"StringEquals": { + "s3:prefix": ["", "profiling/", "profiling/code/"], + "s3:delimiter": ["/"]}} + ), + iam.PolicyStatement( + # sid="ReadEnvironmentBucketProfilingGlue", + actions=[ + "s3:GetObject", + "s3:GetObjectAcl", + "s3:GetObjectVersion", + ], + resources=[ + f'arn:aws:s3:::{self.environment.EnvironmentDefaultBucketName}/profiling/code/*'], + effect=iam.Effect.ALLOW, + ), + iam.PolicyStatement( + # sid="GlueList", + effect=iam.Effect.ALLOW, + actions=[ + 'glue:Get*', + 'glue:List*', + 'glue:BatchGet*', + ], + resources=["*"], + ), + iam.PolicyStatement( + # sid="GlueCreateS3Bucket", + effect=iam.Effect.ALLOW, + actions=[ + 's3:CreateBucket', + 's3:ListBucket', + 's3:PutBucketPublicAccessBlock' + ], + resources=[f'arn:aws:s3:::aws-glue-assets-{self.account}-{self.region}'], + ), + iam.PolicyStatement( + # sid="GlueReadWriteS3Bucket", + actions=[ + 's3:GetObject', + 's3:PutObject', + 's3:DeleteObject' + ], + effect=iam.Effect.ALLOW, + resources=[ + f'arn:aws:s3:::aws-glue-assets-{self.account}-{self.region}/{self.resource_prefix}/{self.team.groupUri}/', + f'arn:aws:s3:::aws-glue-assets-{self.account}-{self.region}/{self.resource_prefix}/{self.team.groupUri}/*', + ], + ), + iam.PolicyStatement( + # sid="GlueCreate", + effect=iam.Effect.ALLOW, + actions=[ + 'glue:CreateDevEndpoint', + 'glue:CreateCrawler', + 'glue:CreateJob', + 'glue:CreateTrigger', + 'glue:TagResource' + ], + resources=[ + f'arn:aws:glue:{self.region}:{self.account}:crawler/{self.resource_prefix}*', + f'arn:aws:glue:{self.region}:{self.account}:job/{self.resource_prefix}*', + f'arn:aws:glue:{self.region}:{self.account}:devEndpoint/{self.resource_prefix}*', + f'arn:aws:glue:{self.region}:{self.account}:catalog', + f'arn:aws:glue:{self.region}:{self.account}:trigger/{self.resource_prefix}*', + f'arn:aws:glue:{self.region}:{self.account}:table/{self.resource_prefix}*/*', + ], + conditions={ + 'StringEquals': {f'aws:RequestTag/{self.tag_key}': [self.tag_value]} + } + ), + iam.PolicyStatement( + # sid="GlueManageGlueResources", + effect=iam.Effect.ALLOW, + not_actions=[ + 'glue:CreateDevEndpoint', + 'glue:CreateTrigger', + 'glue:CreateJob', + 'glue:CreateCrawler', + ], + resources=[ + f'arn:aws:glue:{self.region}:{self.account}:devEndpoint/{self.resource_prefix}*', + f'arn:aws:glue:{self.region}:{self.account}:trigger/{self.resource_prefix}*', + f'arn:aws:glue:{self.region}:{self.account}:job/{self.resource_prefix}*', + f'arn:aws:glue:{self.region}:{self.account}:crawler/{self.resource_prefix}*' + ], + conditions={ + 'StringEquals': { + f'aws:resourceTag/{self.tag_key}': [self.tag_value] + } + }, + ), + iam.PolicyStatement( + # sid="SupportGluePermissions", + effect=iam.Effect.ALLOW, + actions=[ + 'glue:*Classifier', + 'glue:CreateScript', + ], + resources=['*'], + ), + iam.PolicyStatement( + # sid="LoggingGlue", + actions=[ + 'logs:CreateLogGroup', + 'logs:CreateLogStream', + 'logs:PutLogEvents', + ], + effect=iam.Effect.ALLOW, + resources=[ + f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws-glue/*', + f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws-glue/*:log-stream:*', + ], + ) + ] + return statements diff --git a/backend/dataall/modules/datasets/cdk/env_role_dataset_s3_policy.py 
b/backend/dataall/modules/datasets/cdk/env_role_dataset_s3_policy.py new file mode 100644 index 000000000..03e3d7232 --- /dev/null +++ b/backend/dataall/modules/datasets/cdk/env_role_dataset_s3_policy.py @@ -0,0 +1,97 @@ +from typing import List +from aws_cdk import aws_iam as iam + +from dataall.core.environment.cdk.env_role_core_policies.data_policy import S3Policy +from dataall.modules.dataset_sharing.aws.kms_client import KmsClient +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.db.dataset_models import Dataset + + +class DatasetS3Policy(S3Policy): + + def get_statements(self, session): + datasets = DatasetRepository.list_group_datasets( + session, + environment_id=self.environment.environmentUri, + group_uri=self.team.groupUri, + ) + return DatasetS3Policy._generate_dataset_statements(datasets) + + @staticmethod + def _generate_dataset_statements(datasets: List[Dataset]): + allowed_buckets = [] + allowed_access_points = [] + statements = [] + if datasets: + dataset: Dataset + for dataset in datasets: + allowed_buckets.append(f'arn:aws:s3:::{dataset.S3BucketName}') + allowed_access_points.append( + f'arn:aws:s3:{dataset.region}:{dataset.AwsAccountId}:accesspoint/{dataset.datasetUri}*') + allowed_buckets_content = [f"{bucket}/*" for bucket in allowed_buckets] + statements = [ + iam.PolicyStatement( + sid="ListDatasetsBuckets", + actions=[ + "s3:ListBucket", + "s3:GetBucketLocation" + ], + resources=allowed_buckets, + effect=iam.Effect.ALLOW, + ), + iam.PolicyStatement( + sid="ReadWriteDatasetsBuckets", + actions=[ + "s3:PutObject", + "s3:PutObjectAcl", + "s3:GetObject", + "s3:GetObjectAcl", + "s3:GetObjectVersion", + "s3:DeleteObject" + ], + effect=iam.Effect.ALLOW, + resources=allowed_buckets_content, + ), + iam.PolicyStatement( + sid="ReadAccessPointsDatasetBucket", + actions=[ + 's3:GetAccessPoint', + 's3:GetAccessPointPolicy', + 's3:GetAccessPointPolicyStatus', + ], + effect=iam.Effect.ALLOW, + resources=allowed_access_points, + ), + ] + kms_statement = DatasetS3Policy._set_allowed_kms_keys_statements(datasets) + if kms_statement: + statements.append(kms_statement) + return statements + + @staticmethod + def _set_allowed_kms_keys_statements(datasets): + allowed_buckets_kms_keys = [] + if datasets: + dataset: Dataset + for dataset in datasets: + if dataset.imported and dataset.importedKmsKey: + key_id = KmsClient(dataset.AwsAccountId, dataset.region).get_key_id( + key_alias=f"alias/{dataset.KmsAlias}" + ) + if key_id: + allowed_buckets_kms_keys.append( + f"arn:aws:kms:{dataset.region}:{dataset.AwsAccountId}:key/{key_id}") + if len(allowed_buckets_kms_keys): + return iam.PolicyStatement( + sid="KMSImportedDatasetAccess", + actions=[ + "kms:Decrypt", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:DescribeKey", + "kms:GenerateDataKey" + ], + effect=iam.Effect.ALLOW, + resources=allowed_buckets_kms_keys + ) + return None diff --git a/backend/dataall/modules/datasets/cdk/pivot_role_datasets_policy.py b/backend/dataall/modules/datasets/cdk/pivot_role_datasets_policy.py new file mode 100644 index 000000000..1e2337a9a --- /dev/null +++ b/backend/dataall/modules/datasets/cdk/pivot_role_datasets_policy.py @@ -0,0 +1,141 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class DatasetsPivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with Datasets based in S3 and Glue databases + It allows pivot 
role to: + - .... + """ + def get_statements(self): + statements = [ + # S3 Imported Buckets - restrict resources via bucket policies + iam.PolicyStatement( + sid='ImportedBuckets', + effect=iam.Effect.ALLOW, + actions=[ + 's3:List*', + 's3:GetBucket*', + 's3:GetLifecycleConfiguration', + 's3:GetObject', + 's3:PutBucketPolicy', + 's3:PutBucketTagging', + 's3:PutObject', + 's3:PutObjectAcl', + 's3:PutBucketOwnershipControls', + ], + resources=['arn:aws:s3:::*'], + ), + # For dataset preview + iam.PolicyStatement( + sid='AthenaWorkgroupsDataset', + effect=iam.Effect.ALLOW, + actions=[ + "athena:GetQueryExecution", + "athena:GetQueryResults", + "athena:GetWorkGroup", + "athena:StartQueryExecution" + ], + resources=[f'arn:aws:athena:*:{self.account}:workgroup/{self.env_resource_prefix}*'], + ), + # For Glue database management + iam.PolicyStatement( + sid='GlueCatalog', + effect=iam.Effect.ALLOW, + actions=[ + 'glue:BatchCreatePartition', + 'glue:BatchDeletePartition', + 'glue:BatchDeleteTable', + 'glue:CreateDatabase', + 'glue:CreatePartition', + 'glue:CreateTable', + 'glue:DeleteDatabase', + 'glue:DeletePartition', + 'glue:DeleteTable', + 'glue:BatchGet*', + 'glue:Get*', + 'glue:List*', + 'glue:SearchTables', + 'glue:UpdateDatabase', + 'glue:UpdatePartition', + 'glue:UpdateTable', + 'glue:TagResource', + 'glue:DeleteResourcePolicy', + 'glue:PutResourcePolicy', + ], + resources=['*'], + ), + # Manage LF permissions for glue databases + iam.PolicyStatement( + sid='LakeFormation', + effect=iam.Effect.ALLOW, + actions=[ + 'lakeformation:UpdateResource', + 'lakeformation:DescribeResource', + 'lakeformation:AddLFTagsToResource', + 'lakeformation:RemoveLFTagsFromResource', + 'lakeformation:GetResourceLFTags', + 'lakeformation:ListLFTags', + 'lakeformation:CreateLFTag', + 'lakeformation:GetLFTag', + 'lakeformation:UpdateLFTag', + 'lakeformation:DeleteLFTag', + 'lakeformation:SearchTablesByLFTags', + 'lakeformation:SearchDatabasesByLFTags', + 'lakeformation:ListResources', + 'lakeformation:ListPermissions', + 'lakeformation:GrantPermissions', + 'lakeformation:BatchGrantPermissions', + 'lakeformation:RevokePermissions', + 'lakeformation:BatchRevokePermissions', + 'lakeformation:PutDataLakeSettings', + 'lakeformation:GetDataLakeSettings', + 'lakeformation:GetDataAccess', + 'lakeformation:GetWorkUnits', + 'lakeformation:StartQueryPlanning', + 'lakeformation:GetWorkUnitResults', + 'lakeformation:GetQueryState', + 'lakeformation:GetQueryStatistics', + 'lakeformation:GetTableObjects', + 'lakeformation:UpdateTableObjects', + 'lakeformation:DeleteObjectsOnCancel', + ], + resources=['*'], + ), + # Glue ETL - needed to start crawler and profiling jobs + iam.PolicyStatement( + sid='GlueETL', + effect=iam.Effect.ALLOW, + actions=[ + 'glue:StartCrawler', + 'glue:StartJobRun', + 'glue:StartTrigger', + 'glue:UpdateTrigger', + 'glue:UpdateJob', + 'glue:UpdateCrawler', + ], + resources=[ + f'arn:aws:glue:*:{self.account}:crawler/{self.env_resource_prefix}*', + f'arn:aws:glue:*:{self.account}:job/{self.env_resource_prefix}*', + f'arn:aws:glue:*:{self.account}:trigger/{self.env_resource_prefix}*', + ], + ), + iam.PolicyStatement( + sid="PassRoleGlue", + actions=[ + 'iam:PassRole', + ], + resources=[ + f'arn:aws:iam::{self.account}:role/{self.env_resource_prefix}*', + ], + conditions={ + "StringEquals": { + "iam:PassedToService": [ + "glue.amazonaws.com", + ] + } + } + ) + ] + return statements diff --git a/backend/dataall/modules/datasets/db/__init__.py b/backend/dataall/modules/datasets/db/__init__.py new file mode 
100644 index 000000000..104b49a42 --- /dev/null +++ b/backend/dataall/modules/datasets/db/__init__.py @@ -0,0 +1 @@ +"""Database logic for datasets""" diff --git a/backend/dataall/modules/datasets/db/dataset_column_repositories.py b/backend/dataall/modules/datasets/db/dataset_column_repositories.py new file mode 100644 index 000000000..7351d71b5 --- /dev/null +++ b/backend/dataall/modules/datasets/db/dataset_column_repositories.py @@ -0,0 +1,45 @@ +from operator import or_ + +from dataall.base.db import paginate +from dataall.base.db.exceptions import ObjectNotFound +from dataall.modules.datasets_base.db.dataset_models import DatasetTableColumn + + +class DatasetColumnRepository: + @staticmethod + def get_column(session, column_uri) -> DatasetTableColumn: + column = session.query(DatasetTableColumn).get(column_uri) + if not column: + raise ObjectNotFound('Column', column_uri) + return column + + @staticmethod + def save_and_commit(session, column: DatasetTableColumn): + session.add(column) + session.commit() + + @staticmethod + def paginate_active_columns_for_table(session, table_uri: str, filter: dict): + q = ( + session.query(DatasetTableColumn) + .filter( + DatasetTableColumn.tableUri == table_uri, + DatasetTableColumn.deleted.is_(None), + ) + .order_by(DatasetTableColumn.columnType.asc()) + ) + + if 'term' in filter: + term = filter['term'] + q = q.filter( + or_( + DatasetTableColumn.label.ilike('%' + term + '%'), + DatasetTableColumn.description.ilike('%' + term + '%'), + ) + ).order_by(DatasetTableColumn.columnType.asc()) + + return paginate( + query=q, + page=filter.get('page', 1), + page_size=filter.get('pageSize', 10) + ).to_dict() diff --git a/backend/dataall/modules/datasets/db/dataset_location_repositories.py b/backend/dataall/modules/datasets/db/dataset_location_repositories.py new file mode 100644 index 000000000..4f2b14c91 --- /dev/null +++ b/backend/dataall/modules/datasets/db/dataset_location_repositories.py @@ -0,0 +1,147 @@ +import logging + +from sqlalchemy import and_, or_ + +from dataall.base.db import paginate, exceptions +from dataall.modules.datasets_base.db.dataset_models import DatasetStorageLocation, Dataset + +logger = logging.getLogger(__name__) + + +class DatasetLocationRepository: + + @staticmethod + def exists(session, dataset_uri: str, prefix: str): + return ( + session.query(DatasetStorageLocation) + .filter( + and_( + DatasetStorageLocation.datasetUri == dataset_uri, + DatasetStorageLocation.S3Prefix == prefix, + ) + ) + .count() + ) + + @staticmethod + def create_dataset_location( + session, + dataset: Dataset, + data: dict = None + ) -> DatasetStorageLocation: + location = DatasetStorageLocation( + datasetUri=dataset.datasetUri, + label=data.get('label'), + description=data.get('description', 'No description provided'), + tags=data.get('tags', []), + S3Prefix=data.get('prefix'), + S3BucketName=dataset.S3BucketName, + AWSAccountId=dataset.AwsAccountId, + owner=dataset.owner, + region=dataset.region, + ) + session.add(location) + session.commit() + return location + + @staticmethod + def list_dataset_locations( + session, + uri: str, + data: dict = None, + ) -> dict: + query = ( + session.query(DatasetStorageLocation) + .filter(DatasetStorageLocation.datasetUri == uri) + .order_by(DatasetStorageLocation.created.desc()) + ) + if data.get('term'): + term = data.get('term') + query = query.filter( + DatasetStorageLocation.label.ilike('%' + term + '%') + ) + return paginate( + query, page=data.get('page', 1), page_size=data.get('pageSize', 10) + ).to_dict() 
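A minimal, hypothetical usage sketch of the repository above (not taken from this changeset): it assumes an engine object like the one the async handlers further below receive, whose scoped_session() yields a SQLAlchemy session, and it only uses the 'term', 'page' and 'pageSize' filter keys that list_dataset_locations actually reads; the helper name and the search term are illustrative.

    # Illustrative sketch only -- the helper name and the engine wiring are assumptions.
    from dataall.modules.datasets.db.dataset_location_repositories import DatasetLocationRepository

    def search_dataset_folders(engine, dataset_uri: str, term: str) -> dict:
        # 'term' narrows folder labels via ilike, 'page'/'pageSize' drive the paginate()
        # helper, and the result is the paginated dict produced by to_dict().
        with engine.scoped_session() as session:
            return DatasetLocationRepository.list_dataset_locations(
                session=session,
                uri=dataset_uri,
                data={'term': term, 'page': 1, 'pageSize': 10},
            )
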
+ + @staticmethod + def delete(session, location): + session.delete(location) + + @staticmethod + def get_location_by_uri(session, location_uri) -> DatasetStorageLocation: + location: DatasetStorageLocation = session.query( + DatasetStorageLocation + ).get(location_uri) + if not location: + raise exceptions.ObjectNotFound('Folder', location_uri) + return location + + @staticmethod + def get_location_by_s3_prefix(session, s3_prefix, accountid, region): + location: DatasetStorageLocation = ( + session.query(DatasetStorageLocation) + .filter( + and_( + DatasetStorageLocation.S3Prefix.startswith(s3_prefix), + DatasetStorageLocation.AWSAccountId == accountid, + DatasetStorageLocation.region == region, + ) + ) + .first() + ) + if not location: + logging.info(f'No location found for {s3_prefix}|{accountid}|{region}') + else: + logging.info(f'Found location {location.locationUri}|{location.S3Prefix}') + return location + + @staticmethod + def count_dataset_locations(session, dataset_uri): + return ( + session.query(DatasetStorageLocation) + .filter(DatasetStorageLocation.datasetUri == dataset_uri) + .count() + ) + + @staticmethod + def delete_dataset_locations(session, dataset_uri) -> bool: + locations = ( + session.query(DatasetStorageLocation) + .filter(DatasetStorageLocation.datasetUri == dataset_uri) + .all() + ) + for location in locations: + session.delete(location) + return True + + @staticmethod + def get_dataset_folders(session, dataset_uri): + """return the dataset folders""" + return ( + session.query(DatasetStorageLocation) + .filter(DatasetStorageLocation.datasetUri == dataset_uri) + .all() + ) + + @staticmethod + def paginated_dataset_locations(session, uri, data=None) -> dict: + query = session.query(DatasetStorageLocation).filter( + DatasetStorageLocation.datasetUri == uri + ) + if data and data.get('term'): + query = query.filter( + or_( + *[ + DatasetStorageLocation.name.ilike( + '%' + data.get('term') + '%' + ), + DatasetStorageLocation.S3Prefix.ilike( + '%' + data.get('term') + '%' + ), + ] + ) + ) + return paginate( + query=query, page_size=data.get('pageSize', 10), page=data.get('page', 1) + ).to_dict() diff --git a/backend/dataall/modules/datasets/db/dataset_profiling_repositories.py b/backend/dataall/modules/datasets/db/dataset_profiling_repositories.py new file mode 100644 index 000000000..f0d810dd3 --- /dev/null +++ b/backend/dataall/modules/datasets/db/dataset_profiling_repositories.py @@ -0,0 +1,125 @@ +from sqlalchemy import and_ + +from dataall.base.db import paginate +from dataall.modules.datasets_base.db.dataset_models import DatasetProfilingRun, DatasetTable + + +class DatasetProfilingRepository: + def __init__(self): + pass + + @staticmethod + def save_profiling(session, dataset, env, glue_table_name): + run = DatasetProfilingRun( + datasetUri=dataset.datasetUri, + status='RUNNING', + AwsAccountId=env.AwsAccountId, + GlueJobName=dataset.GlueProfilingJobName or 'Unknown', + GlueTriggerSchedule=dataset.GlueProfilingTriggerSchedule, + GlueTriggerName=dataset.GlueProfilingTriggerName, + GlueTableName=glue_table_name, + GlueJobRunId=None, + owner=dataset.owner, + label=dataset.GlueProfilingJobName or 'Unknown', + ) + + session.add(run) + session.commit() + return run + + @staticmethod + def update_run(session, run_uri, glue_job_run_id): + run = DatasetProfilingRepository.get_profiling_run( + session, profiling_run_uri=run_uri, glue_job_run_id=glue_job_run_id + ) + if glue_job_run_id: + run.GlueJobRunId = glue_job_run_id + session.commit() + return run + + @staticmethod 
+ def get_profiling_run( + session, profiling_run_uri=None, glue_job_run_id=None, glue_table_name=None + ): + if profiling_run_uri: + run: DatasetProfilingRun = session.query( + DatasetProfilingRun + ).get(profiling_run_uri) + else: + run: DatasetProfilingRun = ( + session.query(DatasetProfilingRun) + .filter(DatasetProfilingRun.GlueJobRunId == glue_job_run_id) + .filter(DatasetProfilingRun.GlueTableName == glue_table_name) + .first() + ) + return run + + @staticmethod + def list_profiling_runs(session, dataset_uri): + # TODO filter is always default + filter = {} + q = ( + session.query(DatasetProfilingRun) + .filter(DatasetProfilingRun.datasetUri == dataset_uri) + .order_by(DatasetProfilingRun.created.desc()) + ) + return paginate( + q, page=filter.get('page', 1), page_size=filter.get('pageSize', 20) + ).to_dict() + + @staticmethod + def list_table_profiling_runs(session, table_uri): + # TODO filter is always default + filter = {} + q = ( + session.query(DatasetProfilingRun) + .join( + DatasetTable, + DatasetTable.datasetUri == DatasetProfilingRun.datasetUri, + ) + .filter( + and_( + DatasetTable.tableUri == table_uri, + DatasetTable.GlueTableName == DatasetProfilingRun.GlueTableName, + ) + ) + .order_by(DatasetProfilingRun.created.desc()) + ) + return paginate( + q, page=filter.get('page', 1), page_size=filter.get('pageSize', 20) + ).to_dict() + + @staticmethod + def get_table_last_profiling_run(session, table_uri): + return ( + session.query(DatasetProfilingRun) + .join( + DatasetTable, + DatasetTable.datasetUri == DatasetProfilingRun.datasetUri, + ) + .filter(DatasetTable.tableUri == table_uri) + .filter( + DatasetTable.GlueTableName + == DatasetProfilingRun.GlueTableName + ) + .order_by(DatasetProfilingRun.created.desc()) + .first() + ) + + @staticmethod + def get_table_last_profiling_run_with_results(session, table_uri): + return ( + session.query(DatasetProfilingRun) + .join( + DatasetTable, + DatasetTable.datasetUri == DatasetProfilingRun.datasetUri, + ) + .filter(DatasetTable.tableUri == table_uri) + .filter( + DatasetTable.GlueTableName + == DatasetProfilingRun.GlueTableName + ) + .filter(DatasetProfilingRun.results.isnot(None)) + .order_by(DatasetProfilingRun.created.desc()) + .first() + ) diff --git a/backend/dataall/modules/datasets/db/dataset_table_repositories.py b/backend/dataall/modules/datasets/db/dataset_table_repositories.py new file mode 100644 index 000000000..ef1a73756 --- /dev/null +++ b/backend/dataall/modules/datasets/db/dataset_table_repositories.py @@ -0,0 +1,211 @@ +import logging +from datetime import datetime + +from sqlalchemy import or_ +from sqlalchemy.sql import and_ + +from dataall.base.db import exceptions +from dataall.modules.dataset_sharing.db.share_object_models import ShareObjectItem, ShareObject +from dataall.modules.dataset_sharing.db.share_object_repositories import ShareItemSM +from dataall.modules.datasets_base.db.dataset_models import DatasetTableColumn, DatasetTable, Dataset +from dataall.base.utils import json_utils + +logger = logging.getLogger(__name__) + + +class DatasetTableRepository: + + @staticmethod + def save(session, table: DatasetTable): + session.add(table) + + @staticmethod + def create_synced_table(session, dataset: Dataset, table: dict): + updated_table = DatasetTable( + datasetUri=dataset.datasetUri, + label=table['Name'], + name=table['Name'], + region=dataset.region, + owner=dataset.owner, + GlueDatabaseName=dataset.GlueDatabaseName, + AWSAccountId=dataset.AwsAccountId, + S3BucketName=dataset.S3BucketName, + 
S3Prefix=table.get('StorageDescriptor', {}).get('Location'), + GlueTableName=table['Name'], + LastGlueTableStatus='InSync', + GlueTableProperties=json_utils.to_json( + table.get('Parameters', {}) + ), + ) + session.add(updated_table) + session.commit() + return updated_table + + @staticmethod + def delete(session, table: DatasetTable): + session.delete(table) + + @staticmethod + def query_dataset_tables_shared_with_env( + session, environment_uri: str, dataset_uri: str, username: str, groups: [str] + ): + """For a given dataset, returns the list of Tables shared with the environment + This means looking at approved ShareObject items + for the share object associating the dataset and environment + """ + share_item_shared_states = ShareItemSM.get_share_item_shared_states() + env_tables_shared = ( + session.query(DatasetTable) # all tables + .join( + ShareObjectItem, # found in ShareObjectItem + ShareObjectItem.itemUri == DatasetTable.tableUri, + ) + .join( + ShareObject, # jump to share object + ShareObject.shareUri == ShareObjectItem.shareUri, + ) + .filter( + and_( + ShareObject.datasetUri == dataset_uri, # for this dataset + ShareObject.environmentUri == environment_uri, # for this environment + ShareObjectItem.status.in_(share_item_shared_states), + or_( + ShareObject.owner == username, + ShareObject.principalId.in_(groups), + ), + ) + ) + .all() + ) + + return env_tables_shared + + @staticmethod + def get_dataset_table_by_uri(session, table_uri): + table: DatasetTable = session.query(DatasetTable).get(table_uri) + if not table: + raise exceptions.ObjectNotFound('DatasetTable', table_uri) + return table + + @staticmethod + def update_existing_tables_status(existing_tables, glue_tables): + for existing_table in existing_tables: + if existing_table.GlueTableName not in [t['Name'] for t in glue_tables]: + existing_table.LastGlueTableStatus = 'Deleted' + logger.info( + f'Table {existing_table.GlueTableName} status set to Deleted from Glue.' 
+ ) + + @staticmethod + def find_all_active_tables(session, dataset_uri): + return ( + session.query(DatasetTable) + .filter( + and_( + DatasetTable.datasetUri == dataset_uri, + DatasetTable.LastGlueTableStatus != 'Deleted', + ) + ) + .all() + ) + + @staticmethod + def find_all_deleted_tables(session, dataset_uri): + return ( + session.query(DatasetTable) + .filter( + and_( + DatasetTable.datasetUri == dataset_uri, + DatasetTable.LastGlueTableStatus == 'Deleted', + ) + ) + .all() + ) + + @staticmethod + def sync_table_columns(session, dataset_table, glue_table): + + DatasetTableRepository.delete_all_table_columns(session, dataset_table) + + columns = [ + {**item, **{'columnType': 'column'}} + for item in glue_table.get('StorageDescriptor', {}).get('Columns', []) + ] + partitions = [ + {**item, **{'columnType': f'partition_{index}'}} + for index, item in enumerate(glue_table.get('PartitionKeys', [])) + ] + + logger.debug(f'Found columns {columns} for table {dataset_table}') + logger.debug(f'Found partitions {partitions} for table {dataset_table}') + + for col in columns + partitions: + table_col = DatasetTableColumn( + name=col['Name'], + description=col.get('Comment', 'No description provided'), + label=col['Name'], + owner=dataset_table.owner, + datasetUri=dataset_table.datasetUri, + tableUri=dataset_table.tableUri, + AWSAccountId=dataset_table.AWSAccountId, + GlueDatabaseName=dataset_table.GlueDatabaseName, + GlueTableName=dataset_table.GlueTableName, + region=dataset_table.region, + typeName=col['Type'], + columnType=col['columnType'], + ) + session.add(table_col) + + @staticmethod + def delete_all_table_columns(session, dataset_table): + session.query(DatasetTableColumn).filter( + and_( + DatasetTableColumn.GlueDatabaseName == dataset_table.GlueDatabaseName, + DatasetTableColumn.GlueTableName == dataset_table.GlueTableName, + ) + ).delete() + session.commit() + + @staticmethod + def get_table_by_s3_prefix(session, s3_prefix, accountid, region): + table: DatasetTable = ( + session.query(DatasetTable) + .filter( + and_( + DatasetTable.S3Prefix.startswith(s3_prefix), + DatasetTable.AWSAccountId == accountid, + DatasetTable.region == region, + ) + ) + .first() + ) + if not table: + logging.info(f'No table found for {s3_prefix}|{accountid}|{region}') + else: + logging.info( + f'Found table {table.tableUri}|{table.GlueTableName}|{table.S3Prefix}' + ) + return table + + @staticmethod + def find_dataset_tables(session, dataset_uri): + return ( + session.query(DatasetTable) + .filter(DatasetTable.datasetUri == dataset_uri) + .all() + ) + + @staticmethod + def delete_dataset_tables(session, dataset_uri) -> bool: + tables = ( + session.query(DatasetTable) + .filter( + and_( + DatasetTable.datasetUri == dataset_uri, + ) + ) + .all() + ) + for table in tables: + table.deleted = datetime.now() + return tables diff --git a/backend/dataall/modules/datasets/handlers/__init__.py b/backend/dataall/modules/datasets/handlers/__init__.py new file mode 100644 index 000000000..ce055bc1b --- /dev/null +++ b/backend/dataall/modules/datasets/handlers/__init__.py @@ -0,0 +1,9 @@ +""" +Contains the handlers that are needed for async +processing in a separate lambda function +""" +from dataall.modules.datasets.handlers import ( + glue_table_sync_handler, glue_profiling_handler, glue_dataset_handler +) + +__all__ = ["glue_table_sync_handler", "glue_profiling_handler", "glue_dataset_handler"] diff --git a/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py
b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py new file mode 100644 index 000000000..60a4565d6 --- /dev/null +++ b/backend/dataall/modules/datasets/handlers/glue_dataset_handler.py @@ -0,0 +1,26 @@ +import logging + +from dataall.core.tasks.service_handlers import Worker +from dataall.core.tasks.db.task_models import Task +from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.db.dataset_models import Dataset + +log = logging.getLogger(__name__) + + +class DatasetCrawlerHandler: + + @staticmethod + @Worker.handler(path='glue.crawler.start') + def start_crawler(engine, task: Task): + with engine.scoped_session() as session: + dataset: Dataset = DatasetRepository.get_dataset_by_uri( + session, task.targetUri + ) + location = task.payload.get('location') + targets = {'S3Targets': [{'Path': location}]} + crawler = DatasetCrawler(dataset) + if location: + crawler.update_crawler(targets) + return crawler.start_crawler() diff --git a/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py new file mode 100644 index 000000000..847376df0 --- /dev/null +++ b/backend/dataall/modules/datasets/handlers/glue_profiling_handler.py @@ -0,0 +1,30 @@ +import logging + +from dataall.core.tasks.service_handlers import Worker +from dataall.core.tasks.db.task_models import Task +from dataall.modules.datasets.aws.glue_profiler_client import GlueDatasetProfilerClient +from dataall.modules.datasets.db.dataset_profiling_repositories import DatasetProfilingRepository +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.db.dataset_models import DatasetProfilingRun, Dataset + +log = logging.getLogger(__name__) + + +class DatasetProfilingGlueHandler: + """A handler for dataset profiling""" + + @staticmethod + @Worker.handler('glue.job.profiling_run_status') + def get_profiling_run(engine, task: Task): + with engine.scoped_session() as session: + profiling: DatasetProfilingRun = ( + DatasetProfilingRepository.get_profiling_run( + session, profiling_run_uri=task.targetUri + ) + ) + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, profiling.datasetUri) + status = GlueDatasetProfilerClient(dataset).get_job_status(profiling) + + profiling.status = status + session.commit() + return {"profiling_status": profiling.status} diff --git a/backend/dataall/modules/datasets/handlers/glue_table_sync_handler.py b/backend/dataall/modules/datasets/handlers/glue_table_sync_handler.py new file mode 100644 index 000000000..236595be5 --- /dev/null +++ b/backend/dataall/modules/datasets/handlers/glue_table_sync_handler.py @@ -0,0 +1,53 @@ +import logging + +from dataall.core.tasks.service_handlers import Worker +from dataall.base.aws.sts import SessionHelper +from dataall.core.tasks.db.task_models import Task +from dataall.modules.datasets.aws.glue_table_client import GlueTableClient +from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient +from dataall.modules.datasets_base.db.dataset_models import DatasetTableColumn, DatasetTable + +log = logging.getLogger(__name__) + + +class DatasetColumnGlueHandler: + """A handler for dataset table columns""" + + @staticmethod + @Worker.handler('glue.table.update_column') + def update_table_columns(engine, task: Task): + with engine.scoped_session() as 
session: + column: DatasetTableColumn = session.query(DatasetTableColumn).get(task.targetUri) + table: DatasetTable = session.query(DatasetTable).get(column.tableUri) + + aws_session = SessionHelper.remote_session(table.AWSAccountId) + + lf_client = LakeFormationTableClient(table=table, aws_session=aws_session) + lf_client.grant_pivot_role_all_table_permissions() + + glue_client = GlueTableClient(aws_session=aws_session, table=table) + original_table = glue_client.get_table() + updated_table = { + k: v + for k, v in original_table['Table'].items() + if k not in [ + 'CatalogId', + 'VersionId', + 'DatabaseName', + 'CreateTime', + 'UpdateTime', + 'CreatedBy', + 'IsRegisteredWithLakeFormation', + ] + } + all_columns = updated_table.get('StorageDescriptor', {}).get( + 'Columns', [] + ) + updated_table.get('PartitionKeys', []) + for col in all_columns: + if col['Name'] == column.name: + col['Comment'] = column.description + log.info( + f'Found column {column.name} adding description {column.description}' + ) + + glue_client.update_table_for_column(column.name, updated_table) diff --git a/backend/dataall/modules/datasets/indexers/__init__.py b/backend/dataall/modules/datasets/indexers/__init__.py new file mode 100644 index 000000000..faf66363b --- /dev/null +++ b/backend/dataall/modules/datasets/indexers/__init__.py @@ -0,0 +1 @@ +"""Contains dataset related indexers for OpenSearch""" diff --git a/backend/dataall/modules/datasets/indexers/dataset_catalog_indexer.py b/backend/dataall/modules/datasets/indexers/dataset_catalog_indexer.py new file mode 100644 index 000000000..210db834a --- /dev/null +++ b/backend/dataall/modules/datasets/indexers/dataset_catalog_indexer.py @@ -0,0 +1,26 @@ +import logging + +from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer +from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.db.dataset_models import Dataset +from dataall.modules.catalog.indexers.catalog_indexer import CatalogIndexer + +log = logging.getLogger(__name__) + + +class DatasetCatalogIndexer(CatalogIndexer): + """ + Dataset indexer for the catalog. 
Indexes all tables and folders of datasets. + Registers itself automatically when a CatalogIndexer instance is created + """ + + def index(self, session) -> int: + all_datasets: [Dataset] = DatasetRepository.list_all_active_datasets(session) + log.info(f'Found {len(all_datasets)} datasets') + indexed = 0 + for dataset in all_datasets: + tables = DatasetTableIndexer.upsert_all(session, dataset.datasetUri) + folders = DatasetLocationIndexer.upsert_all(session, dataset_uri=dataset.datasetUri) + indexed += len(tables) + len(folders) + 1 + return indexed diff --git a/backend/dataall/modules/datasets/indexers/dataset_indexer.py b/backend/dataall/modules/datasets/indexers/dataset_indexer.py new file mode 100644 index 000000000..cc010d71c --- /dev/null +++ b/backend/dataall/modules/datasets/indexers/dataset_indexer.py @@ -0,0 +1,54 @@ +"""Indexes Datasets in OpenSearch""" +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.organizations.db.organization_repositories import Organization +from dataall.modules.vote.db.vote_repositories import Vote +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets.db.dataset_location_repositories import DatasetLocationRepository +from dataall.modules.catalog.indexers.base_indexer import BaseIndexer + + +class DatasetIndexer(BaseIndexer): + + @classmethod + def upsert(cls, session, dataset_uri: str): + dataset = DatasetRepository.get_dataset_by_uri(session, dataset_uri) + env = EnvironmentService.get_environment_by_uri(session, dataset.environmentUri) + org = Organization.get_organization_by_uri(session, dataset.organizationUri) + + count_tables = DatasetRepository.count_dataset_tables(session, dataset_uri) + count_folders = DatasetLocationRepository.count_dataset_locations(session, dataset_uri) + count_upvotes = Vote.count_upvotes( + session, dataset_uri, target_type='dataset' + ) + + if dataset: + glossary = BaseIndexer._get_target_glossary_terms(session, dataset_uri) + BaseIndexer._index( + doc_id=dataset_uri, + doc={ + 'name': dataset.name, + 'owner': dataset.owner, + 'label': dataset.label, + 'admins': dataset.SamlAdminGroupName, + 'database': dataset.GlueDatabaseName, + 'source': dataset.S3BucketName, + 'resourceKind': 'dataset', + 'description': dataset.description, + 'classification': dataset.confidentiality, + 'tags': [t.replace('-', '') for t in dataset.tags or []], + 'topics': dataset.topics, + 'region': dataset.region.replace('-', ''), + 'environmentUri': env.environmentUri, + 'environmentName': env.name, + 'organizationUri': org.organizationUri, + 'organizationName': org.name, + 'created': dataset.created, + 'updated': dataset.updated, + 'deleted': dataset.deleted, + 'glossary': glossary, + 'tables': count_tables, + 'folders': count_folders, + 'upvotes': count_upvotes, + }, + ) + return dataset diff --git a/backend/dataall/modules/datasets/indexers/location_indexer.py b/backend/dataall/modules/datasets/indexers/location_indexer.py new file mode 100644 index 000000000..cc0c25c43 --- /dev/null +++ b/backend/dataall/modules/datasets/indexers/location_indexer.py @@ -0,0 +1,55 @@ +"""Indexes DatasetStorageLocation in OpenSearch""" +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.organizations.db.organization_repositories import Organization +from dataall.modules.datasets.db.dataset_location_repositories import DatasetLocationRepository +from dataall.modules.datasets_base.db.dataset_repositories
import DatasetRepository +from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer +from dataall.modules.catalog.indexers.base_indexer import BaseIndexer + + +class DatasetLocationIndexer(BaseIndexer): + + @classmethod + def upsert(cls, session, folder_uri: str): + folder = DatasetLocationRepository.get_location_by_uri(session, folder_uri) + + if folder: + dataset = DatasetRepository.get_dataset_by_uri(session, folder.datasetUri) + env = EnvironmentService.get_environment_by_uri(session, dataset.environmentUri) + org = Organization.get_organization_by_uri(session, dataset.organizationUri) + glossary = BaseIndexer._get_target_glossary_terms(session, folder_uri) + + BaseIndexer._index( + doc_id=folder_uri, + doc={ + 'name': folder.name, + 'admins': dataset.SamlAdminGroupName, + 'owner': folder.owner, + 'label': folder.label, + 'resourceKind': 'folder', + 'description': folder.description, + 'source': dataset.S3BucketName, + 'classification': dataset.confidentiality, + 'tags': [f.replace('-', '') for f in folder.tags or []], + 'topics': dataset.topics, + 'region': folder.region.replace('-', ''), + 'datasetUri': folder.datasetUri, + 'environmentUri': env.environmentUri, + 'environmentName': env.name, + 'organizationUri': org.organizationUri, + 'organizationName': org.name, + 'created': folder.created, + 'updated': folder.updated, + 'deleted': folder.deleted, + 'glossary': glossary, + }, + ) + DatasetIndexer.upsert(session=session, dataset_uri=folder.datasetUri) + return folder + + @classmethod + def upsert_all(cls, session, dataset_uri: str): + folders = DatasetLocationRepository.get_dataset_folders(session, dataset_uri) + for folder in folders: + DatasetLocationIndexer.upsert(session=session, folder_uri=folder.locationUri) + return folders diff --git a/backend/dataall/modules/datasets/indexers/table_indexer.py b/backend/dataall/modules/datasets/indexers/table_indexer.py new file mode 100644 index 000000000..4c5e624ee --- /dev/null +++ b/backend/dataall/modules/datasets/indexers/table_indexer.py @@ -0,0 +1,65 @@ +"""Indexes DatasetTable in OpenSearch""" + +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.organizations.db.organization_repositories import Organization +from dataall.modules.datasets.db.dataset_table_repositories import DatasetTableRepository +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer +from dataall.modules.catalog.indexers.base_indexer import BaseIndexer + + +class DatasetTableIndexer(BaseIndexer): + + @classmethod + def upsert(cls, session, table_uri: str): + table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + + if table: + dataset = DatasetRepository.get_dataset_by_uri(session, table.datasetUri) + env = EnvironmentService.get_environment_by_uri(session, dataset.environmentUri) + org = Organization.get_organization_by_uri(session, dataset.organizationUri) + glossary = BaseIndexer._get_target_glossary_terms(session, table_uri) + + tags = table.tags if table.tags else [] + BaseIndexer._index( + doc_id=table_uri, + doc={ + 'name': table.name, + 'admins': dataset.SamlAdminGroupName, + 'owner': table.owner, + 'label': table.label, + 'resourceKind': 'table', + 'description': table.description, + 'database': table.GlueDatabaseName, + 'source': table.S3BucketName, + 'classification': dataset.confidentiality, + 'tags': [t.replace('-', '') for t in tags or []], + 'topics': 
dataset.topics, + 'region': dataset.region.replace('-', ''), + 'datasetUri': table.datasetUri, + 'environmentUri': env.environmentUri, + 'environmentName': env.name, + 'organizationUri': org.organizationUri, + 'organizationName': org.name, + 'created': table.created, + 'updated': table.updated, + 'deleted': table.deleted, + 'glossary': glossary, + }, + ) + DatasetIndexer.upsert(session=session, dataset_uri=table.datasetUri) + return table + + @classmethod + def upsert_all(cls, session, dataset_uri: str): + tables = DatasetTableRepository.find_all_active_tables(session, dataset_uri) + for table in tables: + DatasetTableIndexer.upsert(session=session, table_uri=table.tableUri) + return tables + + @classmethod + def remove_all_deleted(cls, session, dataset_uri: str): + tables = DatasetTableRepository.find_all_deleted_tables(session, dataset_uri) + for table in tables: + cls.delete_doc(doc_id=table.tableUri) + return tables diff --git a/backend/dataall/modules/datasets/services/__init__.py b/backend/dataall/modules/datasets/services/__init__.py new file mode 100644 index 000000000..03ef29863 --- /dev/null +++ b/backend/dataall/modules/datasets/services/__init__.py @@ -0,0 +1 @@ +"""Contains business logic for datasets""" diff --git a/backend/dataall/modules/datasets/services/dataset_column_service.py b/backend/dataall/modules/datasets/services/dataset_column_service.py new file mode 100644 index 000000000..4226022bf --- /dev/null +++ b/backend/dataall/modules/datasets/services/dataset_column_service.py @@ -0,0 +1,60 @@ +from dataall.core.tasks.service_handlers import Worker +from dataall.base.aws.sts import SessionHelper +from dataall.base.context import get_context +from dataall.core.permissions.permission_checker import has_resource_permission +from dataall.core.tasks.db.task_models import Task +from dataall.modules.datasets.aws.glue_table_client import GlueTableClient +from dataall.modules.datasets.db.dataset_column_repositories import DatasetColumnRepository +from dataall.modules.datasets.db.dataset_table_repositories import DatasetTableRepository +from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE +from dataall.modules.datasets_base.db.dataset_models import DatasetTable, DatasetTableColumn +from dataall.modules.datasets_base.services.permissions import GET_DATASET_TABLE + + +class DatasetColumnService: + + @staticmethod + def _get_dataset_uri_for_column(session, column_uri): + column: DatasetTableColumn = DatasetColumnRepository.get_column(session, column_uri) + return DatasetColumnService._get_dataset_uri(session, column.tableUri) + + @staticmethod + def _get_dataset_uri(session, table_uri): + table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + return table.datasetUri + + @staticmethod + @has_resource_permission(GET_DATASET_TABLE) + def paginate_active_columns_for_table(uri: str, filter=None): + with get_context().db_engine.scoped_session() as session: + return DatasetColumnRepository.paginate_active_columns_for_table(session, uri, filter) + + @classmethod + @has_resource_permission(UPDATE_DATASET_TABLE, parent_resource=_get_dataset_uri, param_name="table_uri") + def sync_table_columns(cls, table_uri: str): + context = get_context() + with context.db_engine.scoped_session() as session: + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + aws = SessionHelper.remote_session(table.AWSAccountId) + glue_table = GlueTableClient(aws, table).get_table() + + 
DatasetTableRepository.sync_table_columns( + session, table, glue_table['Table'] + ) + return cls.paginate_active_columns_for_table(uri=table_uri, filter={}) + + @staticmethod + @has_resource_permission(UPDATE_DATASET_TABLE, parent_resource=_get_dataset_uri_for_column, param_name="column_uri") + def update_table_column_description(column_uri: str, description) -> DatasetTableColumn: + with get_context().db_engine.scoped_session() as session: + column: DatasetTableColumn = DatasetColumnRepository.get_column(session, column_uri) + column.description = description + + task = Task( + action='glue.table.update_column', targetUri=column.columnUri + ) + session.add(task) + session.commit() + + Worker.queue(engine=get_context().db_engine, task_ids=[task.taskUri]) + return column diff --git a/backend/dataall/modules/datasets/services/dataset_location_service.py b/backend/dataall/modules/datasets/services/dataset_location_service.py new file mode 100644 index 000000000..e0f3c5aa3 --- /dev/null +++ b/backend/dataall/modules/datasets/services/dataset_location_service.py @@ -0,0 +1,107 @@ +from dataall.base.context import get_context +from dataall.modules.catalog.db.glossary_repositories import Glossary +from dataall.core.permissions.permission_checker import has_resource_permission, has_tenant_permission +from dataall.base.db.exceptions import ResourceShared, ResourceAlreadyExists +from dataall.modules.dataset_sharing.db.share_object_repositories import ShareObjectRepository +from dataall.modules.datasets.aws.s3_location_client import S3LocationClient +from dataall.modules.datasets.db.dataset_location_repositories import DatasetLocationRepository +from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer +from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_FOLDER, MANAGE_DATASETS, \ + CREATE_DATASET_FOLDER, LIST_DATASET_FOLDERS, DELETE_DATASET_FOLDER +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository + + +class DatasetLocationService: + @staticmethod + def _get_dataset_uri(session, uri): + location = DatasetLocationRepository.get_location_by_uri(session, uri) + return location.datasetUri + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(CREATE_DATASET_FOLDER) + def create_storage_location(uri: str, data: dict): + with get_context().db_engine.scoped_session() as session: + exists = DatasetLocationRepository.exists(session, uri, data['prefix']) + + if exists: + raise ResourceAlreadyExists( + action='Create Folder', + message=f'Folder: {data["prefix"]} already exist on dataset {uri}', + ) + + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + location = DatasetLocationRepository.create_dataset_location(session, dataset, data) + + if 'terms' in data.keys(): + DatasetLocationService._create_glossary_links(session, location, data['terms']) + + S3LocationClient(location).create_bucket_prefix() + + DatasetLocationIndexer.upsert(session=session, folder_uri=location.locationUri) + return location + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(LIST_DATASET_FOLDERS) + def list_dataset_locations(uri: str, filter: dict = None): + with get_context().db_engine.scoped_session() as session: + return DatasetLocationRepository.list_dataset_locations( + session=session, uri=uri, data=filter + ) + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(LIST_DATASET_FOLDERS, parent_resource=_get_dataset_uri) + def 
get_storage_location(uri): + with get_context().db_engine.scoped_session() as session: + return DatasetLocationRepository.get_location_by_uri(session, uri) + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(UPDATE_DATASET_FOLDER, parent_resource=_get_dataset_uri) + def update_storage_location(uri: str, data: dict): + with get_context().db_engine.scoped_session() as session: + location = DatasetLocationRepository.get_location_by_uri(session, uri) + for k in data.keys(): + setattr(location, k, data.get(k)) + + if 'terms' in data.keys(): + DatasetLocationService._create_glossary_links(session, location, data['terms']) + + DatasetLocationIndexer.upsert(session, folder_uri=location.locationUri) + + return location + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(DELETE_DATASET_FOLDER, parent_resource=_get_dataset_uri) + def remove_storage_location(uri: str = None): + with get_context().db_engine.scoped_session() as session: + location = DatasetLocationRepository.get_location_by_uri(session, uri) + has_shares = ShareObjectRepository.has_shared_items(session, location.locationUri) + if has_shares: + raise ResourceShared( + action=DELETE_DATASET_FOLDER, + message='Revoke all folder shares before deletion', + ) + + ShareObjectRepository.delete_shares(session, location.locationUri) + DatasetLocationRepository.delete(session, location) + Glossary.delete_glossary_terms_links( + session, + target_uri=location.locationUri, + target_type='DatasetStorageLocation', + ) + DatasetLocationIndexer.delete_doc(doc_id=location.locationUri) + return True + + @staticmethod + def _create_glossary_links(session, location, terms): + Glossary.set_glossary_terms_links( + session, + get_context().username, + location.locationUri, + 'DatasetStorageLocation', + terms + ) diff --git a/backend/dataall/modules/datasets/services/dataset_permissions.py b/backend/dataall/modules/datasets/services/dataset_permissions.py new file mode 100644 index 000000000..7c9c3af3a --- /dev/null +++ b/backend/dataall/modules/datasets/services/dataset_permissions.py @@ -0,0 +1,85 @@ +from itertools import chain + +from dataall.core.permissions.permissions import TENANT_ALL, TENANT_ALL_WITH_DESC, RESOURCES_ALL, RESOURCES_ALL_WITH_DESC, \ + ENVIRONMENT_INVITED, ENVIRONMENT_INVITATION_REQUEST, ENVIRONMENT_ALL +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ + +MANAGE_DATASETS = 'MANAGE_DATASETS' + +TENANT_ALL.append(MANAGE_DATASETS) +TENANT_ALL_WITH_DESC[MANAGE_DATASETS] = 'Manage datasets' + +""" +DATASET PERMISSIONS +""" + +GET_DATASET = 'GET_DATASET' +LIST_DATASET_FOLDERS = 'LIST_DATASET_FOLDERS' +CREDENTIALS_DATASET = 'CREDENTIALS_DATASET' + +DATASET_READ = [ + GET_DATASET, + LIST_DATASET_FOLDERS, + CREDENTIALS_DATASET, +] + + +UPDATE_DATASET = 'UPDATE_DATASET' +SYNC_DATASET = 'SYNC_DATASET' +CRAWL_DATASET = 'CRAWL_DATASET' +DELETE_DATASET = 'DELETE_DATASET' +IMPORT_DATASET = 'IMPORT_DATASET' +DELETE_DATASET_TABLE = 'DELETE_DATASET_TABLE' +UPDATE_DATASET_TABLE = 'UPDATE_DATASET_TABLE' +PROFILE_DATASET_TABLE = 'PROFILE_DATASET_TABLE' +CREATE_DATASET_FOLDER = 'CREATE_DATASET_FOLDER' +DELETE_DATASET_FOLDER = 'DELETE_DATASET_FOLDER' +UPDATE_DATASET_FOLDER = 'UPDATE_DATASET_FOLDER' + +DATASET_WRITE = [ + UPDATE_DATASET, + SYNC_DATASET, + IMPORT_DATASET, + CREDENTIALS_DATASET, + CRAWL_DATASET, + DELETE_DATASET, + UPDATE_DATASET_TABLE, + DELETE_DATASET_TABLE, + PROFILE_DATASET_TABLE, + CREATE_DATASET_FOLDER, + DELETE_DATASET_FOLDER, + 
UPDATE_DATASET_FOLDER, + LIST_DATASET_FOLDERS, +] + +DATASET_ALL = list(set(DATASET_WRITE + DATASET_READ)) +RESOURCES_ALL.extend(DATASET_ALL) + + +RESOURCES_ALL.extend(DATASET_TABLE_READ) + +""" +DATASET PERMISSIONS FOR ENVIRONMENT +""" + +CREATE_DATASET = 'CREATE_DATASET' +LIST_ENVIRONMENT_DATASETS = 'LIST_ENVIRONMENT_DATASETS' + +ENVIRONMENT_INVITED.append(CREATE_DATASET) +ENVIRONMENT_INVITED.append(LIST_ENVIRONMENT_DATASETS) + +ENVIRONMENT_INVITATION_REQUEST.append(CREATE_DATASET) +ENVIRONMENT_INVITATION_REQUEST.append(LIST_ENVIRONMENT_DATASETS) + +ENVIRONMENT_ALL.append(CREATE_DATASET) +ENVIRONMENT_ALL.append(LIST_ENVIRONMENT_DATASETS) + +RESOURCES_ALL.append(CREATE_DATASET) +RESOURCES_ALL.append(LIST_ENVIRONMENT_DATASETS) + + +for perm in chain(DATASET_ALL, DATASET_TABLE_READ): + RESOURCES_ALL_WITH_DESC[perm] = perm + +RESOURCES_ALL_WITH_DESC[CREATE_DATASET] = 'Create datasets on this environment' +RESOURCES_ALL_WITH_DESC[LIST_ENVIRONMENT_DATASETS] = "List datasets on this environment" diff --git a/backend/dataall/modules/datasets/services/dataset_profiling_service.py b/backend/dataall/modules/datasets/services/dataset_profiling_service.py new file mode 100644 index 000000000..802aef882 --- /dev/null +++ b/backend/dataall/modules/datasets/services/dataset_profiling_service.py @@ -0,0 +1,123 @@ +import json + +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.tasks.service_handlers import Worker +from dataall.base.context import get_context +from dataall.core.environment.db.environment_models import Environment +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.permissions.permission_checker import has_resource_permission +from dataall.core.tasks.db.task_models import Task +from dataall.base.db.exceptions import ObjectNotFound +from dataall.modules.datasets.aws.glue_profiler_client import GlueDatasetProfilerClient +from dataall.modules.datasets.aws.s3_profiler_client import S3ProfilerClient +from dataall.modules.datasets.db.dataset_profiling_repositories import DatasetProfilingRepository +from dataall.modules.datasets.db.dataset_table_repositories import DatasetTableRepository +from dataall.modules.datasets.services.dataset_permissions import PROFILE_DATASET_TABLE, GET_DATASET +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.db.enums import ConfidentialityClassification +from dataall.modules.datasets_base.db.dataset_models import DatasetProfilingRun, DatasetTable +from dataall.modules.datasets_base.services.permissions import GET_DATASET_TABLE, PREVIEW_DATASET_TABLE + + +class DatasetProfilingService: + @staticmethod + @has_resource_permission(PROFILE_DATASET_TABLE) + def start_profiling_run(uri, table_uri, glue_table_name): + context = get_context() + with context.db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + + if table_uri and not glue_table_name: + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + if not table: + raise ObjectNotFound('DatasetTable', table_uri) + glue_table_name = table.GlueTableName + + environment: Environment = EnvironmentService.get_environment_by_uri(session, dataset.environmentUri) + if not environment: + raise ObjectNotFound('Environment', dataset.environmentUri) + + run = DatasetProfilingRepository.save_profiling( + session=session, + dataset=dataset, + env=environment, + 
glue_table_name=glue_table_name, + ) + + run_id = GlueDatasetProfilerClient(dataset).run_job(run) + + DatasetProfilingRepository.update_run( + session, + run_uri=run.profilingRunUri, + glue_job_run_id=run_id, + ) + + return run + + @staticmethod + def resolve_profiling_run_status(run_uri): + context = get_context() + with context.db_engine.scoped_session() as session: + task = Task( + targetUri=run_uri, action='glue.job.profiling_run_status' + ) + session.add(task) + Worker.queue(engine=context.db_engine, task_ids=[task.taskUri]) + + @staticmethod + @has_resource_permission(GET_DATASET) + def list_profiling_runs(uri): + with get_context().db_engine.scoped_session() as session: + return DatasetProfilingRepository.list_profiling_runs(session, uri) + + @classmethod + @has_resource_permission(GET_DATASET_TABLE) + def get_dataset_table_profiling_run(cls, uri: str): + with get_context().db_engine.scoped_session() as session: + cls._check_preview_permissions_if_needed(session, table_uri=uri) + run: DatasetProfilingRun = ( + DatasetProfilingRepository.get_table_last_profiling_run(session, uri) + ) + + if run: + if not run.results: + table = DatasetTableRepository.get_dataset_table_by_uri(session, uri) + dataset = DatasetRepository.get_dataset_by_uri(session, table.datasetUri) + environment = EnvironmentService.get_environment_by_uri(session, dataset.environmentUri) + content = S3ProfilerClient(environment).get_profiling_results_from_s3(dataset, table, run) + if content: + results = json.loads(content) + run.results = results + + if not run.results: + run_with_results = ( + DatasetProfilingRepository.get_table_last_profiling_run_with_results(session, uri) + ) + if run_with_results: + run = run_with_results + + return run + + @classmethod + @has_resource_permission(GET_DATASET_TABLE) + def list_table_profiling_runs(cls, uri: str): + with get_context().db_engine.scoped_session() as session: + cls._check_preview_permissions_if_needed(session=session, table_uri=uri) + return DatasetProfilingRepository.list_table_profiling_runs(session, uri) + + @staticmethod + def _check_preview_permissions_if_needed(session, table_uri): + context = get_context() + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( + session, table_uri + ) + dataset = DatasetRepository.get_dataset_by_uri(session, table.datasetUri) + if dataset.confidentiality != ConfidentialityClassification.Unclassified.value: + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=table.tableUri, + permission_name=PREVIEW_DATASET_TABLE, + ) + return True diff --git a/backend/dataall/modules/datasets/services/dataset_service.py b/backend/dataall/modules/datasets/services/dataset_service.py new file mode 100644 index 000000000..bd8500710 --- /dev/null +++ b/backend/dataall/modules/datasets/services/dataset_service.py @@ -0,0 +1,534 @@ +import json +import logging + +from dataall.base.aws.quicksight import QuicksightClient +from dataall.base.db import exceptions +from dataall.core.tasks.service_handlers import Worker +from dataall.base.aws.sts import SessionHelper +from dataall.base.context import get_context +from dataall.core.environment.env_permission_checker import has_group_permission +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.permissions.permission_checker import has_resource_permission, 
has_tenant_permission +from dataall.core.stacks.api import stack_helper +from dataall.core.stacks.db.keyvaluetag_repositories import KeyValueTag +from dataall.core.stacks.db.stack_repositories import Stack +from dataall.core.tasks.db.task_models import Task +from dataall.modules.catalog.db.glossary_repositories import Glossary +from dataall.modules.vote.db.vote_repositories import Vote +from dataall.base.db.exceptions import AWSResourceNotFound, UnauthorizedOperation +from dataall.modules.dataset_sharing.aws.kms_client import KmsClient +from dataall.modules.dataset_sharing.db.share_object_models import ShareObject +from dataall.modules.dataset_sharing.db.share_object_repositories import ShareObjectRepository +from dataall.modules.dataset_sharing.services.share_permissions import SHARE_OBJECT_APPROVER +from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler +from dataall.modules.datasets.aws.s3_dataset_client import S3DatasetClient +from dataall.modules.datasets.db.dataset_location_repositories import DatasetLocationRepository +from dataall.modules.datasets.db.dataset_table_repositories import DatasetTableRepository +from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer +from dataall.modules.datasets.services.dataset_permissions import CREDENTIALS_DATASET, CRAWL_DATASET, \ + DELETE_DATASET, MANAGE_DATASETS, UPDATE_DATASET, LIST_ENVIRONMENT_DATASETS, \ + CREATE_DATASET, DATASET_ALL, DATASET_READ, IMPORT_DATASET +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.db.enums import DatasetRole +from dataall.modules.datasets_base.db.dataset_models import Dataset, DatasetTable +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ + +log = logging.getLogger(__name__) + + +class DatasetService: + + @staticmethod + def check_dataset_account(session, environment): + dashboards_enabled = EnvironmentService.get_boolean_env_param(session, environment, "dashboardsEnabled") + if dashboards_enabled: + quicksight_subscription = QuicksightClient.check_quicksight_enterprise_subscription( + AwsAccountId=environment.AwsAccountId) + if quicksight_subscription: + group = QuicksightClient.create_quicksight_group(AwsAccountId=environment.AwsAccountId) + return True if group else False + return True + + @staticmethod + def check_imported_resources(environment, data): + kms_alias = data.get('KmsKeyAlias') + if kms_alias not in [None, "Undefined", "", "SSE-S3"]: + key_id = KmsClient(environment.AwsAccountId, environment.region).get_key_id( + key_alias=f"alias/{kms_alias}" + ) + if not key_id: + raise exceptions.AWSResourceNotFound( + action=IMPORT_DATASET, + message=f'KMS key with alias={kms_alias} cannot be found', + ) + return True + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(CREATE_DATASET) + @has_group_permission(CREATE_DATASET) + def create_dataset(uri, admin_group, data: dict): + context = get_context() + with context.db_engine.scoped_session() as session: + environment = EnvironmentService.get_environment_by_uri(session, uri) + DatasetService.check_dataset_account(session=session, environment=environment) + if data.get('imported', False): + DatasetService.check_imported_resources(environment=environment, data=data) + + dataset = DatasetRepository.create_dataset( + session=session, + username=context.username, + uri=uri, + data=data, + ) + + ResourcePolicy.attach_resource_policy( + session=session, + group=data['SamlAdminGroupName'], 
+ permissions=DATASET_ALL, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, + ) + if dataset.stewards and dataset.stewards != dataset.SamlAdminGroupName: + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.stewards, + permissions=DATASET_READ, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, + ) + if environment.SamlGroupName != dataset.SamlAdminGroupName: + ResourcePolicy.attach_resource_policy( + session=session, + group=environment.SamlGroupName, + permissions=DATASET_ALL, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, + ) + + DatasetService._create_dataset_stack(session, dataset) + + DatasetIndexer.upsert( + session=session, dataset_uri=dataset.datasetUri + ) + + DatasetService._deploy_dataset_stack(dataset) + + dataset.userRoleForDataset = DatasetRole.Creator.value + + return dataset + + @staticmethod + def import_dataset(uri, admin_group, data): + data['imported'] = True + return DatasetService.create_dataset(uri=uri, admin_group=admin_group, data=data) + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + def get_dataset(uri): + context = get_context() + with context.db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + if dataset.SamlAdminGroupName in context.groups: + dataset.userRoleForDataset = DatasetRole.Admin.value + return dataset + + @staticmethod + def get_file_upload_presigned_url(uri: str, data: dict): + with get_context().db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + return S3DatasetClient(dataset).get_file_upload_presigned_url(data) + + @staticmethod + def list_datasets(data: dict): + context = get_context() + with context.db_engine.scoped_session() as session: + return ShareObjectRepository.paginated_user_datasets( + session, context.username, context.groups, data=data + ) + + @staticmethod + def list_locations(dataset_uri, data: dict): + with get_context().db_engine.scoped_session() as session: + return DatasetLocationRepository.paginated_dataset_locations( + session=session, + uri=dataset_uri, + data=data, + ) + + @staticmethod + def list_tables(dataset_uri, data: dict): + context = get_context() + with context.db_engine.scoped_session() as session: + return DatasetRepository.paginated_dataset_tables( + session=session, + uri=dataset_uri, + data=data, + ) + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(UPDATE_DATASET) + def update_dataset(uri: str, data: dict): + with get_context().db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + environment = EnvironmentService.get_environment_by_uri(session, dataset.environmentUri) + DatasetService.check_dataset_account(session=session, environment=environment) + if data.get('imported', False): + DatasetService.check_imported_resources(environment=environment, data=data) + + username = get_context().username + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, uri) + if data and isinstance(data, dict): + for k in data.keys(): + if k != 'stewards': + setattr(dataset, k, data.get(k)) + if data.get('KmsAlias') not in ["Undefined"]: + dataset.KmsAlias = "SSE-S3" if data.get('KmsAlias') == "" else data.get('KmsAlias') + dataset.importedKmsKey = False if data.get('KmsAlias') == "" else True + if data.get('stewards') and data.get('stewards') != dataset.stewards: + if data.get('stewards') != dataset.SamlAdminGroupName: + 
DatasetService._transfer_stewardship_to_new_stewards( + session, dataset, data['stewards'] + ) + dataset.stewards = data['stewards'] + else: + DatasetService._transfer_stewardship_to_owners(session, dataset) + dataset.stewards = dataset.SamlAdminGroupName + + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.SamlAdminGroupName, + permissions=DATASET_ALL, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, + ) + if data.get('terms'): + Glossary.set_glossary_terms_links(session, username, uri, 'Dataset', data.get('terms')) + DatasetRepository.update_dataset_activity(session, dataset, username) + + DatasetIndexer.upsert(session, dataset_uri=uri) + + DatasetService._deploy_dataset_stack(dataset) + + return dataset + + @staticmethod + def get_dataset_statistics(dataset: Dataset): + with get_context().db_engine.scoped_session() as session: + count_tables = DatasetRepository.count_dataset_tables(session, dataset.datasetUri) + count_locations = DatasetLocationRepository.count_dataset_locations( + session, dataset.datasetUri + ) + count_upvotes = Vote.count_upvotes( + session, dataset.datasetUri, target_type='dataset' + ) + return { + 'tables': count_tables or 0, + 'locations': count_locations or 0, + 'upvotes': count_upvotes or 0, + } + + @staticmethod + @has_resource_permission(CREDENTIALS_DATASET) + def get_dataset_assume_role_url(uri): + context = get_context() + with context.db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + if dataset.SamlAdminGroupName not in context.groups: + share = ShareObjectRepository.get_share_by_dataset_attributes( + session=session, + dataset_uri=uri, + dataset_owner=context.username + ) + shared_environment = EnvironmentService.get_environment_by_uri( + session=session, + uri=share.environmentUri + ) + env_group = EnvironmentService.get_environment_group( + session=session, + group_uri=share.principalId, + environment_uri=share.environmentUri + ) + role_arn = env_group.environmentIAMRoleArn + account_id = shared_environment.AwsAccountId + else: + role_arn = dataset.IAMDatasetAdminRoleArn + account_id = dataset.AwsAccountId + + pivot_session = SessionHelper.remote_session(account_id) + aws_session = SessionHelper.get_session( + base_session=pivot_session, role_arn=role_arn + ) + url = SessionHelper.get_console_access_url( + aws_session, + region=dataset.region, + bucket=dataset.S3BucketName, + ) + return url + + @staticmethod + @has_resource_permission(CRAWL_DATASET) + def start_crawler(uri: str, data: dict = None): + engine = get_context().db_engine + with engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + + location = ( + f's3://{dataset.S3BucketName}/{data.get("prefix")}' + if data.get('prefix') + else f's3://{dataset.S3BucketName}' + ) + + crawler = DatasetCrawler(dataset).get_crawler() + if not crawler: + raise AWSResourceNotFound( + action=CRAWL_DATASET, + message=f'Crawler {dataset.GlueCrawlerName} can not be found', + ) + + task = Task( + targetUri=uri, + action='glue.crawler.start', + payload={'location': location}, + ) + session.add(task) + session.commit() + + Worker.queue(engine=engine, task_ids=[task.taskUri]) + + return { + 'Name': dataset.GlueCrawlerName, + 'AwsAccountId': dataset.AwsAccountId, + 'region': dataset.region, + 'status': crawler.get('LastCrawl', {}).get('Status', 'N/A'), + } + + @staticmethod + def list_dataset_share_objects(dataset: Dataset, data: dict = None): + with 
get_context().db_engine.scoped_session() as session: + return ShareObjectRepository.paginated_dataset_shares( + session=session, + uri=dataset.datasetUri, + data=data + ) + + @staticmethod + @has_resource_permission(CREDENTIALS_DATASET) + def generate_dataset_access_token(uri): + with get_context().db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + + pivot_session = SessionHelper.remote_session(dataset.AwsAccountId) + aws_session = SessionHelper.get_session( + base_session=pivot_session, role_arn=dataset.IAMDatasetAdminRoleArn + ) + c = aws_session.get_credentials() + credentials = { + 'AccessKey': c.access_key, + 'SessionKey': c.secret_key, + 'sessionToken': c.token, + } + + return json.dumps(credentials) + + @staticmethod + def get_dataset_stack(dataset: Dataset): + return stack_helper.get_stack_with_cfn_resources( + targetUri=dataset.datasetUri, + environmentUri=dataset.environmentUri, + ) + + @staticmethod + @has_resource_permission(DELETE_DATASET) + def delete_dataset(uri: str, delete_from_aws: bool = False): + context = get_context() + with context.db_engine.scoped_session() as session: + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, uri) + env = EnvironmentService.get_environment_by_uri( + session, dataset.environmentUri + ) + shares = ShareObjectRepository.list_dataset_shares_with_existing_shared_items(session, uri) + if shares: + raise UnauthorizedOperation( + action=DELETE_DATASET, + message=f'Dataset {dataset.name} is shared with other teams. ' + 'Revoke all dataset shares before deletion.', + ) + + tables = [t.tableUri for t in DatasetRepository.get_dataset_tables(session, uri)] + for uri in tables: + DatasetIndexer.delete_doc(doc_id=uri) + + folders = [f.locationUri for f in DatasetLocationRepository.get_dataset_folders(session, uri)] + for uri in folders: + DatasetIndexer.delete_doc(doc_id=uri) + + DatasetIndexer.delete_doc(doc_id=uri) + + ShareObjectRepository.delete_shares_with_no_shared_items(session, uri) + DatasetService.delete_dataset_term_links(session, uri) + DatasetTableRepository.delete_dataset_tables(session, dataset.datasetUri) + DatasetLocationRepository.delete_dataset_locations(session, dataset.datasetUri) + KeyValueTag.delete_key_value_tags(session, dataset.datasetUri, 'dataset') + Vote.delete_votes(session, dataset.datasetUri, 'dataset') + + ResourcePolicy.delete_resource_policy( + session=session, resource_uri=uri, group=dataset.SamlAdminGroupName + ) + env = EnvironmentService.get_environment_by_uri(session, dataset.environmentUri) + if dataset.SamlAdminGroupName != env.SamlGroupName: + ResourcePolicy.delete_resource_policy( + session=session, resource_uri=uri, group=env.SamlGroupName + ) + if dataset.stewards: + ResourcePolicy.delete_resource_policy( + session=session, resource_uri=uri, group=dataset.stewards + ) + + DatasetRepository.delete_dataset(session, dataset) + + if delete_from_aws: + stack_helper.delete_stack( + target_uri=uri, + accountid=env.AwsAccountId, + cdk_role_arn=env.CDKRoleArn, + region=env.region, + ) + stack_helper.deploy_stack(dataset.environmentUri) + return True + + @staticmethod + def _deploy_dataset_stack(dataset: Dataset): + """ + Each dataset stack deployment triggers environment stack update + to rebuild teams IAM roles data access policies + """ + stack_helper.deploy_stack(dataset.datasetUri) + stack_helper.deploy_stack(dataset.environmentUri) + + @staticmethod + def _create_dataset_stack(session, dataset: Dataset) -> Stack: + return Stack.create_stack( + 
session=session, + environment_uri=dataset.environmentUri, + target_uri=dataset.datasetUri, + target_label=dataset.label, + target_type='dataset', + payload={ + 'bucket_name': dataset.S3BucketName, + 'database_name': dataset.GlueDatabaseName, + 'role_name': dataset.S3BucketName, + 'user_name': dataset.S3BucketName, + }, + ) + + @staticmethod + @has_resource_permission(LIST_ENVIRONMENT_DATASETS) + def list_datasets_created_in_environment(uri: str, data: dict): + with get_context().db_engine.scoped_session() as session: + return DatasetRepository.paginated_environment_datasets( + session=session, + uri=uri, + data=data, + ) + + @staticmethod + def list_datasets_owned_by_env_group(env_uri: str, group_uri: str, data: dict): + with get_context().db_engine.scoped_session() as session: + return DatasetRepository.paginated_environment_group_datasets( + session=session, + env_uri=env_uri, + group_uri=group_uri, + data=data, + ) + + @staticmethod + def _transfer_stewardship_to_owners(session, dataset): + env = EnvironmentService.get_environment_by_uri(session, dataset.environmentUri) + if dataset.stewards != env.SamlGroupName: + ResourcePolicy.delete_resource_policy( + session=session, + group=dataset.stewards, + resource_uri=dataset.datasetUri, + ) + + # Remove Steward Resource Policy on Dataset Tables + dataset_tables = [t.tableUri for t in DatasetRepository.get_dataset_tables(session, dataset.datasetUri)] + for tableUri in dataset_tables: + if dataset.stewards != env.SamlGroupName: + ResourcePolicy.delete_resource_policy( + session=session, + group=dataset.stewards, + resource_uri=tableUri, + ) + + # Remove Steward Resource Policy on Dataset Share Objects + dataset_shares = ShareObjectRepository.find_dataset_shares(session, dataset.datasetUri) + if dataset_shares: + for share in dataset_shares: + ResourcePolicy.delete_resource_policy( + session=session, + group=dataset.stewards, + resource_uri=share.shareUri, + ) + return dataset + + @staticmethod + def _transfer_stewardship_to_new_stewards(session, dataset, new_stewards): + env = EnvironmentService.get_environment_by_uri(session, dataset.environmentUri) + if dataset.stewards != dataset.SamlAdminGroupName: + ResourcePolicy.delete_resource_policy( + session=session, + group=dataset.stewards, + resource_uri=dataset.datasetUri, + ) + ResourcePolicy.attach_resource_policy( + session=session, + group=new_stewards, + permissions=DATASET_READ, + resource_uri=dataset.datasetUri, + resource_type=Dataset.__name__, + ) + + dataset_tables = [t.tableUri for t in DatasetRepository.get_dataset_tables(session, dataset.datasetUri)] + for tableUri in dataset_tables: + if dataset.stewards != dataset.SamlAdminGroupName: + ResourcePolicy.delete_resource_policy( + session=session, + group=dataset.stewards, + resource_uri=tableUri, + ) + ResourcePolicy.attach_resource_policy( + session=session, + group=new_stewards, + permissions=DATASET_TABLE_READ, + resource_uri=tableUri, + resource_type=DatasetTable.__name__, + ) + + dataset_shares = ShareObjectRepository.find_dataset_shares(session, dataset.datasetUri) + if dataset_shares: + for share in dataset_shares: + ResourcePolicy.attach_resource_policy( + session=session, + group=new_stewards, + permissions=SHARE_OBJECT_APPROVER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + if dataset.stewards != dataset.SamlAdminGroupName: + ResourcePolicy.delete_resource_policy( + session=session, + group=dataset.stewards, + resource_uri=share.shareUri, + ) + return dataset + + @staticmethod + def 
delete_dataset_term_links(session, dataset_uri): + tables = [t.tableUri for t in DatasetRepository.get_dataset_tables(session, dataset_uri)] + for table_uri in tables: + Glossary.delete_glossary_terms_links(session, table_uri, 'DatasetTable') + Glossary.delete_glossary_terms_links(session, dataset_uri, 'Dataset') diff --git a/backend/dataall/modules/datasets/services/dataset_table_service.py b/backend/dataall/modules/datasets/services/dataset_table_service.py new file mode 100644 index 000000000..fdfdca51c --- /dev/null +++ b/backend/dataall/modules/datasets/services/dataset_table_service.py @@ -0,0 +1,184 @@ +import logging + +from dataall.base.context import get_context +from dataall.modules.catalog.db.glossary_repositories import Glossary +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.permissions.permission_checker import has_resource_permission, has_tenant_permission +from dataall.base.db.exceptions import ResourceShared +from dataall.modules.dataset_sharing.db.share_object_repositories import ShareObjectRepository +from dataall.modules.datasets.aws.athena_table_client import AthenaTableClient +from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler +from dataall.modules.datasets.db.dataset_table_repositories import DatasetTableRepository +from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer +from dataall.modules.datasets.services.dataset_permissions import UPDATE_DATASET_TABLE, MANAGE_DATASETS, \ + DELETE_DATASET_TABLE, SYNC_DATASET +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.db.enums import ConfidentialityClassification +from dataall.modules.datasets_base.db.dataset_models import DatasetTable, Dataset +from dataall.modules.datasets_base.services.permissions import PREVIEW_DATASET_TABLE, DATASET_TABLE_READ, \ + GET_DATASET_TABLE +from dataall.base.utils import json_utils + +log = logging.getLogger(__name__) + + +class DatasetTableService: + @staticmethod + def _get_dataset_uri(session, table_uri): + table = DatasetTableRepository.get_dataset_table_by_uri(session, table_uri) + return table.datasetUri + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + def get_table(uri: str): + with get_context().db_engine.scoped_session() as session: + return DatasetTableRepository.get_dataset_table_by_uri(session, uri) + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(UPDATE_DATASET_TABLE, parent_resource=_get_dataset_uri) + def update_table(uri: str, table_data: dict = None): + with get_context().db_engine.scoped_session() as session: + table = DatasetTableRepository.get_dataset_table_by_uri(session, uri) + + for k in [attr for attr in table_data.keys() if attr != 'terms']: + setattr(table, k, table_data.get(k)) + + DatasetTableRepository.save(session, table) + if 'terms' in table_data: + Glossary.set_glossary_terms_links( + session, get_context().username, table.tableUri, 'DatasetTable', table_data['terms'] + ) + + DatasetTableIndexer.upsert(session, table_uri=table.tableUri) + return table + + @staticmethod + @has_tenant_permission(MANAGE_DATASETS) + @has_resource_permission(DELETE_DATASET_TABLE, parent_resource=_get_dataset_uri) + def delete_table(uri: str): + with get_context().db_engine.scoped_session() as session: + table = DatasetTableRepository.get_dataset_table_by_uri(session, uri) 
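+            # Guard: a table that still has active share items cannot be deleted; the check below raises ResourceShared until all its shares are revoked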
+ has_share = ShareObjectRepository.has_shared_items(session, table.tableUri) + if has_share: + raise ResourceShared( + action=DELETE_DATASET_TABLE, + message='Revoke all table shares before deletion', + ) + + ShareObjectRepository.delete_shares(session, table.tableUri) + DatasetTableRepository.delete(session, table) + + Glossary.delete_glossary_terms_links( + session, target_uri=table.tableUri, target_type='DatasetTable' + ) + DatasetTableIndexer.delete_doc(doc_id=uri) + return True + + @staticmethod + def preview(table_uri: str): + context = get_context() + with context.db_engine.scoped_session() as session: + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri( + session, table_uri + ) + dataset = DatasetRepository.get_dataset_by_uri(session, table.datasetUri) + if ( + dataset.confidentiality != ConfidentialityClassification.Unclassified.value + ): + ResourcePolicy.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=table.tableUri, + permission_name=PREVIEW_DATASET_TABLE, + ) + env = EnvironmentService.get_environment_by_uri(session, dataset.environmentUri) + return AthenaTableClient(env, table).get_table(dataset_uri=dataset.datasetUri) + + @staticmethod + @has_resource_permission(GET_DATASET_TABLE) + def get_glue_table_properties(uri: str): + with get_context().db_engine.scoped_session() as session: + table: DatasetTable = DatasetTableRepository.get_dataset_table_by_uri(session, uri) + return json_utils.to_string(table.GlueTableProperties).replace('\\', ' ') + + @staticmethod + def list_shared_tables_by_env_dataset(dataset_uri: str, env_uri: str): + context = get_context() + with context.db_engine.scoped_session() as session: + return [ + {"tableUri": t.tableUri, "GlueTableName": t.GlueTableName} + for t in DatasetTableRepository.query_dataset_tables_shared_with_env( + session, env_uri, dataset_uri, context.username, context.groups + ) + ] + + @classmethod + @has_resource_permission(SYNC_DATASET) + def sync_tables_for_dataset(cls, uri): + context = get_context() + with context.db_engine.scoped_session() as session: + dataset = DatasetRepository.get_dataset_by_uri(session, uri) + + tables = DatasetCrawler(dataset).list_glue_database_tables() + cls.sync_existing_tables(session, dataset.datasetUri, glue_tables=tables) + DatasetTableIndexer.upsert_all( + session=session, dataset_uri=dataset.datasetUri + ) + DatasetTableIndexer.remove_all_deleted(session=session, dataset_uri=dataset.datasetUri) + return DatasetRepository.paginated_dataset_tables( + session=session, + uri=uri, + data={'page': 1, 'pageSize': 10}, + ) + + @staticmethod + def sync_existing_tables(session, dataset_uri, glue_tables=None): + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, dataset_uri) + if dataset: + existing_tables = DatasetTableRepository.find_dataset_tables(session, dataset_uri) + existing_table_names = [e.GlueTableName for e in existing_tables] + existing_dataset_tables_map = {t.GlueTableName: t for t in existing_tables} + + DatasetTableRepository.update_existing_tables_status(existing_tables, glue_tables) + log.info( + f'existing_tables={glue_tables}' + ) + for table in glue_tables: + if table['Name'] not in existing_table_names: + log.info( + f'Storing new table: {table} for dataset db {dataset.GlueDatabaseName}' + ) + updated_table = DatasetTableRepository.create_synced_table(session, dataset, table) + DatasetTableService._attach_dataset_table_permission(session, dataset, updated_table.tableUri) + else: + 
log.info( + f'Updating table: {table} for dataset db {dataset.GlueDatabaseName}' + ) + updated_table: DatasetTable = ( + existing_dataset_tables_map.get(table['Name']) + ) + updated_table.GlueTableProperties = json_utils.to_json( + table.get('Parameters', {}) + ) + + DatasetTableRepository.sync_table_columns(session, updated_table, table) + + return True + + @staticmethod + def _attach_dataset_table_permission(session, dataset: Dataset, table_uri): + # ADD DATASET TABLE PERMISSIONS + env = EnvironmentService.get_environment_by_uri(session, dataset.environmentUri) + permission_group = {dataset.SamlAdminGroupName, env.SamlGroupName, + dataset.stewards if dataset.stewards is not None else dataset.SamlAdminGroupName} + for group in permission_group: + ResourcePolicy.attach_resource_policy( + session=session, + group=group, + permissions=DATASET_TABLE_READ, + resource_uri=table_uri, + resource_type=DatasetTable.__name__, + ) diff --git a/backend/dataall/modules/datasets/tasks/__init__.py b/backend/dataall/modules/datasets/tasks/__init__.py new file mode 100644 index 000000000..da597f309 --- /dev/null +++ b/backend/dataall/modules/datasets/tasks/__init__.py @@ -0,0 +1 @@ +"""Code of the long-running tasks that run in ECS""" diff --git a/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py b/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py new file mode 100644 index 000000000..a2f995371 --- /dev/null +++ b/backend/dataall/modules/datasets/tasks/bucket_policy_updater.py @@ -0,0 +1,172 @@ +import json +import logging +import os +import sys + +from sqlalchemy import and_ + +from dataall.base.db import get_engine +from dataall.modules.dataset_sharing.db.share_object_repositories import ShareObjectRepository +from dataall.modules.datasets.aws.s3_dataset_client import S3DatasetBucketPolicyClient +from dataall.modules.datasets_base.db.dataset_models import Dataset + +root = logging.getLogger() +root.setLevel(logging.INFO) +if not root.hasHandlers(): + root.addHandler(logging.StreamHandler(sys.stdout)) +log = logging.getLogger(__name__) + + +class BucketPoliciesUpdater: + def __init__(self, engine, event=None): + self.engine = engine + self.event = event + self.reports = [] + + def sync_imported_datasets_bucket_policies(self): + with self.engine.scoped_session() as session: + imported_datasets = ( + session.query(Dataset) + .filter( + and_( + Dataset.imported, + Dataset.deleted.is_(None), + ) + ) + .all() + ) + log.info(f'Found {len(imported_datasets)} imported datasets') + + for dataset in imported_datasets: + account_prefixes = {} + + shared_tables = ShareObjectRepository.get_shared_tables(session, dataset) + log.info( + f'Found {len(shared_tables)} shared tables with dataset {dataset.S3BucketName}' + ) + + shared_folders = ShareObjectRepository.get_shared_folders(session, dataset) + log.info( + f'Found {len(shared_folders)} shared folders with dataset {dataset.S3BucketName}' + ) + + for table in shared_tables: + data_prefix = self.clear_table_location_from_delta_path(table) + prefix = data_prefix.rstrip('/') + '/*' + accountid = table.TargetAwsAccountId + + prefix = f"arn:aws:s3:::{prefix.split('s3://')[1]}" + self.group_prefixes_by_accountid( + accountid, prefix, account_prefixes + ) + + bucket = ( + f"arn:aws:s3:::{prefix.split('arn:aws:s3:::')[1].split('/')[0]}" + ) + self.group_prefixes_by_accountid( + accountid, bucket, account_prefixes + ) + + for folder in shared_folders: + prefix = f'arn:aws:s3:::{folder.S3Prefix}' + '/*' + accountid = folder.AwsAccountId + 
self.group_prefixes_by_accountid( + accountid, prefix, account_prefixes + ) + bucket = ( + f"arn:aws:s3:::{prefix.split('arn:aws:s3:::')[1].split('/')[0]}" + ) + self.group_prefixes_by_accountid( + accountid, bucket, account_prefixes + ) + + client = S3DatasetBucketPolicyClient(dataset) + + policy = client.get_bucket_policy() + + BucketPoliciesUpdater.update_policy(account_prefixes, policy) + + report = client.put_bucket_policy(policy) + + self.reports.append(report) + + if any(r['status'] == 'FAILED' for r in self.reports): + raise Exception( + 'Failed to update one or more bucket policies' + f'Check the reports: {self.reports}' + ) + return self.reports + + @staticmethod + def clear_table_location_from_delta_path(table): + data_prefix = ( + table.S3Prefix + if '/packages.delta' not in table.S3Prefix + else table.S3Prefix.replace('/packages.delta', '') + ) + data_prefix = ( + data_prefix + if '/_symlink_format_manifest' not in data_prefix + else data_prefix.replace('/_symlink_format_manifest', '') + ) + return data_prefix + + @staticmethod + def update_policy(account_prefixes, policy): + log.info('Updating Policy') + statements = policy['Statement'] + for key, value in account_prefixes.items(): + added = False + for s in statements: + if key in s.get('Principal').get('AWS') and 'DA' in s.get('Sid'): + log.info(f'Principal already on the policy {key}') + added = True + for v in value: + if v not in s.get('Resource'): + existing_resources = ( + list(s.get('Resource')) + if not isinstance(s.get('Resource'), list) + else s.get('Resource') + ) + existing_resources.append(v) + s['Resource'] = existing_resources + break + if not added: + log.info( + f'Principal {key} with permissions {value} ' + f'Not on the policy adding it' + ) + statements.append( + { + 'Sid': f'DA{key}', + 'Effect': 'Allow', + 'Action': ['s3:Get*', 's3:List*'], + 'Resource': value + if isinstance(value, list) and len(value) > 1 + else value, + 'Principal': {'AWS': key}, + } + ) + policy.update({'Statement': statements}) + log.info(f'Final Policy --> {policy}') + return policy + + @classmethod + def group_prefixes_by_accountid(cls, accountid, prefix, account_prefixes): + if account_prefixes.get(accountid): + prefixes = account_prefixes[accountid] + if prefix not in prefixes: + prefixes.append(prefix) + account_prefixes[accountid] = prefixes + else: + account_prefixes[accountid] = [prefix] + return account_prefixes + + +if __name__ == '__main__': + ENVNAME = os.environ.get('envname', 'local') + ENGINE = get_engine(envname=ENVNAME) + log.info('Updating bucket policies for shared datasets...') + service = BucketPoliciesUpdater(engine=ENGINE) + service.sync_imported_datasets_bucket_policies() + log.info('Bucket policies for shared datasets update successfully...') diff --git a/backend/dataall/modules/datasets/tasks/dataset_stack_finder.py b/backend/dataall/modules/datasets/tasks/dataset_stack_finder.py new file mode 100644 index 000000000..5f2a90104 --- /dev/null +++ b/backend/dataall/modules/datasets/tasks/dataset_stack_finder.py @@ -0,0 +1,19 @@ +import logging +from typing import List + +from dataall.core.environment.services.env_stack_finder import StackFinder +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.db.dataset_models import Dataset + +log = logging.getLogger(__name__) + + +class DatasetStackFinder(StackFinder): + """ + Dataset stack finder. 
Looks for datasets stack to update + Register automatically itself when StackFinder instance is created + """ + def find_stack_uris(self, session) -> List[str]: + all_datasets: [Dataset] = DatasetRepository.list_all_active_datasets(session) + log.info(f'Found {len(all_datasets)} datasets') + return [dataset.datasetUri for dataset in all_datasets] diff --git a/backend/dataall/modules/datasets/tasks/dataset_subscription_task.py b/backend/dataall/modules/datasets/tasks/dataset_subscription_task.py new file mode 100644 index 000000000..6a83fc5c8 --- /dev/null +++ b/backend/dataall/modules/datasets/tasks/dataset_subscription_task.py @@ -0,0 +1,172 @@ +import logging +import os +import sys + +from botocore.exceptions import ClientError + +from dataall.core.tasks.service_handlers import Worker +from dataall.base.aws.sqs import SqsQueue +from dataall.core.environment.db.environment_models import Environment +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.base.db import get_engine +from dataall.modules.dataset_sharing.db.share_object_models import ShareObjectItem +from dataall.modules.dataset_sharing.db.share_object_repositories import ShareObjectRepository +from dataall.modules.dataset_sharing.services.share_notification_service import ShareNotificationService +from dataall.modules.datasets.aws.sns_dataset_client import SnsDatasetClient +from dataall.modules.datasets.db.dataset_location_repositories import DatasetLocationRepository +from dataall.modules.datasets.db.dataset_table_repositories import DatasetTableRepository +from dataall.modules.datasets.tasks.subscriptions import poll_queues +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.db.dataset_models import DatasetStorageLocation, DatasetTable, Dataset + +root = logging.getLogger() +root.setLevel(logging.INFO) +if not root.hasHandlers(): + root.addHandler(logging.StreamHandler(sys.stdout)) +log = logging.getLogger(__name__) + + +class DatasetSubscriptionService: + def __init__(self, engine): + self.engine = engine + + @staticmethod + def get_environments(engine): + with engine.scoped_session() as session: + return EnvironmentService.list_all_active_environments(session) + + @staticmethod + def get_queues(environments: [Environment]): + queues = [] + for env in environments: + queues.append( + { + 'url': f'https://sqs.{env.region}.amazonaws.com/{env.AwsAccountId}/{env.resourcePrefix}-producers-queue-{env.environmentUri}', + 'region': env.region, + 'accountid': env.AwsAccountId, + 'arn': f'arn:aws:sqs:{env.region}:{env.AwsAccountId}:ProducersSubscriptionsQueue-{env.environmentUri}', + 'name': f'{env.resourcePrefix}-producers-queue-{env.environmentUri}', + } + ) + return queues + + def notify_consumers(self, engine, messages): + log.info(f'Notifying consumers with messages {messages}') + + with engine.scoped_session() as session: + for message in messages: + self.publish_table_update_message(session, message) + self.publish_location_update_message(session, message) + + return True + + def publish_table_update_message(self, session, message): + table: DatasetTable = DatasetTableRepository.get_table_by_s3_prefix( + session, + message.get('prefix'), + message.get('accountid'), + message.get('region'), + ) + if not table: + log.info(f'No table for message {message}') + else: + log.info( + f'Found table {table.tableUri}|{table.GlueTableName}|{table.S3Prefix}' + ) + + message['table'] = table.GlueTableName + 
self._publish_update_message(session, message, table, table) + + def publish_location_update_message(self, session, message): + location: DatasetStorageLocation = ( + DatasetLocationRepository.get_location_by_s3_prefix( + session, + message.get('prefix'), + message.get('accountid'), + message.get('region'), + ) + ) + if not location: + log.info(f'No location found for message {message}') + + else: + log.info(f'Found location {location.locationUri}|{location.S3Prefix}') + self._publish_update_message(session, message, location) + + def _publish_update_message(self, session, message, entity, table: DatasetTable = None): + dataset: Dataset = DatasetRepository.get_dataset_by_uri(session, entity.datasetUri) + + log.info( + f'Found dataset {dataset.datasetUri}|{dataset.environmentUri}|{dataset.AwsAccountId}' + ) + share_items: [ShareObjectItem] = ShareObjectRepository.find_share_items_by_item_uri(session, entity.uri()) + log.info(f'Found shared items for location {share_items}') + + return self.publish_sns_message( + session, message, dataset, share_items, entity.S3Prefix, table + ) + + def publish_sns_message( + self, session, message, dataset, share_items, prefix, table: DatasetTable = None + ): + for item in share_items: + share_object = ShareObjectRepository.get_approved_share_object(session, item) + if not share_object or not share_object.principalId: + log.error( + f'Share Item with no share object or no principalId ? {item.shareItemUri}' + ) + else: + environment = session.query(Environment).get( + share_object.principalId + ) + if not environment: + log.error( + f'Environment of share owner was deleted ? {share_object.principalId}' + ) + else: + log.info(f'Notifying share owner {share_object.owner}') + + log.info( + f'found environment {environment.environmentUri}|{environment.AwsAccountId} of share owner {share_object.owner}' + ) + + try: + log.info( + f'Producer message before notifications: {message}' + ) + + message = { + 'location': prefix, + 'owner': dataset.owner, + 'message': f'Dataset owner {dataset.owner} ' + f'has updated the table shared with you {prefix}', + } + + sns_client = SnsDatasetClient(environment, dataset) + response = sns_client.publish_dataset_message(message) + log.info(f'SNS update publish response {response}') + + notifications = ShareNotificationService.notify_new_data_available_from_owners( + session=session, + dataset=dataset, + share=share_object, + s3_prefix=prefix, + ) + log.info(f'Notifications for share owners {notifications}') + + except ClientError as e: + log.error( + f'Failed to deliver message {message} due to: {e}' + ) + + +if __name__ == '__main__': + ENVNAME = os.environ.get('envname', 'local') + ENGINE = get_engine(envname=ENVNAME) + Worker.queue = SqsQueue.send + log.info('Polling datasets updates...') + service = DatasetSubscriptionService(ENGINE) + queues = service.get_queues(service.get_environments(ENGINE)) + messages = poll_queues(queues) + service.notify_consumers(ENGINE, messages) + log.info('Datasets updates shared successfully') diff --git a/backend/dataall/modules/datasets/tasks/subscriptions/__init__.py b/backend/dataall/modules/datasets/tasks/subscriptions/__init__.py new file mode 100644 index 000000000..fa0214e42 --- /dev/null +++ b/backend/dataall/modules/datasets/tasks/subscriptions/__init__.py @@ -0,0 +1 @@ +from .sqs_poller import poll_queues diff --git a/backend/dataall/tasks/subscriptions/sqs_poller.py b/backend/dataall/modules/datasets/tasks/subscriptions/sqs_poller.py similarity index 100% rename from 
backend/dataall/tasks/subscriptions/sqs_poller.py rename to backend/dataall/modules/datasets/tasks/subscriptions/sqs_poller.py diff --git a/backend/dataall/modules/datasets/tasks/tables_syncer.py b/backend/dataall/modules/datasets/tasks/tables_syncer.py new file mode 100644 index 000000000..5c89d1351 --- /dev/null +++ b/backend/dataall/modules/datasets/tasks/tables_syncer.py @@ -0,0 +1,111 @@ +import logging +import os +import sys +from operator import and_ + +from dataall.base.aws.sts import SessionHelper +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.base.db import get_engine +from dataall.modules.datasets.aws.glue_dataset_client import DatasetCrawler +from dataall.modules.datasets.aws.lf_table_client import LakeFormationTableClient +from dataall.modules.datasets.services.dataset_table_service import DatasetTableService +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.db.dataset_models import DatasetTable, Dataset +from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer +from dataall.modules.dataset_sharing.services.dataset_alarm_service import DatasetAlarmService + +root = logging.getLogger() +root.setLevel(logging.INFO) +if not root.hasHandlers(): + root.addHandler(logging.StreamHandler(sys.stdout)) +log = logging.getLogger(__name__) + + +def sync_tables(engine): + with engine.scoped_session() as session: + processed_tables = [] + all_datasets: [Dataset] = DatasetRepository.list_all_active_datasets( + session + ) + log.info(f'Found {len(all_datasets)} datasets for tables sync') + dataset: Dataset + for dataset in all_datasets: + log.info( + f'Synchronizing dataset {dataset.name}|{dataset.datasetUri} tables' + ) + env: Environment = ( + session.query(Environment) + .filter( + and_( + Environment.environmentUri == dataset.environmentUri, + Environment.deleted.is_(None), + ) + ) + .first() + ) + env_group: EnvironmentGroup = ( + EnvironmentService.get_environment_group( + session, dataset.SamlAdminGroupName, env.environmentUri + ) + ) + try: + if not env or not is_assumable_pivot_role(env): + log.info( + f'Dataset {dataset.GlueDatabaseName} has an invalid environment' + ) + else: + + tables = DatasetCrawler(dataset).list_glue_database_tables() + + log.info( + f'Found {len(tables)} tables on Glue database {dataset.GlueDatabaseName}' + ) + + DatasetTableService.sync_existing_tables( + session, dataset.datasetUri, glue_tables=tables + ) + + tables = ( + session.query(DatasetTable) + .filter(DatasetTable.datasetUri == dataset.datasetUri) + .all() + ) + + log.info('Updating tables permissions on Lake Formation...') + + for table in tables: + LakeFormationTableClient(table).grant_principals_all_table_permissions( + principals=[ + SessionHelper.get_delegation_role_arn(env.AwsAccountId), + env_group.environmentIAMRoleArn, + ], + ) + + processed_tables.extend(tables) + + DatasetTableIndexer.upsert_all(session, dataset_uri=dataset.datasetUri) + except Exception as e: + log.error( + f'Failed to sync tables for dataset ' + f'{dataset.AwsAccountId}/{dataset.GlueDatabaseName} ' + f'due to: {e}' + ) + DatasetAlarmService().trigger_dataset_sync_failure_alarm(dataset, str(e)) + return processed_tables + + +def is_assumable_pivot_role(env: Environment): + aws_session = SessionHelper.remote_session(accountid=env.AwsAccountId) + if not aws_session: + log.error( + f'Failed 
to assume dataall pivot role in environment {env.AwsAccountId}' + ) + return False + return True + + +if __name__ == '__main__': + ENVNAME = os.environ.get('envname', 'local') + ENGINE = get_engine(envname=ENVNAME) + sync_tables(engine=ENGINE) diff --git a/backend/dataall/modules/datasets_base/__init__.py b/backend/dataall/modules/datasets_base/__init__.py new file mode 100644 index 000000000..8d8e5c807 --- /dev/null +++ b/backend/dataall/modules/datasets_base/__init__.py @@ -0,0 +1,8 @@ +from typing import Set +from dataall.base.loader import ModuleInterface, ImportMode + + +class DatasetBaseModuleInterface(ModuleInterface): + @staticmethod + def is_supported(modes: Set[ImportMode]) -> bool: + return True diff --git a/backend/dataall/modules/datasets_base/db/__init__.py b/backend/dataall/modules/datasets_base/db/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/datasets_base/db/dataset_models.py b/backend/dataall/modules/datasets_base/db/dataset_models.py new file mode 100644 index 000000000..a5fcf1260 --- /dev/null +++ b/backend/dataall/modules/datasets_base/db/dataset_models.py @@ -0,0 +1,143 @@ +from sqlalchemy import Boolean, Column, String, Text, ForeignKey +from sqlalchemy.dialects.postgresql import JSON, ARRAY +from sqlalchemy.orm import query_expression +from dataall.base.db import Base, Resource, utils + + +class DatasetTableColumn(Resource, Base): + __tablename__ = 'dataset_table_column' + datasetUri = Column(String, nullable=False) + tableUri = Column(String, nullable=False) + columnUri = Column(String, primary_key=True, default=utils.uuid('col')) + AWSAccountId = Column(String, nullable=False) + region = Column(String, nullable=False) + GlueDatabaseName = Column(String, nullable=False) + GlueTableName = Column(String, nullable=False) + region = Column(String, default='eu-west-1') + typeName = Column(String, nullable=False) + columnType = Column( + String, default='column' + ) # can be either "column" or "partition" + + @classmethod + def uri(cls): + return cls.columnUri + + +class DatasetProfilingRun(Resource, Base): + __tablename__ = 'dataset_profiling_run' + profilingRunUri = Column( + String, primary_key=True, default=utils.uuid('profilingrun') + ) + datasetUri = Column(String, nullable=False) + GlueJobName = Column(String) + GlueJobRunId = Column(String) + GlueTriggerSchedule = Column(String) + GlueTriggerName = Column(String) + GlueTableName = Column(String) + AwsAccountId = Column(String) + results = Column(JSON, default={}) + status = Column(String, default='Created') + + +class DatasetStorageLocation(Resource, Base): + __tablename__ = 'dataset_storage_location' + datasetUri = Column(String, nullable=False) + locationUri = Column(String, primary_key=True, default=utils.uuid('location')) + AWSAccountId = Column(String, nullable=False) + S3BucketName = Column(String, nullable=False) + S3Prefix = Column(String, nullable=False) + S3AccessPoint = Column(String, nullable=True) + region = Column(String, default='eu-west-1') + locationCreated = Column(Boolean, default=False) + userRoleForStorageLocation = query_expression() + projectPermission = query_expression() + environmentEndPoint = query_expression() + + @classmethod + def uri(cls): + return cls.locationUri + + +class DatasetTable(Resource, Base): + __tablename__ = 'dataset_table' + datasetUri = Column(String, nullable=False) + tableUri = Column(String, primary_key=True, default=utils.uuid('table')) + AWSAccountId = Column(String, nullable=False) + S3BucketName = 
Column(String, nullable=False) + S3Prefix = Column(String, nullable=False) + GlueDatabaseName = Column(String, nullable=False) + GlueTableName = Column(String, nullable=False) + GlueTableConfig = Column(Text) + GlueTableProperties = Column(JSON, default={}) + LastGlueTableStatus = Column(String, default='InSync') + region = Column(String, default='eu-west-1') + # LastGeneratedPreviewDate= Column(DateTime, default=None) + confidentiality = Column(String, nullable=True) + userRoleForTable = query_expression() + projectPermission = query_expression() + stage = Column(String, default='RAW') + topics = Column(ARRAY(String), nullable=True) + confidentiality = Column(String, nullable=False, default='C1') + + @classmethod + def uri(cls): + return cls.tableUri + + +class Dataset(Resource, Base): + __tablename__ = 'dataset' + environmentUri = Column(String, ForeignKey("environment.environmentUri"), nullable=False) + organizationUri = Column(String, nullable=False) + datasetUri = Column(String, primary_key=True, default=utils.uuid('dataset')) + region = Column(String, default='eu-west-1') + AwsAccountId = Column(String, nullable=False) + S3BucketName = Column(String, nullable=False) + GlueDatabaseName = Column(String, nullable=False) + GlueCrawlerName = Column(String) + GlueCrawlerSchedule = Column(String) + GlueProfilingJobName = Column(String) + GlueProfilingTriggerSchedule = Column(String) + GlueProfilingTriggerName = Column(String) + GlueDataQualityJobName = Column(String) + GlueDataQualitySchedule = Column(String) + GlueDataQualityTriggerName = Column(String) + IAMDatasetAdminRoleArn = Column(String, nullable=False) + IAMDatasetAdminUserArn = Column(String, nullable=False) + KmsAlias = Column(String, nullable=False) + userRoleForDataset = query_expression() + userRoleInEnvironment = query_expression() + isPublishedInEnvironment = query_expression() + projectPermission = query_expression() + language = Column(String, nullable=False, default='English') + topics = Column(ARRAY(String), nullable=True) + confidentiality = Column(String, nullable=False, default='Unclassified') + tags = Column(ARRAY(String)) + inProject = query_expression() + + bucketCreated = Column(Boolean, default=False) + glueDatabaseCreated = Column(Boolean, default=False) + iamAdminRoleCreated = Column(Boolean, default=False) + iamAdminUserCreated = Column(Boolean, default=False) + kmsAliasCreated = Column(Boolean, default=False) + lakeformationLocationCreated = Column(Boolean, default=False) + bucketPolicyCreated = Column(Boolean, default=False) + + # bookmarked = Column(Integer, default=0) + # upvotes=Column(Integer, default=0) + + businessOwnerEmail = Column(String, nullable=True) + businessOwnerDelegationEmails = Column(ARRAY(String), nullable=True) + stewards = Column(String, nullable=True) + + SamlAdminGroupName = Column(String, nullable=True) + + importedS3Bucket = Column(Boolean, default=False) + importedGlueDatabase = Column(Boolean, default=False) + importedKmsKey = Column(Boolean, default=False) + importedAdminRole = Column(Boolean, default=False) + imported = Column(Boolean, default=False) + + @classmethod + def uri(cls): + return cls.datasetUri diff --git a/backend/dataall/modules/datasets_base/db/dataset_repositories.py b/backend/dataall/modules/datasets_base/db/dataset_repositories.py new file mode 100644 index 000000000..bfa727100 --- /dev/null +++ b/backend/dataall/modules/datasets_base/db/dataset_repositories.py @@ -0,0 +1,338 @@ +import logging + +from sqlalchemy import and_, or_ +from sqlalchemy.orm import 
Query + +from dataall.core.activity.db.activity_models import Activity +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.organizations.db.organization_repositories import Organization +from dataall.base.db import paginate +from dataall.base.db.exceptions import ObjectNotFound +from dataall.modules.datasets_base.db.enums import ConfidentialityClassification, Language +from dataall.core.environment.services.environment_resource_manager import EnvironmentResource +from dataall.modules.datasets_base.db.dataset_models import DatasetTable, Dataset +from dataall.base.utils.naming_convention import ( + NamingConventionService, + NamingConventionPattern, +) + +logger = logging.getLogger(__name__) + + +class DatasetRepository(EnvironmentResource): + """DAO layer for Datasets""" + + @staticmethod + def get_dataset_by_uri(session, dataset_uri) -> Dataset: + dataset: Dataset = session.query(Dataset).get(dataset_uri) + if not dataset: + raise ObjectNotFound('Dataset', dataset_uri) + return dataset + + @staticmethod + def count_resources(session, environment, group_uri) -> int: + return ( + session.query(Dataset) + .filter( + and_( + Dataset.environmentUri == environment.environmentUri, + Dataset.SamlAdminGroupName == group_uri + )) + .count() + ) + + @staticmethod + def create_dataset( + session, + username: str, + uri: str, + data: dict = None, + ) -> Dataset: + environment = EnvironmentService.get_environment_by_uri(session, uri) + + organization = Organization.get_organization_by_uri( + session, environment.organizationUri + ) + + dataset = Dataset( + label=data.get('label'), + owner=username, + description=data.get('description', 'No description provided'), + tags=data.get('tags', []), + AwsAccountId=environment.AwsAccountId, + SamlAdminGroupName=data['SamlAdminGroupName'], + region=environment.region, + S3BucketName='undefined', + GlueDatabaseName='undefined', + IAMDatasetAdminRoleArn='undefined', + IAMDatasetAdminUserArn='undefined', + KmsAlias='undefined', + environmentUri=environment.environmentUri, + organizationUri=environment.organizationUri, + language=data.get('language', Language.English.value), + confidentiality=data.get( + 'confidentiality', ConfidentialityClassification.Unclassified.value + ), + topics=data.get('topics', []), + businessOwnerEmail=data.get('businessOwnerEmail'), + businessOwnerDelegationEmails=data.get('businessOwnerDelegationEmails', []), + stewards=data.get('stewards') + if data.get('stewards') + else data['SamlAdminGroupName'], + ) + session.add(dataset) + session.commit() + + DatasetRepository._set_dataset_aws_resources(dataset, data, environment) + DatasetRepository._set_import_data(dataset, data) + + activity = Activity( + action='dataset:create', + label='dataset:create', + owner=username, + summary=f'{username} created dataset {dataset.name} in {environment.name} on organization {organization.name}', + targetUri=dataset.datasetUri, + targetType='dataset', + ) + session.add(activity) + return dataset + + @staticmethod + def _set_dataset_aws_resources(dataset: Dataset, data, environment): + + bucket_name = NamingConventionService( + target_uri=dataset.datasetUri, + target_label=dataset.label, + pattern=NamingConventionPattern.S3, + resource_prefix=environment.resourcePrefix, + ).build_compliant_name() + dataset.S3BucketName = data.get('bucketName') or bucket_name + + glue_db_name = NamingConventionService( + target_uri=dataset.datasetUri, + target_label=dataset.label, + pattern=NamingConventionPattern.GLUE, + 
resource_prefix=environment.resourcePrefix, + ).build_compliant_name() + dataset.GlueDatabaseName = data.get('glueDatabaseName') or glue_db_name + + dataset.KmsAlias = bucket_name + + iam_role_name = NamingConventionService( + target_uri=dataset.datasetUri, + target_label=dataset.label, + pattern=NamingConventionPattern.IAM, + resource_prefix=environment.resourcePrefix, + ).build_compliant_name() + iam_role_arn = f'arn:aws:iam::{dataset.AwsAccountId}:role/{iam_role_name}' + if data.get('adminRoleName'): + dataset.IAMDatasetAdminRoleArn = ( + f"arn:aws:iam::{dataset.AwsAccountId}:role/{data['adminRoleName']}" + ) + dataset.IAMDatasetAdminUserArn = ( + f"arn:aws:iam::{dataset.AwsAccountId}:role/{data['adminRoleName']}" + ) + else: + dataset.IAMDatasetAdminRoleArn = iam_role_arn + dataset.IAMDatasetAdminUserArn = iam_role_arn + + glue_etl_basename = NamingConventionService( + target_uri=dataset.datasetUri, + target_label=dataset.label, + pattern=NamingConventionPattern.GLUE_ETL, + resource_prefix=environment.resourcePrefix, + ).build_compliant_name() + + dataset.GlueCrawlerName = f"{glue_etl_basename}-crawler" + dataset.GlueProfilingJobName = f"{glue_etl_basename}-profiler" + dataset.GlueProfilingTriggerSchedule = None + dataset.GlueProfilingTriggerName = f"{glue_etl_basename}-trigger" + dataset.GlueDataQualityJobName = f"{glue_etl_basename}-dataquality" + dataset.GlueDataQualitySchedule = None + dataset.GlueDataQualityTriggerName = f"{glue_etl_basename}-dqtrigger" + return dataset + + @staticmethod + def paginated_dataset_tables(session, uri, data=None) -> dict: + query = ( + session.query(DatasetTable) + .filter( + and_( + DatasetTable.datasetUri == uri, + DatasetTable.LastGlueTableStatus != 'Deleted', + ) + ) + .order_by(DatasetTable.created.desc()) + ) + if data and data.get('term'): + query = query.filter( + or_( + *[ + DatasetTable.name.ilike('%' + data.get('term') + '%'), + DatasetTable.GlueTableName.ilike( + '%' + data.get('term') + '%' + ), + ] + ) + ) + return paginate( + query=query, page_size=data.get('pageSize', 10), page=data.get('page', 1) + ).to_dict() + + @staticmethod + def update_dataset_activity(session, dataset, username) : + activity = Activity( + action='dataset:update', + label='dataset:update', + owner=username, + summary=f'{username} updated dataset {dataset.name}', + targetUri=dataset.datasetUri, + targetType='dataset', + ) + session.add(activity) + session.commit() + + @staticmethod + def update_bucket_status(session, dataset_uri): + """ + helper method to update the dataset bucket status + """ + dataset = DatasetRepository.get_dataset_by_uri(session, dataset_uri) + dataset.bucketCreated = True + return dataset + + @staticmethod + def update_glue_database_status(session, dataset_uri): + """ + helper method to update the dataset db status + """ + dataset = DatasetRepository.get_dataset_by_uri(session, dataset_uri) + dataset.glueDatabaseCreated = True + + @staticmethod + def get_dataset_tables(session, dataset_uri): + """return the dataset tables""" + return ( + session.query(DatasetTable) + .filter(DatasetTable.datasetUri == dataset_uri) + .all() + ) + + @staticmethod + def delete_dataset(session, dataset) -> bool: + session.delete(dataset) + return True + + @staticmethod + def list_all_datasets(session) -> [Dataset]: + return session.query(Dataset).all() + + @staticmethod + def list_all_active_datasets(session) -> [Dataset]: + return ( + session.query(Dataset).filter(Dataset.deleted.is_(None)).all() + ) + + @staticmethod + def get_dataset_by_bucket_name(session, 
bucket) -> [Dataset]: + return ( + session.query(Dataset) + .filter(Dataset.S3BucketName == bucket) + .first() + ) + + @staticmethod + def count_dataset_tables(session, dataset_uri): + return ( + session.query(DatasetTable) + .filter(DatasetTable.datasetUri == dataset_uri) + .count() + ) + + @staticmethod + def query_environment_group_datasets(session, env_uri, group_uri, filter) -> Query: + query = session.query(Dataset).filter( + and_( + Dataset.environmentUri == env_uri, + Dataset.SamlAdminGroupName == group_uri, + Dataset.deleted.is_(None), + ) + ) + if filter and filter.get('term'): + term = filter['term'] + query = query.filter( + or_( + Dataset.label.ilike('%' + term + '%'), + Dataset.description.ilike('%' + term + '%'), + Dataset.tags.contains(f'{{{term}}}'), + Dataset.region.ilike('%' + term + '%'), + ) + ) + return query + + @staticmethod + def query_environment_datasets(session, uri, filter) -> Query: + query = session.query(Dataset).filter( + and_( + Dataset.environmentUri == uri, + Dataset.deleted.is_(None), + ) + ) + if filter and filter.get('term'): + term = filter['term'] + query = query.filter( + or_( + Dataset.label.ilike('%' + term + '%'), + Dataset.description.ilike('%' + term + '%'), + Dataset.tags.contains(f'{{{term}}}'), + Dataset.region.ilike('%' + term + '%'), + ) + ) + return query + + @staticmethod + def paginated_environment_datasets( + session, uri, data=None, + ) -> dict: + return paginate( + query=DatasetRepository.query_environment_datasets( + session, uri, data + ), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def paginated_environment_group_datasets( + session, env_uri, group_uri, data=None + ) -> dict: + return paginate( + query=DatasetRepository.query_environment_group_datasets( + session, env_uri, group_uri, data + ), + page=data.get('page', 1), + page_size=data.get('pageSize', 10), + ).to_dict() + + @staticmethod + def list_group_datasets(session, environment_id, group_uri): + return ( + session.query(Dataset) + .filter( + and_( + Dataset.environmentUri == environment_id, + Dataset.SamlAdminGroupName == group_uri, + ) + ) + .all() + ) + + @staticmethod + def _set_import_data(dataset, data): + dataset.imported = True if data.get('imported') else False + dataset.importedS3Bucket = True if data.get('bucketName') else False + dataset.importedGlueDatabase = True if data.get('glueDatabaseName') else False + dataset.importedKmsKey = True if data.get('KmsKeyAlias') else False + dataset.importedAdminRole = True if data.get('adminRoleName') else False + if data.get('imported'): + dataset.KmsAlias = data.get('KmsKeyAlias') if data.get('KmsKeyAlias') else "SSE-S3" diff --git a/backend/dataall/modules/datasets_base/db/enums.py b/backend/dataall/modules/datasets_base/db/enums.py new file mode 100644 index 000000000..be779e58d --- /dev/null +++ b/backend/dataall/modules/datasets_base/db/enums.py @@ -0,0 +1,23 @@ +from enum import Enum + + +class ConfidentialityClassification(Enum): + Unclassified = 'Unclassified' + Official = 'Official' + Secret = 'Secret' + + +class DatasetRole(Enum): + # Permissions on a dataset + BusinessOwner = '999' + DataSteward = '998' + Creator = '950' + Admin = '900' + Shared = '300' + NoPermission = '000' + + +class Language(Enum): + English = 'English' + French = 'French' + German = 'German' diff --git a/backend/dataall/modules/datasets_base/services/__init__.py b/backend/dataall/modules/datasets_base/services/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
a/backend/dataall/modules/datasets_base/services/permissions.py b/backend/dataall/modules/datasets_base/services/permissions.py new file mode 100644 index 000000000..5d44ec983 --- /dev/null +++ b/backend/dataall/modules/datasets_base/services/permissions.py @@ -0,0 +1,12 @@ + +""" +DATASET TABLE PERMISSIONS +""" + +GET_DATASET_TABLE = 'GET_DATASET_TABLE' +PREVIEW_DATASET_TABLE = 'PREVIEW_DATASET_TABLE' + +DATASET_TABLE_READ = [ + GET_DATASET_TABLE, + PREVIEW_DATASET_TABLE +] diff --git a/backend/dataall/modules/feed/__init__.py b/backend/dataall/modules/feed/__init__.py new file mode 100644 index 000000000..e9bce29b3 --- /dev/null +++ b/backend/dataall/modules/feed/__init__.py @@ -0,0 +1,14 @@ +from typing import Set + +from dataall.base.loader import ModuleInterface, ImportMode + + +class FeedApiModuleInterface(ModuleInterface): + """Implements ModuleInterface for feeds code in GraphQL Lambda""" + + @staticmethod + def is_supported(modes: Set[ImportMode]) -> bool: + return ImportMode.API in modes + + def __init__(self): + import dataall.modules.feed.api diff --git a/backend/dataall/modules/feed/api/__init__.py b/backend/dataall/modules/feed/api/__init__.py new file mode 100644 index 000000000..fe8cedc8e --- /dev/null +++ b/backend/dataall/modules/feed/api/__init__.py @@ -0,0 +1 @@ +from dataall.modules.feed.api import resolvers, input_types, types, queries, mutations, registry diff --git a/backend/dataall/modules/feed/api/input_types.py b/backend/dataall/modules/feed/api/input_types.py new file mode 100644 index 000000000..349cec590 --- /dev/null +++ b/backend/dataall/modules/feed/api/input_types.py @@ -0,0 +1,14 @@ +from dataall.base.api import gql + +FeedMessageInput = gql.InputType( + name='FeedMessageInput', arguments=[gql.Argument(name='content', type=gql.String)] +) + +FeedMessageFilter = gql.InputType( + name='FeedMessageFilter', + arguments=[ + gql.Argument(name='term', type=gql.String), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + ], +) diff --git a/backend/dataall/modules/feed/api/mutations.py b/backend/dataall/modules/feed/api/mutations.py new file mode 100644 index 000000000..a94d7beee --- /dev/null +++ b/backend/dataall/modules/feed/api/mutations.py @@ -0,0 +1,14 @@ +from dataall.base.api import gql +from dataall.modules.feed.api.resolvers import post_message + + +postFeedMessage = gql.MutationField( + name='postFeedMessage', + resolver=post_message, + args=[ + gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), + gql.Argument(name='input', type=gql.Ref('FeedMessageInput')), + ], + type=gql.Ref('FeedMessage'), +) diff --git a/backend/dataall/modules/feed/api/queries.py b/backend/dataall/modules/feed/api/queries.py new file mode 100644 index 000000000..ee65c848e --- /dev/null +++ b/backend/dataall/modules/feed/api/queries.py @@ -0,0 +1,13 @@ +from dataall.base.api import gql +from dataall.modules.feed.api.resolvers import get_feed + + +getFeed = gql.QueryField( + name='getFeed', + resolver=get_feed, + args=[ + gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), + ], + type=gql.Ref('Feed'), +) diff --git a/backend/dataall/modules/feed/api/registry.py b/backend/dataall/modules/feed/api/registry.py new file mode 100644 index 000000000..6acea0480 --- /dev/null +++ b/backend/dataall/modules/feed/api/registry.py @@ -0,0 +1,36 @@ +from dataclasses 
import dataclass +from typing import Type, Dict + +from dataall.base.api import gql +from dataall.base.api.gql.graphql_union_type import UnionTypeRegistry +from dataall.base.db import Resource + + +@dataclass +class FeedDefinition: + target_type: str + model: Type[Resource] + + +class FeedRegistry(UnionTypeRegistry): + """Registers models for different target types""" + _DEFINITIONS: Dict[str, FeedDefinition] = {} + + @classmethod + def register(cls, definition: FeedDefinition): + cls._DEFINITIONS[definition.target_type] = definition + + @classmethod + def find_model(cls, target_type: str): + return cls._DEFINITIONS[target_type].model + + @classmethod + def find_target(cls, obj: Resource): + for target_type, definition in cls._DEFINITIONS.items(): + if isinstance(obj, definition.model): + return target_type + return None + + @classmethod + def types(cls): + return [gql.Ref(target_type) for target_type in cls._DEFINITIONS.keys()] diff --git a/backend/dataall/modules/feed/api/resolvers.py b/backend/dataall/modules/feed/api/resolvers.py new file mode 100644 index 000000000..22fd9c3fc --- /dev/null +++ b/backend/dataall/modules/feed/api/resolvers.py @@ -0,0 +1,85 @@ +from sqlalchemy import or_ + +from dataall.base.api.context import Context +from dataall.base.db import paginate +from dataall.modules.feed.api.registry import FeedRegistry +from dataall.modules.feed.db.feed_models import FeedMessage + + +class Feed: + def __init__(self, targetUri: str = None, targetType: str = None): + self._targetUri = targetUri + self._targetType = targetType + + @property + def targetUri(self): + return self._targetUri + + @property + def targetType(self): + return self._targetType + + +def resolve_feed_target_type(obj, *_): + return FeedRegistry.find_target(obj) + + +def resolve_target(context: Context, source: Feed, **kwargs): + if not source: + return None + with context.engine.scoped_session() as session: + model = FeedRegistry.find_model(source.targetType) + target = session.query(model).get(source.targetUri) + return target + + +def get_feed( + context: Context, + source, + targetUri: str = None, + targetType: str = None, + filter: dict = None, +) -> Feed: + return Feed(targetUri=targetUri, targetType=targetType) + + +def post_message( + context: Context, + source, + targetUri: str = None, + targetType: str = None, + input: dict = None, +): + with context.engine.scoped_session() as session: + m = FeedMessage( + targetUri=targetUri, + targetType=targetType, + creator=context.username, + content=input.get('content'), + ) + session.add(m) + return m + + +def resolve_messages(context: Context, source: Feed, filter: dict = None): + if not source: + return None + if not filter: + filter = {} + with context.engine.scoped_session() as session: + q = session.query(FeedMessage).filter( + FeedMessage.targetUri == source.targetUri + ) + term = filter.get('term') + if term: + q = q.filter( + or_( + FeedMessage.content.ilike('%' + term + '%'), + FeedMessage.creator.ilike('%' + term + '%'), + ) + ) + q = q.order_by(FeedMessage.created.desc()) + + return paginate( + q, page=filter.get('page', 1), page_size=filter.get('pageSize', 10) + ).to_dict() diff --git a/backend/dataall/modules/feed/api/types.py b/backend/dataall/modules/feed/api/types.py new file mode 100644 index 000000000..d770dc908 --- /dev/null +++ b/backend/dataall/modules/feed/api/types.py @@ -0,0 +1,49 @@ +from dataall.base.api import gql +from dataall.modules.feed.api.resolvers import resolve_feed_target_type, resolve_messages, resolve_target +from 
dataall.modules.feed.api.registry import FeedRegistry + + +FeedTarget = gql.Union( + name='FeedTarget', + type_registry=FeedRegistry, + resolver=resolve_feed_target_type, +) + +Feed = gql.ObjectType( + name='Feed', + fields=[ + gql.Field(name='feedTargetUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='feedTargetType', type=gql.NonNullableType(gql.String)), + gql.Field(name='target', resolver=resolve_target, type=gql.Ref('FeedTarget')), + gql.Field( + name='messages', + args=[gql.Argument(name='filter', type=gql.Ref('FeedMessageFilter'))], + resolver=resolve_messages, + type=gql.Ref('FeedMessages'), + ), + ], +) + + +FeedMessage = gql.ObjectType( + name='FeedMessage', + fields=[ + gql.Field(name='feedMessageUri', type=gql.ID), + gql.Field(name='creator', type=gql.NonNullableType(gql.String)), + gql.Field(name='content', type=gql.String), + gql.Field(name='created', type=gql.String), + ], +) + + +FeedMessages = gql.ObjectType( + name='FeedMessages', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('FeedMessage'))), + ], +) diff --git a/backend/dataall/modules/feed/db/__init__.py b/backend/dataall/modules/feed/db/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/feed/db/feed_models.py b/backend/dataall/modules/feed/db/feed_models.py new file mode 100644 index 000000000..2e1efaad8 --- /dev/null +++ b/backend/dataall/modules/feed/db/feed_models.py @@ -0,0 +1,15 @@ +from datetime import datetime + +from sqlalchemy import Column, String, DateTime + +from dataall.base.db import Base, utils + + +class FeedMessage(Base): + __tablename__ = 'feed_message' + feedMessageUri = Column(String, primary_key=True, default=utils.uuid('_')) + creator = Column(String, nullable=False) + created = Column(DateTime, nullable=False, default=datetime.now) + content = Column(String, nullable=True) + targetUri = Column(String, nullable=False) + targetType = Column(String, nullable=False) diff --git a/backend/dataall/modules/mlstudio/__init__.py b/backend/dataall/modules/mlstudio/__init__.py new file mode 100644 index 000000000..3d77248ac --- /dev/null +++ b/backend/dataall/modules/mlstudio/__init__.py @@ -0,0 +1,39 @@ +"""Contains the code related to SageMaker ML Studio user profiles""" +import logging + +from dataall.base.loader import ImportMode, ModuleInterface +from dataall.core.stacks.db.target_type_repositories import TargetType +from dataall.modules.mlstudio.db.mlstudio_repositories import SageMakerStudioRepository + +log = logging.getLogger(__name__) + + +class MLStudioApiModuleInterface(ModuleInterface): + """Implements ModuleInterface for MLStudio GraphQl lambda""" + + @classmethod + def is_supported(cls, modes): + return ImportMode.API in modes + + def __init__(self): + import dataall.modules.mlstudio.api + from dataall.modules.mlstudio.services.mlstudio_permissions import GET_SGMSTUDIO_USER, UPDATE_SGMSTUDIO_USER + TargetType("mlstudio", GET_SGMSTUDIO_USER, UPDATE_SGMSTUDIO_USER) + + log.info("API of sagemaker mlstudio has been imported") + + +class MLStudioCdkModuleInterface(ModuleInterface): + """Implements ModuleInterface for MLStudio ecs tasks""" + + @classmethod + def is_supported(cls, modes): + return ImportMode.CDK in modes + + def __init__(self): + import dataall.modules.mlstudio.cdk + from 
dataall.core.environment.cdk.environment_stack import EnvironmentSetup + from dataall.modules.mlstudio.cdk.mlstudio_stack import SageMakerDomainExtension + + EnvironmentSetup.register(SageMakerDomainExtension) + log.info("CDK of sagemaker mlstudio has been imported") diff --git a/backend/dataall/modules/mlstudio/api/__init__.py b/backend/dataall/modules/mlstudio/api/__init__.py new file mode 100644 index 000000000..90d238879 --- /dev/null +++ b/backend/dataall/modules/mlstudio/api/__init__.py @@ -0,0 +1,4 @@ +"""The package defines the schema for SageMaker ML Studio""" +from dataall.modules.mlstudio.api import input_types, mutations, queries, types, resolvers + +__all__ = ["types", "input_types", "queries", "mutations", "resolvers"] diff --git a/backend/dataall/modules/mlstudio/api/enums.py b/backend/dataall/modules/mlstudio/api/enums.py new file mode 100644 index 000000000..2c248849f --- /dev/null +++ b/backend/dataall/modules/mlstudio/api/enums.py @@ -0,0 +1,10 @@ +"""Contains the enums GraphQL mapping for SageMaker ML Studio""" +from dataall.base.api.constants import GraphQLEnumMapper + + +class SagemakerStudioRole(GraphQLEnumMapper): + """Describes the SageMaker ML Studio roles""" + + Creator = '950' + Admin = '900' + NoPermission = '000' diff --git a/backend/dataall/modules/mlstudio/api/input_types.py b/backend/dataall/modules/mlstudio/api/input_types.py new file mode 100644 index 000000000..f05fd53f6 --- /dev/null +++ b/backend/dataall/modules/mlstudio/api/input_types.py @@ -0,0 +1,35 @@ +"""The module defines GraphQL input types for the SageMaker ML Studio""" +from dataall.base.api import gql + +NewSagemakerStudioUserInput = gql.InputType( + name='NewSagemakerStudioUserInput', + arguments=[ + gql.Argument('label', gql.NonNullableType(gql.String)), + gql.Argument('description', gql.String), + gql.Argument('environmentUri', gql.NonNullableType(gql.String)), + gql.Argument('tags', gql.ArrayType(gql.String)), + gql.Argument('topics', gql.String), + gql.Argument('SamlAdminGroupName', gql.NonNullableType(gql.String)), + ], +) + +ModifySagemakerStudioUserInput = gql.InputType( + name='ModifySagemakerStudioUserInput', + arguments=[ + gql.Argument('label', gql.String), + gql.Argument('tags', gql.ArrayType(gql.String)), + gql.Argument('description', gql.String), + ], +) + +SagemakerStudioUserFilter = gql.InputType( + name='SagemakerStudioUserFilter', + arguments=[ + gql.Argument('term', gql.String), + gql.Argument('page', gql.Integer), + gql.Argument('pageSize', gql.Integer), + gql.Argument('sort', gql.String), + gql.Argument('limit', gql.Integer), + gql.Argument('offset', gql.Integer), + ], +) diff --git a/backend/dataall/modules/mlstudio/api/mutations.py b/backend/dataall/modules/mlstudio/api/mutations.py new file mode 100644 index 000000000..abcc3cc99 --- /dev/null +++ b/backend/dataall/modules/mlstudio/api/mutations.py @@ -0,0 +1,31 @@ +"""The module defines GraphQL mutations for the SageMaker ML Studio""" +from dataall.base.api import gql +from dataall.modules.mlstudio.api.resolvers import ( + create_sagemaker_studio_user, + delete_sagemaker_studio_user, +) + +createSagemakerStudioUser = gql.MutationField( + name='createSagemakerStudioUser', + args=[ + gql.Argument( + name='input', + type=gql.NonNullableType(gql.Ref('NewSagemakerStudioUserInput')), + ) + ], + type=gql.Ref('SagemakerStudioUser'), + resolver=create_sagemaker_studio_user, +) + +deleteSagemakerStudioUser = gql.MutationField( + name='deleteSagemakerStudioUser', + args=[ + gql.Argument( + name='sagemakerStudioUserUri', + 
type=gql.NonNullableType(gql.String), + ), + gql.Argument(name='deleteFromAWS', type=gql.Boolean), + ], + type=gql.String, + resolver=delete_sagemaker_studio_user, +) diff --git a/backend/dataall/modules/mlstudio/api/queries.py b/backend/dataall/modules/mlstudio/api/queries.py new file mode 100644 index 000000000..457559def --- /dev/null +++ b/backend/dataall/modules/mlstudio/api/queries.py @@ -0,0 +1,36 @@ +"""The module defines GraphQL queries for the SageMaker ML Studio""" +from dataall.base.api import gql +from dataall.modules.mlstudio.api.resolvers import ( + get_sagemaker_studio_user, + list_sagemaker_studio_users, + get_sagemaker_studio_user_presigned_url, +) + +getSagemakerStudioUser = gql.QueryField( + name='getSagemakerStudioUser', + args=[ + gql.Argument( + name='sagemakerStudioUserUri', type=gql.NonNullableType(gql.String) + ) + ], + type=gql.Ref('SagemakerStudioUser'), + resolver=get_sagemaker_studio_user, +) + +listSagemakerStudioUsers = gql.QueryField( + name='listSagemakerStudioUsers', + args=[gql.Argument('filter', gql.Ref('SagemakerStudioUserFilter'))], + type=gql.Ref('SagemakerStudioUserSearchResult'), + resolver=list_sagemaker_studio_users, +) + +getSagemakerStudioUserPresignedUrl = gql.QueryField( + name='getSagemakerStudioUserPresignedUrl', + args=[ + gql.Argument( + name='sagemakerStudioUserUri', type=gql.NonNullableType(gql.String) + ) + ], + type=gql.String, + resolver=get_sagemaker_studio_user_presigned_url, +) diff --git a/backend/dataall/modules/mlstudio/api/resolvers.py b/backend/dataall/modules/mlstudio/api/resolvers.py new file mode 100644 index 000000000..63dc25ed7 --- /dev/null +++ b/backend/dataall/modules/mlstudio/api/resolvers.py @@ -0,0 +1,137 @@ +import logging + +from dataall.base.api.context import Context +from dataall.core.stacks.api import stack_helper +from dataall.base.db import exceptions +from dataall.modules.mlstudio.api.enums import SagemakerStudioRole +from dataall.modules.mlstudio.db.mlstudio_models import SagemakerStudioUser +from dataall.modules.mlstudio.services.mlstudio_service import SagemakerStudioService, SagemakerStudioCreationRequest + +log = logging.getLogger(__name__) + + +class RequestValidator: + """Aggregates all validation logic for operating with mlstudio""" + @staticmethod + def required_uri(uri): + if not uri: + raise exceptions.RequiredParameter('URI') + + @staticmethod + def validate_creation_request(data): + required = RequestValidator._required + if not data: + raise exceptions.RequiredParameter('data') + if not data.get('label'): + raise exceptions.RequiredParameter('name') + + required(data, "environmentUri") + required(data, "SamlAdminGroupName") + + @staticmethod + def _required(data: dict, name: str): + if not data.get(name): + raise exceptions.RequiredParameter(name) + + +def create_sagemaker_studio_user(context: Context, source, input: dict = None): + """Creates a SageMaker Studio user. Deploys the SageMaker Studio user stack into AWS""" + RequestValidator.validate_creation_request(input) + request = SagemakerStudioCreationRequest.from_dict(input) + return SagemakerStudioService.create_sagemaker_studio_user( + uri=input["environmentUri"], + admin_group=input["SamlAdminGroupName"], + request=request + ) + + +def list_sagemaker_studio_users(context, source, filter: dict = None): + """ + Lists all SageMaker Studio users using the given filter. + If the filter is not provided, all users are returned. 
+ """ + if not filter: + filter = {} + return SagemakerStudioService.list_sagemaker_studio_users(filter=filter) + + +def get_sagemaker_studio_user( + context, source, sagemakerStudioUserUri: str = None +) -> SagemakerStudioUser: + """Retrieve a SageMaker Studio user by URI.""" + RequestValidator.required_uri(sagemakerStudioUserUri) + return SagemakerStudioService.get_sagemaker_studio_user(uri=sagemakerStudioUserUri) + + +def get_sagemaker_studio_user_presigned_url( + context, + source: SagemakerStudioUser, + sagemakerStudioUserUri: str, +) -> str: + """Creates and returns a presigned url for a SageMaker Studio user""" + RequestValidator.required_uri(sagemakerStudioUserUri) + return SagemakerStudioService.get_sagemaker_studio_user_presigned_url(uri=sagemakerStudioUserUri) + + +def delete_sagemaker_studio_user( + context, + source: SagemakerStudioUser, + sagemakerStudioUserUri: str = None, + deleteFromAWS: bool = None, +): + """ + Deletes the SageMaker Studio user. + Deletes the SageMaker Studio user stack from AWS if deleteFromAWS is True + """ + RequestValidator.required_uri(sagemakerStudioUserUri) + return SagemakerStudioService.delete_sagemaker_studio_user( + uri=sagemakerStudioUserUri, + delete_from_aws=deleteFromAWS + ) + + +def resolve_user_role(context: Context, source: SagemakerStudioUser): + """ + Resolves the role of the current user in reference with the SageMaker Studio User + """ + if not source: + return None + if source.owner == context.username: + return SagemakerStudioRole.Creator.value + elif context.groups and source.SamlAdminGroupName in context.groups: + return SagemakerStudioRole.Admin.value + return SagemakerStudioRole.NoPermission.value + + +def resolve_sagemaker_studio_user_status(context, source: SagemakerStudioUser, **kwargs): + """ + Resolves the status of the SageMaker Studio User + """ + if not source: + return None + return SagemakerStudioService.get_sagemaker_studio_user_status( + uri=source.sagemakerStudioUserUri + ) + + +def resolve_sagemaker_studio_user_stack( + context: Context, source: SagemakerStudioUser, **kwargs +): + """ + Resolves the status of the CloudFormation stack of the SageMaker Studio User + """ + if not source: + return None + return stack_helper.get_stack_with_cfn_resources( + targetUri=source.sagemakerStudioUserUri, + environmentUri=source.environmentUri, + ) + + +def resolve_sagemaker_studio_user_applications(context, source: SagemakerStudioUser): + """ + Resolves the applications created with this SageMaker Studio User + """ + if not source: + return None + return SagemakerStudioService.get_sagemaker_studio_user_applications(uri=source.sagemakerStudioUserUri) diff --git a/backend/dataall/modules/mlstudio/api/types.py b/backend/dataall/modules/mlstudio/api/types.py new file mode 100644 index 000000000..21290711e --- /dev/null +++ b/backend/dataall/modules/mlstudio/api/types.py @@ -0,0 +1,81 @@ +"""Defines the object types of the SageMaker ML Studio""" +from dataall.base.api import gql +from dataall.modules.mlstudio.api.resolvers import ( + resolve_user_role, + resolve_sagemaker_studio_user_status, + resolve_sagemaker_studio_user_stack, + resolve_sagemaker_studio_user_applications, +) +from dataall.modules.mlstudio.api.enums import SagemakerStudioRole + + +from dataall.core.organizations.api.resolvers import resolve_organization_by_env +from dataall.core.environment.api.resolvers import resolve_environment + + +SagemakerStudioUserApps = gql.ArrayType( + gql.ObjectType( + name='SagemakerStudioUserApps', + fields=[ + 
gql.Field(name='DomainId', type=gql.String), + gql.Field(name='UserName', type=gql.String), + gql.Field(name='AppType', type=gql.String), + gql.Field(name='AppName', type=gql.String), + gql.Field(name='Status', type=gql.String), + ], + ) +) + +SagemakerStudioUser = gql.ObjectType( + name='SagemakerStudioUser', + fields=[ + gql.Field(name='sagemakerStudioUserUri', type=gql.ID), + gql.Field(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Field(name='label', type=gql.String), + gql.Field(name='description', type=gql.String), + gql.Field(name='tags', type=gql.ArrayType(gql.String)), + gql.Field(name='name', type=gql.String), + gql.Field(name='owner', type=gql.String), + gql.Field(name='created', type=gql.String), + gql.Field(name='updated', type=gql.String), + gql.Field(name='SamlAdminGroupName', type=gql.String), + gql.Field( + name='userRoleForSagemakerStudioUser', + type=SagemakerStudioRole.toGraphQLEnum(), + resolver=resolve_user_role, + ), + gql.Field( + name='sagemakerStudioUserStatus', + type=gql.String, + resolver=resolve_sagemaker_studio_user_status, + ), + gql.Field( + name='sagemakerStudioUserApps', + type=SagemakerStudioUserApps, + resolver=resolve_sagemaker_studio_user_applications, + ), + gql.Field( + name='environment', + type=gql.Ref('Environment'), + resolver=resolve_environment, + ), + gql.Field( + name='organization', + type=gql.Ref('Organization'), + resolver=resolve_organization_by_env, + ), + gql.Field(name='stack', type=gql.Ref('Stack'), resolver=resolve_sagemaker_studio_user_stack), + ], +) + +SagemakerStudioUserSearchResult = gql.ObjectType( + name='SagemakerStudioUserSearchResult', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(SagemakerStudioUser)), + ], +) diff --git a/backend/dataall/modules/mlstudio/aws/__init__.py b/backend/dataall/modules/mlstudio/aws/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/mlstudio/aws/ec2_client.py b/backend/dataall/modules/mlstudio/aws/ec2_client.py new file mode 100644 index 000000000..3dc484254 --- /dev/null +++ b/backend/dataall/modules/mlstudio/aws/ec2_client.py @@ -0,0 +1,27 @@ +import logging + +from dataall.base.aws.sts import SessionHelper + + +log = logging.getLogger(__name__) + + +class EC2: + + @staticmethod + def get_client(account_id: str, region: str, role=None): + session = SessionHelper.remote_session(accountid=account_id, role=role) + return session.client('ec2', region_name=region) + + @staticmethod + def check_default_vpc_exists(AwsAccountId: str, region: str, role=None): + log.info("Check that default VPC exists..") + client = EC2.get_client(account_id=AwsAccountId, region=region, role=role) + response = client.describe_vpcs( + Filters=[{'Name': 'isDefault', 'Values': ['true']}] + ) + vpcs = response['Vpcs'] + log.info(f"Default VPCs response: {vpcs}") + if vpcs: + return True + return False diff --git a/backend/dataall/modules/mlstudio/aws/sagemaker_studio_client.py b/backend/dataall/modules/mlstudio/aws/sagemaker_studio_client.py new file mode 100644 index 000000000..2a82806ea --- /dev/null +++ b/backend/dataall/modules/mlstudio/aws/sagemaker_studio_client.py @@ -0,0 +1,99 @@ +import logging + +from dataall.base.aws.sts import SessionHelper +from dataall.modules.mlstudio.db.mlstudio_models import 
SagemakerStudioUser +from botocore.exceptions import ClientError + +logger = logging.getLogger(__name__) + + +def get_client(AwsAccountId, region, role=None): + session = SessionHelper.remote_session(accountid=AwsAccountId, role=role) + return session.client('sagemaker', region_name=region) + + +def get_sagemaker_studio_domain(AwsAccountId, region, role=None): + """ + Sagemaker studio domain is limited to 5 per account/region + RETURN: an existing domain or an empty dict if no domain is in the AWS account + """ + client = get_client(AwsAccountId=AwsAccountId, region=region, role=role) + existing_domain = dict() + try: + domain_id_paginator = client.get_paginator('list_domains') + domains = domain_id_paginator.paginate() + for _domain in domains: + print(_domain) + for _domain in _domain.get('Domains'): + # Get the domain name created by dataall + if 'dataall' in _domain.get('DomainName', ''): + return _domain + else: + existing_domain = _domain + return existing_domain + except ClientError as e: + logger.error(e) + return existing_domain + + +class SagemakerStudioClient: + """A Sagemaker studio proxy client that is used to send requests to AWS""" + def __init__(self, sm_user: SagemakerStudioUser): + self._client = get_client( + AwsAccountId=sm_user.AWSAccountId, + region=sm_user.region + ) + self._sagemakerStudioDomainID = sm_user.sagemakerStudioDomainID + self._sagemakerStudioUserNameSlugify = sm_user.sagemakerStudioUserNameSlugify + + def get_sagemaker_studio_user_presigned_url(self): + try: + response_signed_url = self._client.create_presigned_domain_url( + DomainId=self._sagemakerStudioDomainID, + UserProfileName=self._sagemakerStudioUserNameSlugify, + ) + return response_signed_url['AuthorizedUrl'] + except ClientError: + return '' + + def get_sagemaker_studio_user_status(self): + try: + response = self._client.describe_user_profile( + DomainId=self._sagemakerStudioDomainID, + UserProfileName=self._sagemakerStudioUserNameSlugify, + ) + return response['Status'] + except ClientError as e: + logger.error( + f'Could not retrieve Studio user {self._sagemakerStudioUserNameSlugify} status due to: {e} ' + ) + return 'NOT FOUND' + + def get_sagemaker_studio_user_applications(self): + _running_apps = [] + try: + paginator_app = self._client.get_paginator('list_apps') + response_paginator = paginator_app.paginate( + DomainIdEquals=self._sagemakerStudioDomainID, + UserProfileNameEquals=self._sagemakerStudioUserNameSlugify, + ) + for _response_app in response_paginator: + for _app in _response_app['Apps']: + if _app.get('Status') not in ['Deleted']: + _running_apps.append( + dict( + DomainId=_app.get('DomainId'), + UserProfileName=_app.get('UserProfileName'), + AppType=_app.get('AppType'), + AppName=_app.get('AppName'), + Status=_app.get('Status'), + ) + ) + return _running_apps + except ClientError as e: + raise e + + +def sagemaker_studio_client(sm_user: SagemakerStudioUser) -> SagemakerStudioClient: + """Factory method to retrieve the client to send requests to AWS""" + return SagemakerStudioClient(sm_user) diff --git a/backend/dataall/modules/mlstudio/cdk/__init__.py b/backend/dataall/modules/mlstudio/cdk/__init__.py new file mode 100644 index 000000000..7c6931c4c --- /dev/null +++ b/backend/dataall/modules/mlstudio/cdk/__init__.py @@ -0,0 +1,11 @@ +""" +This package contains modules that are used to create a CloudFormation stack in AWS. 
+The code is invoked in ECS Fargate to initialize the creation of the stack +""" +from dataall.modules.mlstudio.cdk import mlstudio_stack, env_role_mlstudio_policy, pivot_role_mlstudio_policy + +__all__ = [ + "mlstudio_stack", + "env_role_mlstudio_policy", + "pivot_role_mlstudio_policy" +] diff --git a/backend/dataall/cdkproxy/cfnstacks/sagemaker-user-template.yaml b/backend/dataall/modules/mlstudio/cdk/cfnstacks/sagemaker-user-template.yaml similarity index 100% rename from backend/dataall/cdkproxy/cfnstacks/sagemaker-user-template.yaml rename to backend/dataall/modules/mlstudio/cdk/cfnstacks/sagemaker-user-template.yaml diff --git a/backend/dataall/modules/mlstudio/cdk/env_role_mlstudio_policy.py b/backend/dataall/modules/mlstudio/cdk/env_role_mlstudio_policy.py new file mode 100644 index 000000000..7a47c32a0 --- /dev/null +++ b/backend/dataall/modules/mlstudio/cdk/env_role_mlstudio_policy.py @@ -0,0 +1,148 @@ +from aws_cdk import aws_iam as iam +from dataall.core.environment.cdk.env_role_core_policies.service_policy import ServicePolicy +from dataall.modules.mlstudio.services.mlstudio_permissions import CREATE_SGMSTUDIO_USER + + +class SagemakerStudioPolicy(ServicePolicy): + """ + Creates a sagemaker policy for accessing and interacting with SageMaker Studio + """ + # TODO (in cleanup tasks): Remove those policies that are only needed for Notebooks, right now we have both + def get_statements(self, group_permissions, **kwargs): + if CREATE_SGMSTUDIO_USER not in group_permissions: + return [] + + return [ + iam.PolicyStatement( + actions=[ + 'sagemaker:List*', + 'sagemaker:List*', + 'sagemaker:Describe*', + 'sagemaker:BatchGet*', + 'sagemaker:BatchDescribe*', + 'sagemaker:Search', + 'sagemaker:RenderUiTemplate', + 'sagemaker:GetSearchSuggestions', + 'sagemaker:QueryLineage', + 'sagemaker:CreateNotebookInstanceLifecycleConfig', + 'sagemaker:DeleteNotebookInstanceLifecycleConfig', + 'sagemaker:CreatePresignedDomainUrl' + ], + resources=['*'], + ), + iam.PolicyStatement( + actions=['sagemaker:AddTags'], + resources=['*'], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_key] + } + }, + ), + iam.PolicyStatement( + actions=['sagemaker:Delete*'], + resources=[ + f'arn:aws:sagemaker:{self.region}:{self.account}:notebook-instance/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:algorithm/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:model/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:endpoint/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:endpoint-config/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:experiment/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:experiment-trial/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:experiment-group/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:model-bias-job-definition/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:model-package/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:model-package-group/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:model-quality-job-definition/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:monitoring-schedule/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:pipeline/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:project/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:app/*' + ], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_key] + } + }, + ), + iam.PolicyStatement( + actions=['sagemaker:CreateApp'], + resources=['*'] + ), + iam.PolicyStatement( + 
actions=['sagemaker:Create*'], + resources=['*'], + ), + iam.PolicyStatement( + actions=['sagemaker:Start*', 'sagemaker:Stop*'], + resources=[ + f'arn:aws:sagemaker:{self.region}:{self.account}:notebook-instance/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:monitoring-schedule/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:pipeline/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:training-job/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:processing-job/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:hyper-parameter-tuning-job/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:transform-job/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:automl-job/*' + ], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_key] + } + }, + ), + iam.PolicyStatement( + actions=['sagemaker:Update*'], + resources=[ + f'arn:aws:sagemaker:{self.region}:{self.account}:notebook-instance/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:notebook-instance-lifecycle-config/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:studio-lifecycle-config/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:endpoint/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:pipeline/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:pipeline-execution/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:monitoring-schedule/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:experiment/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:experiment-trial/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:experiment-trial-component/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:model-package/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:training-job/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:project/*' + ], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_key] + } + }, + ), + iam.PolicyStatement( + actions=['sagemaker:InvokeEndpoint', 'sagemaker:InvokeEndpointAsync'], + resources=[ + f'arn:aws:sagemaker:{self.region}:{self.account}:endpoint/*' + ], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_key] + } + }, + ), + iam.PolicyStatement( + actions=[ + 'logs:CreateLogGroup', + 'logs:CreateLogStream', + 'logs:PutLogEvents'], + resources=[ + f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws/sagemaker/*', + f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws/sagemaker/*:log-stream:*', + ] + ), + iam.PolicyStatement( + actions=[ + 'ecr:GetAuthorizationToken', + 'ecr:BatchCheckLayerAvailability', + 'ecr:GetDownloadUrlForLayer', + 'ecr:BatchGetImage'], + resources=[ + '*' + ] + ) + ] diff --git a/backend/dataall/modules/mlstudio/cdk/mlstudio_stack.py b/backend/dataall/modules/mlstudio/cdk/mlstudio_stack.py new file mode 100644 index 000000000..f1de861da --- /dev/null +++ b/backend/dataall/modules/mlstudio/cdk/mlstudio_stack.py @@ -0,0 +1,300 @@ +"""" +Creates a CloudFormation stack for SageMaker Studio users using cdk +""" +import logging +import os + +from aws_cdk import ( + cloudformation_include as cfn_inc, + aws_ec2 as ec2, + aws_iam as iam, + aws_kms as kms, + aws_logs as logs, + aws_sagemaker as sagemaker, + aws_ssm as ssm, + RemovalPolicy, + Stack +) +from botocore.exceptions import ClientError + +from dataall.base.aws.parameter_store import ParameterStoreManager +from dataall.base.aws.sts import SessionHelper +from dataall.core.environment.cdk.environment_stack import EnvironmentSetup, EnvironmentStackExtension +from 
dataall.base.cdkproxy.stacks.manager import stack +from dataall.core.environment.db.environment_models import EnvironmentGroup +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.stacks.services.runtime_stacks_tagging import TagsUtil +from dataall.base.db import Engine, get_engine +from dataall.modules.mlstudio.aws.ec2_client import EC2 +from dataall.modules.mlstudio.aws.sagemaker_studio_client import get_sagemaker_studio_domain +from dataall.modules.mlstudio.db.mlstudio_models import SagemakerStudioUser +from dataall.base.utils.cdk_nag_utils import CDKNagUtil + +logger = logging.getLogger(__name__) + + +class SageMakerDomainExtension(EnvironmentStackExtension): + + @staticmethod + def extent(setup: EnvironmentSetup): + _environment = setup.environment() + with setup.get_engine().scoped_session() as session: + enabled = EnvironmentService.get_boolean_env_param(session, _environment, "mlStudiosEnabled") + if not enabled: + return + + sagemaker_principals = [setup.default_role] + setup.group_roles + logger.info(f'Creating SageMaker base resources for sagemaker_principals = {sagemaker_principals}..') + cdk_look_up_role_arn = SessionHelper.get_cdk_look_up_role_arn( + accountid=_environment.AwsAccountId, region=_environment.region + ) + existing_default_vpc = EC2.check_default_vpc_exists( + AwsAccountId=_environment.AwsAccountId, region=_environment.region, role=cdk_look_up_role_arn + ) + if existing_default_vpc: + logger.info("Using default VPC for Sagemaker Studio domain") + # Use default VPC - initial configuration (to be migrated) + vpc = ec2.Vpc.from_lookup(setup, 'VPCStudio', is_default=True) + subnet_ids = [private_subnet.subnet_id for private_subnet in vpc.private_subnets] + subnet_ids += [public_subnet.subnet_id for public_subnet in vpc.public_subnets] + subnet_ids += [isolated_subnet.subnet_id for isolated_subnet in vpc.isolated_subnets] + security_groups = [] + else: + logger.info("Default VPC not found, Exception. 
Creating a VPC for SageMaker resources...") + # Create VPC with 3 Public Subnets and 3 Private subnets wit NAT Gateways + log_group = logs.LogGroup( + setup, + f'SageMakerStudio{_environment.name}', + log_group_name=f'/{_environment.resourcePrefix}/{_environment.name}/vpc/sagemakerstudio', + retention=logs.RetentionDays.ONE_MONTH, + removal_policy=RemovalPolicy.DESTROY, + ) + vpc_flow_role = iam.Role( + setup, 'FlowLog', + assumed_by=iam.ServicePrincipal('vpc-flow-logs.amazonaws.com') + ) + vpc = ec2.Vpc( + setup, + "SageMakerVPC", + max_azs=3, + cidr="10.10.0.0/16", + subnet_configuration=[ + ec2.SubnetConfiguration( + subnet_type=ec2.SubnetType.PUBLIC, + name="Public", + cidr_mask=24 + ), + ec2.SubnetConfiguration( + subnet_type=ec2.SubnetType.PRIVATE_WITH_NAT, + name="Private", + cidr_mask=24 + ), + ], + enable_dns_hostnames=True, + enable_dns_support=True, + ) + ec2.FlowLog( + setup, "StudioVPCFlowLog", + resource_type=ec2.FlowLogResourceType.from_vpc(vpc), + destination=ec2.FlowLogDestination.to_cloud_watch_logs(log_group, vpc_flow_role) + ) + # setup security group to be used for sagemaker studio domain + sagemaker_sg = ec2.SecurityGroup( + setup, + "SecurityGroup", + vpc=vpc, + description="Security Group for SageMaker Studio", + ) + + sagemaker_sg.add_ingress_rule(sagemaker_sg, ec2.Port.all_traffic()) + security_groups = [sagemaker_sg.security_group_id] + subnet_ids = [private_subnet.subnet_id for private_subnet in vpc.private_subnets] + + vpc_id = vpc.vpc_id + + sagemaker_domain_role = iam.Role( + setup, + 'RoleForSagemakerStudioUsers', + assumed_by=iam.ServicePrincipal('sagemaker.amazonaws.com'), + role_name='RoleSagemakerStudioUsers', + managed_policies=[ + iam.ManagedPolicy.from_managed_policy_arn( + setup, + id='SagemakerFullAccess', + managed_policy_arn='arn:aws:iam::aws:policy/AmazonSageMakerFullAccess', + ), + iam.ManagedPolicy.from_managed_policy_arn( + setup, id='S3FullAccess', managed_policy_arn='arn:aws:iam::aws:policy/AmazonS3FullAccess' + ), + ], + ) + + sagemaker_domain_key = kms.Key( + setup, + 'SagemakerDomainKmsKey', + alias='SagemakerStudioDomain', + enable_key_rotation=True, + admins=[ + iam.ArnPrincipal(_environment.CDKRoleArn) + ], + policy=iam.PolicyDocument( + assign_sids=True, + statements=[ + iam.PolicyStatement( + actions=[ + "kms:Encrypt", + "kms:Decrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*", + "kms:CreateGrant" + ], + effect=iam.Effect.ALLOW, + principals=[ + sagemaker_domain_role, + iam.ArnPrincipal(_environment.CDKRoleArn) + ] + sagemaker_principals, + resources=["*"], + conditions={ + "StringEquals": { + "kms:ViaService": [ + f"sagemaker.{_environment.region}.amazonaws.com", + f"elasticfilesystem.{_environment.region}.amazonaws.com", + f"ec2.{_environment.region}.amazonaws.com", + f"s3.{_environment.region}.amazonaws.com" + ] + } + } + ), + iam.PolicyStatement( + actions=[ + "kms:DescribeKey", + "kms:List*", + "kms:GetKeyPolicy", + ], + effect=iam.Effect.ALLOW, + principals=[ + sagemaker_domain_role, + ] + sagemaker_principals, + resources=["*"], + ) + ], + ), + ) + + sagemaker_domain = sagemaker.CfnDomain( + setup, + 'SagemakerStudioDomain', + domain_name=f'SagemakerStudioDomain-{_environment.region}-{_environment.AwsAccountId}', + auth_mode='IAM', + default_user_settings=sagemaker.CfnDomain.UserSettingsProperty( + execution_role=sagemaker_domain_role.role_arn, + security_groups=security_groups, + sharing_settings=sagemaker.CfnDomain.SharingSettingsProperty( + notebook_output_option='Allowed', + s3_kms_key_id=sagemaker_domain_key.key_id, + 
s3_output_path=f's3://sagemaker-{_environment.region}-{_environment.AwsAccountId}', + ), + ), + vpc_id=vpc_id, + subnet_ids=subnet_ids, + app_network_access_type='VpcOnly', + kms_key_id=sagemaker_domain_key.key_id, + ) + + ssm.StringParameter( + setup, + 'SagemakerStudioDomainId', + string_value=sagemaker_domain.attr_domain_id, + parameter_name=f'/dataall/{_environment.environmentUri}/sagemaker/sagemakerstudio/domain_id', + ) + return sagemaker_domain + + @staticmethod + def check_existing_sagemaker_studio_domain(environment): + logger.info('Check if there is an existing sagemaker studio domain in the account') + try: + logger.info('check sagemaker studio domain created as part of data.all environment stack.') + cdk_look_up_role_arn = SessionHelper.get_cdk_look_up_role_arn( + accountid=environment.AwsAccountId, region=environment.region + ) + dataall_created_domain = ParameterStoreManager.client( + AwsAccountId=environment.AwsAccountId, region=environment.region, role=cdk_look_up_role_arn + ).get_parameter(Name=f'/dataall/{environment.environmentUri}/sagemaker/sagemakerstudio/domain_id') + return False + except ClientError as e: + logger.info(f'check sagemaker studio domain created outside of data.all. Parameter data.all not found: {e}') + existing_domain = get_sagemaker_studio_domain( + AwsAccountId=environment.AwsAccountId, region=environment.region, role=cdk_look_up_role_arn + ) + return existing_domain.get('DomainId', False) + + +@stack(stack='mlstudio') +class SagemakerStudioUserProfile(Stack): + """ + Creation of a sagemaker studio user stack. + Having imported the mlstudio module, the class registers itself using @stack + Then it will be reachable by HTTP request / using SQS from GraphQL lambda + """ + module_name = __file__ + + def get_engine(self) -> Engine: + envname = os.environ.get('envname', 'local') + engine = get_engine(envname=envname) + return engine + + def get_target(self, target_uri) -> SagemakerStudioUser: + engine = self.get_engine() + with engine.scoped_session() as session: + sm_user = session.query(SagemakerStudioUser).get( + target_uri + ) + return sm_user + + def get_env_group( + self, sm_user: SagemakerStudioUser + ) -> EnvironmentGroup: + engine = self.get_engine() + with engine.scoped_session() as session: + env_group = EnvironmentService.get_environment_group( + session, sm_user.SamlAdminGroupName, sm_user.environmentUri, + ) + return env_group + + def __init__(self, scope, id: str, target_uri: str = None, **kwargs) -> None: + super().__init__(scope, + id, + description="Cloud formation stack of SM STUDIO USER: {}; URI: {}; DESCRIPTION: {}".format( + self.get_target(target_uri=target_uri).label, + target_uri, + self.get_target(target_uri=target_uri).description, + )[:1024], + **kwargs) + # Required for dynamic stack tagging + self.target_uri = target_uri + sm_user: SagemakerStudioUser = self.get_target(target_uri=self.target_uri) + print(f"sm_user= {sm_user}") + env_group = self.get_env_group(sm_user) + cfn_template_user = os.path.join( + os.path.dirname(__file__), 'cfnstacks', 'sagemaker-user-template.yaml' + ) + print(f"path:{cfn_template_user}") + user_parameters = dict( + sagemaker_domain_id=sm_user.sagemakerStudioDomainID, + user_profile_name=sm_user.sagemakerStudioUserNameSlugify, + execution_role=env_group.environmentIAMRoleArn, + ) + logger.info(f'Creating the SageMaker Studio user {user_parameters}') + my_sagemaker_studio_user_template = cfn_inc.CfnInclude( + self, + f'SagemakerStudioUser{self.target_uri}', + template_file=cfn_template_user, + 
parameters=user_parameters, + ) + self.sm_user_arn = ( + my_sagemaker_studio_user_template.get_resource('SagemakerUser') + .get_att('UserProfileArn') + .to_string() + ) + TagsUtil.add_tags(stack=self, model=SagemakerStudioUser, target_type="smstudiouser") + CDKNagUtil.check_rules(self) diff --git a/backend/dataall/modules/mlstudio/cdk/pivot_role_mlstudio_policy.py b/backend/dataall/modules/mlstudio/cdk/pivot_role_mlstudio_policy.py new file mode 100644 index 000000000..5af9e1740 --- /dev/null +++ b/backend/dataall/modules/mlstudio/cdk/pivot_role_mlstudio_policy.py @@ -0,0 +1,51 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class MLStudioPivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with AWS SageMaker. + It allows the pivot role to: + - .... + """ + def get_statements(self): + statements = [ + iam.PolicyStatement( + sid='SageMakerDomainActions', + effect=iam.Effect.ALLOW, + actions=[ + 'sagemaker:ListTags', + 'sagemaker:DescribeUserProfile', + 'sagemaker:AddTags', + 'sagemaker:DescribeDomain', + 'sagemaker:CreatePresignedDomainUrl', + ], + resources=[ + f'arn:aws:sagemaker:*:{self.account}:domain/*', + f'arn:aws:sagemaker:*:{self.account}:user-profile/*/*', + ], + ), + iam.PolicyStatement( + sid='SageMakerDomainsAppsList', + effect=iam.Effect.ALLOW, + actions=[ + 'sagemaker:ListDomains', + 'sagemaker:ListApps', + 'sagemaker:DeleteApp', + ], + resources=['*'], + ), + iam.PolicyStatement( + sid='EC2SGMLStudio', + effect=iam.Effect.ALLOW, + actions=[ + 'ec2:DescribeSubnets', + 'ec2:DescribeSecurityGroups', + 'ec2:DescribeVpcs', + 'ec2:DescribeInstances', + 'ec2:DescribeNetworkInterfaces', + ], + resources=['*'], + ), + ] + return statements diff --git a/backend/dataall/modules/mlstudio/db/__init__.py b/backend/dataall/modules/mlstudio/db/__init__.py new file mode 100644 index 000000000..86631d191 --- /dev/null +++ b/backend/dataall/modules/mlstudio/db/__init__.py @@ -0,0 +1 @@ +"""Contains code that interacts with the database""" diff --git a/backend/dataall/modules/mlstudio/db/mlstudio_models.py b/backend/dataall/modules/mlstudio/db/mlstudio_models.py new file mode 100644 index 000000000..032826588 --- /dev/null +++ b/backend/dataall/modules/mlstudio/db/mlstudio_models.py @@ -0,0 +1,42 @@ +"""ORM models for sagemaker studio""" + +from sqlalchemy import Column, String, ForeignKey +from sqlalchemy.orm import query_expression + +from dataall.base.db import Base +from dataall.base.db import Resource, utils + + +class SagemakerStudioDomain(Resource, Base): + """Describes ORM model for sagemaker ML Studio domain""" + __tablename__ = 'sagemaker_studio_domain' + environmentUri = Column(String, nullable=False) + sagemakerStudioUri = Column( + String, primary_key=True, default=utils.uuid('sagemakerstudio') + ) + sagemakerStudioDomainID = Column(String, nullable=False) + SagemakerStudioStatus = Column(String, nullable=False) + AWSAccountId = Column(String, nullable=False) + RoleArn = Column(String, nullable=False) + region = Column(String, default='eu-west-1') + userRoleForSagemakerStudio = query_expression() + + +class SagemakerStudioUser(Resource, Base): + """Describes ORM model for sagemaker ML Studio user""" + __tablename__ = 'sagemaker_studio_user_profile' + environmentUri = Column(String, ForeignKey("environment.environmentUri"), nullable=False) + sagemakerStudioUserUri = Column( + String, primary_key=True, default=utils.uuid('sagemakerstudiouserprofile') + ) 
+ sagemakerStudioUserStatus = Column(String, nullable=False) + sagemakerStudioUserName = Column(String, nullable=False) + sagemakerStudioUserNameSlugify = Column( + String, nullable=False, default=utils.slugifier('label') + ) + sagemakerStudioDomainID = Column(String, nullable=False) + AWSAccountId = Column(String, nullable=False) + RoleArn = Column(String, nullable=False) + region = Column(String, default='eu-west-1') + SamlAdminGroupName = Column(String, nullable=True) + userRoleForSagemakerStudioUser = query_expression() diff --git a/backend/dataall/modules/mlstudio/db/mlstudio_repositories.py b/backend/dataall/modules/mlstudio/db/mlstudio_repositories.py new file mode 100644 index 000000000..763ca6f92 --- /dev/null +++ b/backend/dataall/modules/mlstudio/db/mlstudio_repositories.py @@ -0,0 +1,69 @@ +""" +DAO layer that encapsulates the logic and interaction with the database for ML Studio +Provides the API to retrieve / update / delete ml studio +""" +from sqlalchemy import or_ +from sqlalchemy.sql import and_ +from sqlalchemy.orm import Query + +from dataall.base.db import paginate +from dataall.modules.mlstudio.db.mlstudio_models import SagemakerStudioUser +from dataall.core.environment.services.environment_resource_manager import EnvironmentResource + + +class SageMakerStudioRepository(EnvironmentResource): + """DAO layer for ML Studio""" + _DEFAULT_PAGE = 1 + _DEFAULT_PAGE_SIZE = 10 + + def __init__(self, session): + self._session = session + + def save_sagemaker_studio_user(self, user): + """Save SageMaker Studio user to the database""" + self._session.add(user) + self._session.commit() + + def _query_user_sagemaker_studio_users(self, username, groups, filter) -> Query: + query = self._session.query(SagemakerStudioUser).filter( + or_( + SagemakerStudioUser.owner == username, + SagemakerStudioUser.SamlAdminGroupName.in_(groups), + ) + ) + if filter and filter.get('term'): + query = query.filter( + or_( + SagemakerStudioUser.description.ilike( + filter.get('term') + '%%' + ), + SagemakerStudioUser.label.ilike( + filter.get('term') + '%%' + ), + ) + ) + return query + + def paginated_sagemaker_studio_users(self, username, groups, filter=None) -> dict: + """Returns a page of sagemaker studio users for a data.all user""" + return paginate( + query=self._query_user_sagemaker_studio_users(username, groups, filter), + page=filter.get('page', SageMakerStudioRepository._DEFAULT_PAGE), + page_size=filter.get('pageSize', SageMakerStudioRepository._DEFAULT_PAGE_SIZE), + ).to_dict() + + def find_sagemaker_studio_user(self, uri): + """Finds a sagemaker studio user. Returns None if it doesn't exist""" + return self._session.query(SagemakerStudioUser).get(uri) + + def count_resources(self, environment, group_uri): + return ( + self._session.query(SagemakerStudioUser) + .filter( + and_( + SagemakerStudioUser.environmentUri == environment.environmentUri, + SagemakerStudioUser.SamlAdminGroupName == group_uri + ) + ) + .count() + ) diff --git a/backend/dataall/modules/mlstudio/services/__init__.py b/backend/dataall/modules/mlstudio/services/__init__.py new file mode 100644 index 000000000..75f3291b1 --- /dev/null +++ b/backend/dataall/modules/mlstudio/services/__init__.py @@ -0,0 +1,7 @@ +""" +Contains the code needed for service layer. 
+The service layer is a layer where all business logic is aggregated
+"""
+from dataall.modules.mlstudio.services import mlstudio_service, mlstudio_permissions
+
+__all__ = ["mlstudio_service", "mlstudio_permissions"]
diff --git a/backend/dataall/modules/mlstudio/services/mlstudio_permissions.py b/backend/dataall/modules/mlstudio/services/mlstudio_permissions.py
new file mode 100644
index 000000000..530c41838
--- /dev/null
+++ b/backend/dataall/modules/mlstudio/services/mlstudio_permissions.py
@@ -0,0 +1,72 @@
+"""
+Add module's permissions to the global permissions.
+Contains permissions for sagemaker ML Studio
+There are different types of permissions:
+TENANT_PERMISSIONS
+    Granted to the Tenant group. For each resource we should define a corresponding MANAGE_ permission
+ENVIRONMENT_PERMISSIONS
+    Granted to any group in an environment. For each resource we should define a list of actions regarding
+    that resource that are executed on the environment (e.g. List resources X in an environment)
+ENVIRONMENT_INVITED_PERMISSIONS
+
+ENVIRONMENT_INVITATION_REQUEST
+
+RESOURCE_PERMISSION
+    Granted to any group. For each resource we should define a list of all actions that can be done on the resource.
+    We also need to add the permissions for the Environment resource (ENVIRONMENT_PERMISSIONS)
+
+"""
+
+from dataall.core.permissions.permissions import (
+    ENVIRONMENT_ALL,
+    ENVIRONMENT_INVITED,
+    RESOURCES_ALL_WITH_DESC,
+    RESOURCES_ALL,
+    ENVIRONMENT_INVITATION_REQUEST,
+    TENANT_ALL,
+    TENANT_ALL_WITH_DESC
+)
+
+# Definition of TENANT_PERMISSIONS for SageMaker ML Studio
+MANAGE_SGMSTUDIO_USERS = 'MANAGE_SGMSTUDIO_USERS'
+
+TENANT_ALL.append(MANAGE_SGMSTUDIO_USERS)
+TENANT_ALL_WITH_DESC[MANAGE_SGMSTUDIO_USERS] = 'Manage SageMaker Studio users'
+
+
+# Definition of ENVIRONMENT_PERMISSIONS for SageMaker ML Studio
+CREATE_SGMSTUDIO_USER = 'CREATE_SGMSTUDIO_USER'
+# TODO: cleanup permissions = LIST_ENVIRONMENT_SGMSTUDIO_USERS and other LIST_ENVIRONMENT permissions
+LIST_ENVIRONMENT_SGMSTUDIO_USERS = 'LIST_ENVIRONMENT_SGMSTUDIO_USERS'
+
+ENVIRONMENT_ALL.append(CREATE_SGMSTUDIO_USER)
+ENVIRONMENT_ALL.append(LIST_ENVIRONMENT_SGMSTUDIO_USERS)
+ENVIRONMENT_INVITED.append(CREATE_SGMSTUDIO_USER)
+ENVIRONMENT_INVITED.append(LIST_ENVIRONMENT_SGMSTUDIO_USERS)
+ENVIRONMENT_INVITATION_REQUEST.append(CREATE_SGMSTUDIO_USER)
+ENVIRONMENT_INVITATION_REQUEST.append(LIST_ENVIRONMENT_SGMSTUDIO_USERS)
+
+# Definition of RESOURCE_PERMISSIONS for SageMaker ML Studio
+GET_SGMSTUDIO_USER = 'GET_SGMSTUDIO_USER'
+UPDATE_SGMSTUDIO_USER = 'UPDATE_SGMSTUDIO_USER'
+DELETE_SGMSTUDIO_USER = 'DELETE_SGMSTUDIO_USER'
+SGMSTUDIO_USER_URL = 'SGMSTUDIO_USER_URL'
+
+SGMSTUDIO_USER_ALL = [
+    GET_SGMSTUDIO_USER,
+    UPDATE_SGMSTUDIO_USER,
+    DELETE_SGMSTUDIO_USER,
+    SGMSTUDIO_USER_URL,
+]
+
+RESOURCES_ALL.extend(SGMSTUDIO_USER_ALL)
+RESOURCES_ALL.append(CREATE_SGMSTUDIO_USER)
+RESOURCES_ALL.append(LIST_ENVIRONMENT_SGMSTUDIO_USERS)
+
+
+RESOURCES_ALL_WITH_DESC[GET_SGMSTUDIO_USER] = "General permission to get a SageMaker Studio user"
+RESOURCES_ALL_WITH_DESC[UPDATE_SGMSTUDIO_USER] = "Permission to update a SageMaker Studio user"
+RESOURCES_ALL_WITH_DESC[DELETE_SGMSTUDIO_USER] = "Permission to delete a SageMaker Studio user"
+RESOURCES_ALL_WITH_DESC[SGMSTUDIO_USER_URL] = "Permission to generate the URL for a SageMaker Studio user"
+RESOURCES_ALL_WITH_DESC[CREATE_SGMSTUDIO_USER] = "Create SageMaker Studio users on this environment"
+RESOURCES_ALL_WITH_DESC[LIST_ENVIRONMENT_SGMSTUDIO_USERS] = "List SageMaker Studio users on this environment"
diff --git
a/backend/dataall/modules/mlstudio/services/mlstudio_service.py b/backend/dataall/modules/mlstudio/services/mlstudio_service.py new file mode 100644 index 000000000..06750b822 --- /dev/null +++ b/backend/dataall/modules/mlstudio/services/mlstudio_service.py @@ -0,0 +1,203 @@ +""" +A service layer for ml studio +Central part for working with ml studio +""" +import dataclasses +import logging +from dataclasses import dataclass, field +from typing import List, Dict + +from dataall.base.context import get_context +from dataall.core.environment.env_permission_checker import has_group_permission +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.permissions.permission_checker import has_resource_permission, has_tenant_permission +from dataall.core.stacks.api import stack_helper +from dataall.core.stacks.db.stack_repositories import Stack +from dataall.base.db import exceptions +from dataall.modules.mlstudio.aws.sagemaker_studio_client import sagemaker_studio_client, get_sagemaker_studio_domain +from dataall.modules.mlstudio.db.mlstudio_repositories import SageMakerStudioRepository +from dataall.modules.mlstudio.db.mlstudio_models import SagemakerStudioUser +from dataall.modules.mlstudio.services.mlstudio_permissions import ( + MANAGE_SGMSTUDIO_USERS, + CREATE_SGMSTUDIO_USER, + SGMSTUDIO_USER_ALL, + GET_SGMSTUDIO_USER, + SGMSTUDIO_USER_URL, + DELETE_SGMSTUDIO_USER, +) +from dataall.base.utils import slugify + +logger = logging.getLogger(__name__) + + +@dataclass +class SagemakerStudioCreationRequest: + """A request dataclass for ml studio user profile creation. Adds default values for missed parameters""" + label: str + SamlAdminGroupName: str + environment: Dict = field(default_factory=dict) + description: str = "No description provided" + tags: List[str] = field(default_factory=list) + + @classmethod + def from_dict(cls, env): + """Copies only required fields from the dictionary and creates an instance of class""" + fields = set([f.name for f in dataclasses.fields(cls)]) + return cls(**{ + k: v for k, v in env.items() + if k in fields + }) + + +def _session(): + return get_context().db_engine.scoped_session() + + +class SagemakerStudioService: + """ + Encapsulate the logic of interactions with sagemaker ml studio. 
+ """ + @staticmethod + @has_tenant_permission(MANAGE_SGMSTUDIO_USERS) + @has_resource_permission(CREATE_SGMSTUDIO_USER) + @has_group_permission(CREATE_SGMSTUDIO_USER) + def create_sagemaker_studio_user(*, uri: str, admin_group: str, request: SagemakerStudioCreationRequest): + """ + Creates an ML Studio user + Throws an exception if ML Studio is not enabled for the environment + Throws an exception if a SageMaker domain is not found + """ + with _session() as session: + env = EnvironmentService.get_environment_by_uri(session, uri) + enabled = EnvironmentService.get_boolean_env_param(session, env, "mlStudiosEnabled") + + if not enabled: + raise exceptions.UnauthorizedOperation( + action=CREATE_SGMSTUDIO_USER, + message=f'ML Studio feature is disabled for the environment {env.label}', + ) + response = get_sagemaker_studio_domain( + AwsAccountId=env.AwsAccountId, + region=env.region + ) + existing_domain = response.get('DomainId', False) + + if not existing_domain: + raise exceptions.AWSResourceNotAvailable( + action='Sagemaker Studio domain', + message='Update the environment stack ' + 'or create a Sagemaker studio domain on your AWS account.', + ) + + sagemaker_studio_user = SagemakerStudioUser( + label=request.label, + environmentUri=env.environmentUri, + description=request.description, + sagemakerStudioUserName=slugify(request.label, separator=''), + sagemakerStudioUserStatus='PENDING', + sagemakerStudioDomainID=existing_domain, + AWSAccountId=env.AwsAccountId, + region=env.region, + RoleArn=env.EnvironmentDefaultIAMRoleArn, + owner=get_context().username, + SamlAdminGroupName=admin_group, + tags=request.tags, + ) + SageMakerStudioRepository(session).save_sagemaker_studio_user(user=sagemaker_studio_user) + + ResourcePolicy.attach_resource_policy( + session=session, + group=request.SamlAdminGroupName, + permissions=SGMSTUDIO_USER_ALL, + resource_uri=sagemaker_studio_user.sagemakerStudioUserUri, + resource_type=SagemakerStudioUser.__name__, + ) + + if env.SamlGroupName != sagemaker_studio_user.SamlAdminGroupName: + ResourcePolicy.attach_resource_policy( + session=session, + group=env.SamlGroupName, + permissions=SGMSTUDIO_USER_ALL, + resource_uri=sagemaker_studio_user.sagemakerStudioUserUri, + resource_type=SagemakerStudioUser.__name__, + ) + + Stack.create_stack( + session=session, + environment_uri=sagemaker_studio_user.environmentUri, + target_type='mlstudio', + target_uri=sagemaker_studio_user.sagemakerStudioUserUri, + target_label=sagemaker_studio_user.label, + ) + + stack_helper.deploy_stack(targetUri=sagemaker_studio_user.sagemakerStudioUserUri) + + return sagemaker_studio_user + + @staticmethod + def list_sagemaker_studio_users(*, filter: dict) -> dict: + with _session() as session: + return SageMakerStudioRepository(session).paginated_sagemaker_studio_users( + username=get_context().username, + groups=get_context().groups, + filter=filter, + ) + + @staticmethod + @has_resource_permission(GET_SGMSTUDIO_USER) + def get_sagemaker_studio_user(*, uri: str): + with _session() as session: + return SagemakerStudioService._get_sagemaker_studio_user(session, uri) + + @staticmethod + def get_sagemaker_studio_user_status(*, uri: str): + with _session() as session: + user = SagemakerStudioService._get_sagemaker_studio_user(session, uri) + status = sagemaker_studio_client(user).get_sagemaker_studio_user_status() + user.sagemakerStudioUserStatus = status + return status + + @staticmethod + @has_resource_permission(SGMSTUDIO_USER_URL) + def get_sagemaker_studio_user_presigned_url(*, uri: str): + 
with _session() as session: + user = SagemakerStudioService._get_sagemaker_studio_user(session, uri) + return sagemaker_studio_client(user).get_sagemaker_studio_user_presigned_url() + + @staticmethod + def get_sagemaker_studio_user_applications(*, uri: str): + with _session() as session: + user = SagemakerStudioService._get_sagemaker_studio_user(session, uri) + return sagemaker_studio_client(user).get_sagemaker_studio_user_applications() + + @staticmethod + @has_resource_permission(DELETE_SGMSTUDIO_USER) + def delete_sagemaker_studio_user(*, uri: str, delete_from_aws: bool): + """Deletes SageMaker Studio user from the database and if delete_from_aws is True from AWS as well""" + with _session() as session: + user = SagemakerStudioService._get_sagemaker_studio_user(session, uri) + env = EnvironmentService.get_environment_by_uri(session, user.environmentUri) + session.delete(user) + + ResourcePolicy.delete_resource_policy( + session=session, + resource_uri=user.sagemakerStudioUserUri, + group=user.SamlAdminGroupName, + ) + + if delete_from_aws: + stack_helper.delete_stack( + target_uri=uri, + accountid=env.AwsAccountId, + cdk_role_arn=env.CDKRoleArn, + region=env.region + ) + return True + + @staticmethod + def _get_sagemaker_studio_user(session, uri): + user = SageMakerStudioRepository(session).find_sagemaker_studio_user(uri=uri) + if not user: + raise exceptions.ObjectNotFound('SagemakerStudioUser', uri) + return user diff --git a/backend/dataall/modules/notebooks/__init__.py b/backend/dataall/modules/notebooks/__init__.py new file mode 100644 index 000000000..b2a10b065 --- /dev/null +++ b/backend/dataall/modules/notebooks/__init__.py @@ -0,0 +1,35 @@ +"""Contains the code related to SageMaker notebooks""" +import logging + +from dataall.base.loader import ImportMode, ModuleInterface +from dataall.core.stacks.db.target_type_repositories import TargetType + +log = logging.getLogger(__name__) + + +class NotebookApiModuleInterface(ModuleInterface): + """Implements ModuleInterface for notebook GraphQl lambda""" + + @staticmethod + def is_supported(modes): + return ImportMode.API in modes + + def __init__(self): + import dataall.modules.notebooks.api + + from dataall.modules.notebooks.services.notebook_permissions import GET_NOTEBOOK, UPDATE_NOTEBOOK + TargetType("notebook", GET_NOTEBOOK, UPDATE_NOTEBOOK) + + log.info("API of sagemaker notebooks has been imported") + + +class NotebookCdkModuleInterface(ModuleInterface): + """Implements ModuleInterface for notebook ecs tasks""" + + @staticmethod + def is_supported(modes): + return ImportMode.CDK in modes + + def __init__(self): + import dataall.modules.notebooks.cdk + log.info("API of sagemaker notebooks has been imported") diff --git a/backend/dataall/modules/notebooks/api/__init__.py b/backend/dataall/modules/notebooks/api/__init__.py new file mode 100644 index 000000000..244e796d6 --- /dev/null +++ b/backend/dataall/modules/notebooks/api/__init__.py @@ -0,0 +1,4 @@ +"""The package defines the schema for SageMaker notebooks""" +from dataall.modules.notebooks.api import input_types, mutations, queries, types, resolvers + +__all__ = ["types", "input_types", "queries", "mutations", "resolvers"] diff --git a/backend/dataall/modules/notebooks/api/enums.py b/backend/dataall/modules/notebooks/api/enums.py new file mode 100644 index 000000000..3f276f51a --- /dev/null +++ b/backend/dataall/modules/notebooks/api/enums.py @@ -0,0 +1,10 @@ +"""Contains the enums GraphQL mapping for SageMaker notebooks """ +from dataall.base.api.constants import 
GraphQLEnumMapper + + +class SagemakerNotebookRole(GraphQLEnumMapper): + """Describes the SageMaker Notebook roles""" + + CREATOR = "950" + ADMIN = "900" + NO_PERMISSION = "000" diff --git a/backend/dataall/modules/notebooks/api/input_types.py b/backend/dataall/modules/notebooks/api/input_types.py new file mode 100644 index 000000000..9e072bc43 --- /dev/null +++ b/backend/dataall/modules/notebooks/api/input_types.py @@ -0,0 +1,39 @@ +"""The module defines GraphQL input types for the SageMaker notebooks""" +from dataall.base.api import gql + +NewSagemakerNotebookInput = gql.InputType( + name="NewSagemakerNotebookInput ", + arguments=[ + gql.Argument("label", gql.NonNullableType(gql.String)), + gql.Argument("description", gql.String), + gql.Argument("environmentUri", gql.NonNullableType(gql.String)), + gql.Argument("SamlAdminGroupName", gql.NonNullableType(gql.String)), + gql.Argument("tags", gql.ArrayType(gql.String)), + gql.Argument("topics", gql.String), + gql.Argument("VpcId", gql.String), + gql.Argument("SubnetId", gql.String), + gql.Argument("VolumeSizeInGB", gql.Integer), + gql.Argument("InstanceType", gql.String), + ], +) + +ModifySagemakerNotebookInput = gql.InputType( + name="ModifySagemakerNotebookInput", + arguments=[ + gql.Argument("label", gql.String), + gql.Argument("tags", gql.ArrayType(gql.String)), + gql.Argument("description", gql.String), + ], +) + +SagemakerNotebookFilter = gql.InputType( + name="SagemakerNotebookFilter", + arguments=[ + gql.Argument("term", gql.String), + gql.Argument("page", gql.Integer), + gql.Argument("pageSize", gql.Integer), + gql.Argument("sort", gql.String), + gql.Argument("limit", gql.Integer), + gql.Argument("offset", gql.Integer), + ], +) diff --git a/backend/dataall/modules/notebooks/api/mutations.py b/backend/dataall/modules/notebooks/api/mutations.py new file mode 100644 index 000000000..e7801f74e --- /dev/null +++ b/backend/dataall/modules/notebooks/api/mutations.py @@ -0,0 +1,39 @@ +"""The module defines GraphQL mutations for the SageMaker notebooks""" +from dataall.base.api import gql +from dataall.modules.notebooks.api.resolvers import ( + create_notebook, + delete_notebook, + start_notebook, + stop_notebook, +) + +createSagemakerNotebook = gql.MutationField( + name="createSagemakerNotebook", + args=[gql.Argument(name="input", type=gql.Ref("NewSagemakerNotebookInput"))], + type=gql.Ref("SagemakerNotebook"), + resolver=create_notebook, +) + +startSagemakerNotebook = gql.MutationField( + name="startSagemakerNotebook", + args=[gql.Argument(name="notebookUri", type=gql.NonNullableType(gql.String))], + type=gql.String, + resolver=start_notebook, +) + +stopSagemakerNotebook = gql.MutationField( + name="stopSagemakerNotebook", + args=[gql.Argument(name="notebookUri", type=gql.NonNullableType(gql.String))], + type=gql.String, + resolver=stop_notebook, +) + +deleteSagemakerNotebook = gql.MutationField( + name="deleteSagemakerNotebook", + args=[ + gql.Argument(name="notebookUri", type=gql.NonNullableType(gql.String)), + gql.Argument(name="deleteFromAWS", type=gql.Boolean), + ], + type=gql.String, + resolver=delete_notebook, +) diff --git a/backend/dataall/modules/notebooks/api/queries.py b/backend/dataall/modules/notebooks/api/queries.py new file mode 100644 index 000000000..134fde189 --- /dev/null +++ b/backend/dataall/modules/notebooks/api/queries.py @@ -0,0 +1,28 @@ +"""The module defines GraphQL queries for the SageMaker notebooks""" +from dataall.base.api import gql +from dataall.modules.notebooks.api.resolvers import ( + get_notebook, + 
list_notebooks, + get_notebook_presigned_url +) + +getSagemakerNotebook = gql.QueryField( + name="getSagemakerNotebook", + args=[gql.Argument(name="notebookUri", type=gql.NonNullableType(gql.String))], + type=gql.Ref("SagemakerNotebook"), + resolver=get_notebook, +) + +listSagemakerNotebooks = gql.QueryField( + name="listSagemakerNotebooks", + args=[gql.Argument("filter", gql.Ref("SagemakerNotebookFilter"))], + type=gql.Ref("SagemakerNotebookSearchResult"), + resolver=list_notebooks, +) + +getSagemakerNotebookPresignedUrl = gql.QueryField( + name="getSagemakerNotebookPresignedUrl", + args=[gql.Argument(name="notebookUri", type=gql.NonNullableType(gql.String))], + type=gql.String, + resolver=get_notebook_presigned_url, +) diff --git a/backend/dataall/modules/notebooks/api/resolvers.py b/backend/dataall/modules/notebooks/api/resolvers.py new file mode 100644 index 000000000..c30e7c00e --- /dev/null +++ b/backend/dataall/modules/notebooks/api/resolvers.py @@ -0,0 +1,119 @@ +from dataall.base.api.context import Context +from dataall.core.stacks.api import stack_helper +from dataall.base.db import exceptions +from dataall.modules.notebooks.api.enums import SagemakerNotebookRole +from dataall.modules.notebooks.db.notebook_models import SagemakerNotebook +from dataall.modules.notebooks.services.notebook_service import NotebookService, NotebookCreationRequest + + +def create_notebook(context: Context, source: SagemakerNotebook, input: dict = None): + """Creates a SageMaker notebook. Deploys the notebooks stack into AWS""" + RequestValidator.validate_creation_request(input) + request = NotebookCreationRequest.from_dict(input) + return NotebookService.create_notebook( + uri=input["environmentUri"], + admin_group=input["SamlAdminGroupName"], + request=request + ) + + +def list_notebooks(context, source, filter: dict = None): + """ + Lists all SageMaker notebooks using the given filter. + If the filter is not provided, all notebooks are returned. + """ + + if not filter: + filter = {} + return NotebookService.list_user_notebooks(filter) + + +def get_notebook(context, source, notebookUri: str = None): + """Retrieve a SageMaker notebook by URI.""" + RequestValidator.required_uri(notebookUri) + return NotebookService.get_notebook(uri=notebookUri) + + +def resolve_notebook_status(context, source: SagemakerNotebook, **kwargs): + """Resolves the status of a notebook.""" + if not source: + return None + return NotebookService.get_notebook_status(uri=source.notebookUri) + + +def start_notebook(context, source: SagemakerNotebook, notebookUri: str = None): + """Starts a sagemaker notebook instance""" + RequestValidator.required_uri(notebookUri) + NotebookService.start_notebook(uri=notebookUri) + return 'Starting' + + +def stop_notebook(context, source: SagemakerNotebook, notebookUri: str = None): + """Stops a notebook instance.""" + RequestValidator.required_uri(notebookUri) + NotebookService.stop_notebook(uri=notebookUri) + return 'Stopping' + + +def get_notebook_presigned_url(context, source: SagemakerNotebook, notebookUri: str = None): + """Creates and returns a presigned url for a notebook""" + RequestValidator.required_uri(notebookUri) + return NotebookService.get_notebook_presigned_url(uri=notebookUri) + + +def delete_notebook( + context, + source: SagemakerNotebook, + notebookUri: str = None, + deleteFromAWS: bool = None, +): + """ + Deletes the SageMaker notebook. 
+ Deletes the notebooks stack from AWS if deleteFromAWS is True + """ + RequestValidator.required_uri(notebookUri) + NotebookService.delete_notebook(uri=notebookUri, delete_from_aws=deleteFromAWS) + return True + + +def resolve_user_role(context: Context, source: SagemakerNotebook): + if not source: + return None + if source.owner == context.username: + return SagemakerNotebookRole.CREATOR.value + elif context.groups and source.SamlAdminGroupName in context.groups: + return SagemakerNotebookRole.ADMIN.value + return SagemakerNotebookRole.NO_PERMISSION.value + + +def resolve_notebook_stack(context: Context, source: SagemakerNotebook, **kwargs): + if not source: + return None + return stack_helper.get_stack_with_cfn_resources( + targetUri=source.notebookUri, + environmentUri=source.environmentUri, + ) + + +class RequestValidator: + """Aggregates all validation logic for operating with notebooks""" + @staticmethod + def required_uri(uri): + if not uri: + raise exceptions.RequiredParameter('URI') + + @staticmethod + def validate_creation_request(data): + required = RequestValidator._required + if not data: + raise exceptions.RequiredParameter('data') + if not data.get('label'): + raise exceptions.RequiredParameter('name') + + required(data, "environmentUri") + required(data, "SamlAdminGroupName") + + @staticmethod + def _required(data: dict, name: str): + if not data.get(name): + raise exceptions.RequiredParameter(name) diff --git a/backend/dataall/modules/notebooks/api/types.py b/backend/dataall/modules/notebooks/api/types.py new file mode 100644 index 000000000..f08d395ce --- /dev/null +++ b/backend/dataall/modules/notebooks/api/types.py @@ -0,0 +1,62 @@ +"""Defines the object types of the SageMaker notebooks""" +from dataall.base.api import gql +from dataall.modules.notebooks.api.resolvers import ( + resolve_notebook_stack, + resolve_notebook_status, + resolve_user_role, +) + +from dataall.core.environment.api.resolvers import resolve_environment +from dataall.core.organizations.api.resolvers import resolve_organization_by_env + +from dataall.modules.notebooks.api.enums import SagemakerNotebookRole + +SagemakerNotebook = gql.ObjectType( + name="SagemakerNotebook", + fields=[ + gql.Field(name="notebookUri", type=gql.ID), + gql.Field(name="environmentUri", type=gql.NonNullableType(gql.String)), + gql.Field(name="label", type=gql.String), + gql.Field(name="description", type=gql.String), + gql.Field(name="tags", type=gql.ArrayType(gql.String)), + gql.Field(name="name", type=gql.String), + gql.Field(name="owner", type=gql.String), + gql.Field(name="created", type=gql.String), + gql.Field(name="updated", type=gql.String), + gql.Field(name="SamlAdminGroupName", type=gql.String), + gql.Field(name="VpcId", type=gql.String), + gql.Field(name="SubnetId", type=gql.String), + gql.Field(name="InstanceType", type=gql.String), + gql.Field(name="RoleArn", type=gql.String), + gql.Field(name="VolumeSizeInGB", type=gql.Integer), + gql.Field( + name="userRoleForNotebook", + type=SagemakerNotebookRole.toGraphQLEnum(), + resolver=resolve_user_role, + ), + gql.Field(name="NotebookInstanceStatus", type=gql.String, resolver=resolve_notebook_status), + gql.Field( + name="environment", + type=gql.Ref("Environment"), + resolver=resolve_environment, + ), + gql.Field( + name="organization", + type=gql.Ref("Organization"), + resolver=resolve_organization_by_env, + ), + gql.Field(name="stack", type=gql.Ref("Stack"), resolver=resolve_notebook_stack), + ], +) + +SagemakerNotebookSearchResult = gql.ObjectType( + 
name="SagemakerNotebookSearchResult", + fields=[ + gql.Field(name="count", type=gql.Integer), + gql.Field(name="page", type=gql.Integer), + gql.Field(name="pages", type=gql.Integer), + gql.Field(name="hasNext", type=gql.Boolean), + gql.Field(name="hasPrevious", type=gql.Boolean), + gql.Field(name="nodes", type=gql.ArrayType(SagemakerNotebook)), + ], +) diff --git a/backend/dataall/modules/notebooks/aws/__init__.py b/backend/dataall/modules/notebooks/aws/__init__.py new file mode 100644 index 000000000..873d3c5d3 --- /dev/null +++ b/backend/dataall/modules/notebooks/aws/__init__.py @@ -0,0 +1 @@ +"""Contains code that send requests to AWS using SDK (boto3)""" diff --git a/backend/dataall/modules/notebooks/aws/sagemaker_notebook_client.py b/backend/dataall/modules/notebooks/aws/sagemaker_notebook_client.py new file mode 100644 index 000000000..e7fea85a4 --- /dev/null +++ b/backend/dataall/modules/notebooks/aws/sagemaker_notebook_client.py @@ -0,0 +1,61 @@ +import logging + +from dataall.base.aws.sts import SessionHelper +from dataall.modules.notebooks.db.notebook_models import SagemakerNotebook +from botocore.exceptions import ClientError + +logger = logging.getLogger(__name__) + + +class SagemakerClient: + """ + A Sagemaker notebooks proxy client that is used to send requests to AWS + """ + def __init__(self, notebook: SagemakerNotebook): + session = SessionHelper.remote_session(notebook.AWSAccountId) + self._client = session.client('sagemaker', region_name=notebook.region) + self._instance_name = notebook.NotebookInstanceName + + def get_notebook_instance_status(self) -> str: + """Remote call to AWS to check the notebook's status""" + try: + response = self._client.describe_notebook_instance( + NotebookInstanceName=self._instance_name + ) + return response.get('NotebookInstanceStatus', 'NOT FOUND') + except ClientError as e: + logger.error( + f'Could not retrieve instance {self._instance_name} status due to: {e} ' + ) + return 'NOT FOUND' + + def presigned_url(self): + """Creates a presigned url for a notebook instance by sending request to AWS""" + try: + response = self._client.create_presigned_notebook_instance_url( + NotebookInstanceName=self._instance_name + ) + return response['AuthorizedUrl'] + except ClientError as e: + raise e + + def start_instance(self): + """Starts the notebooks instance by sending a request to AWS""" + try: + status = self.get_notebook_instance_status() + self._client.start_notebook_instance(NotebookInstanceName=self._instance_name) + return status + except ClientError as e: + return e + + def stop_instance(self) -> None: + """Stops the notebooks instance by sending a request to AWS""" + try: + self._client.stop_notebook_instance(NotebookInstanceName=self._instance_name) + except ClientError as e: + raise e + + +def client(notebook: SagemakerNotebook) -> SagemakerClient: + """Factory method to retrieve the client to send request to AWS""" + return SagemakerClient(notebook) diff --git a/backend/dataall/modules/notebooks/cdk/__init__.py b/backend/dataall/modules/notebooks/cdk/__init__.py new file mode 100644 index 000000000..b3caed898 --- /dev/null +++ b/backend/dataall/modules/notebooks/cdk/__init__.py @@ -0,0 +1,7 @@ +""" +This package contains modules that are used to create a CloudFormation stack in AWS. 
+The code is invoked in ECS Fargate to initialize the creation of the stack +""" +from dataall.modules.notebooks.cdk import notebook_stack, env_role_notebook_policy, pivot_role_notebooks_policy + +__all__ = ["notebook_stack", "env_role_notebook_policy", "pivot_role_notebooks_policy"] diff --git a/backend/dataall/modules/notebooks/cdk/env_role_notebook_policy.py b/backend/dataall/modules/notebooks/cdk/env_role_notebook_policy.py new file mode 100644 index 000000000..5e29d1f59 --- /dev/null +++ b/backend/dataall/modules/notebooks/cdk/env_role_notebook_policy.py @@ -0,0 +1,211 @@ +from aws_cdk import aws_iam as iam + +from dataall.core.environment.cdk.env_role_core_policies.service_policy import ServicePolicy +from dataall.modules.notebooks.services.notebook_permissions import CREATE_NOTEBOOK + + +class SagemakernotebookPolicy(ServicePolicy): + """ + Class including all permissions needed to work with Amazon SageMaker. + - Allow creation and management of SageMaker Notebooks only if tagged with team tag + - DO NOT allow creation of domain because this is handled in the environment stack + - DO NOT allow creation of user-profiles because this is handled in the ML Studio stack + - Allow management of domains and user-profiles tagged with team tag + - Allow any action besides the above listed ones on resources that are not notebooks, domains, apps and user-profiles + - Allow support permissions on ECR, Service Catalog and logging + """ + # TODO (in cleanup tasks): Remove those policies that are only needed for SM studio, right now we have both + def get_statements(self, group_permissions, **kwargs): + if CREATE_NOTEBOOK not in group_permissions: + return [] + + return [ + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=['sagemaker:AddTags'], + resources=['*'], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_value], + f'aws:RequestTag/{self.tag_key}': [self.tag_value], + }, + }, + ), + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=[ + 'sagemaker:List*', + 'sagemaker:List*', + 'sagemaker:Describe*', + 'sagemaker:BatchGet*', + 'sagemaker:BatchDescribe*', + 'sagemaker:Search', + 'sagemaker:RenderUiTemplate', + 'sagemaker:GetSearchSuggestions', + 'sagemaker:QueryLineage', + 'sagemaker:GetSagemakerServicecatalogPortfolioStatus', + 'sagemaker:CreateNotebookInstanceLifecycleConfig', + 'sagemaker:DeleteNotebookInstanceLifecycleConfig', + ], + resources=['*'], + ), + # SageMaker Notebooks permissions + iam.PolicyStatement( + # sid="SageMakerCreateTaggedResourcesNotebooks", + effect=iam.Effect.ALLOW, + actions=['sagemaker:CreateNotebookInstance'], + resources=[ + f'arn:aws:sagemaker:{self.region}:{self.account}:notebook-instance/{self.resource_prefix}*', + + ], + conditions={ + 'StringEquals': { + f'aws:RequestTag/{self.tag_key}': [self.tag_value], + f'aws:ResourceTag/{self.tag_key}': [self.tag_value] + }, + }, + ), + iam.PolicyStatement( + # sid="SageMakerCreatePresignedNotebookInstanceUrl", + effect=iam.Effect.ALLOW, + actions=['sagemaker:CreatePresignedNotebookInstanceUrl'], + resources=[ + f'arn:aws:sagemaker:{self.region}:{self.account}:notebook-instance/{self.resource_prefix}*', + ], + conditions={ + 'StringEquals': { + f'sagemaker:ResourceTag/{self.tag_key}': [self.tag_value] + }, + }, + ), + iam.PolicyStatement( + # sid="SageMakerManageResourcesNotebooks", + effect=iam.Effect.ALLOW, + actions=[ + 'sagemaker:*NotebookInstance', + ], + resources=[ + f'arn:aws:sagemaker:{self.region}:{self.account}:notebook-instance/{self.resource_prefix}*', + ], 
+ conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_value] + }, + }, + ), + # SageMaker Studio permissions + iam.PolicyStatement( + # sid="SageMakerManageTeamResourcesMLStudio", + effect=iam.Effect.ALLOW, + actions=[ + 'sagemaker:DeleteDomain', + 'sagemaker:DeleteUserProfile', + 'sagemaker:UpdateDomain', + 'sagemaker:UpdateUserProfile', + ], + resources=[ + f'arn:aws:sagemaker:{self.region}:{self.account}:domain/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:user-profile/*/*', + ], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_key] + } + }, + ), + # For everything that is not domains and user-profiles we allow permissions if the resource is tagged + # Deny on creation of domains and users, generic allow for prefixed and tagged resources + # allow for apps (cannot be tagged) and special tag needed for CreatePresignedDomainUrl + iam.PolicyStatement( + # sid="SageMakerDenyCreateDomainsUsers", + effect=iam.Effect.DENY, + actions=['sagemaker:Create*'], + resources=[ + f'arn:aws:sagemaker:{self.region}:{self.account}:domain/*', + f'arn:aws:sagemaker:{self.region}:{self.account}:user-profile/*/*', + ], + ), + iam.PolicyStatement( + # sid="SageMakerCreateGenericResources", + effect=iam.Effect.ALLOW, + actions=['sagemaker:Create*'], + not_resources=[ + f'arn:aws:sagemaker:{self.region}:{self.account}:*/{self.resource_prefix}*', + f'arn:aws:sagemaker:{self.region}:{self.account}:*/{self.resource_prefix}*/*', + ], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_value], + f'aws:RequestTag/{self.tag_key}': [self.tag_value], + }, + }, + ), + iam.PolicyStatement( + # sid="SageMakerApps", + effect=iam.Effect.ALLOW, + actions=[ + 'sagemaker:CreateApp', + 'sagemaker:DeleteApp' + ], + resources=[f'arn:aws:sagemaker:{self.region}:{self.account}:app/*/*'] + ), + iam.PolicyStatement( + # sid="SageMakerCreatePresignedDomainUrl", + effect=iam.Effect.ALLOW, + actions=['sagemaker:CreatePresignedDomainUrl'], + resources=[f'arn:aws:sagemaker:{self.region}:{self.account}:user-profile/*/*'], + conditions={ + 'StringEquals': { + f'sagemaker:ResourceTag/{self.tag_key}': [self.tag_value] + }, + }, + ), + iam.PolicyStatement( + # sid="SageMakerManageGenericResources", + effect=iam.Effect.ALLOW, + actions=[ + 'sagemaker:Delete*', + 'sagemaker:Update*', + 'sagemaker:Start*', + 'sagemaker:Stop*', + 'sagemaker:InvokeEndpoint', + 'sagemaker:InvokeEndpointAsync' + ], + resources=[ + f'arn:aws:sagemaker:{self.region}:{self.account}:*/{self.resource_prefix}*', + f'arn:aws:sagemaker:{self.region}:{self.account}:*/{self.resource_prefix}*/*', + ], + conditions={ + 'StringEquals': { + f'aws:ResourceTag/{self.tag_key}': [self.tag_value], + }, + }, + ), + # Logging and support permissions + iam.PolicyStatement( + # sid="SageMakerLogging", + effect=iam.Effect.ALLOW, + actions=[ + 'logs:CreateLogGroup', + 'logs:CreateLogStream', + 'logs:PutLogEvents' + ], + resources=[ + f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws/sagemaker/*', + f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws/sagemaker/*:log-stream:*', + ] + ), + iam.PolicyStatement( + # sid="SageMakerSupport", + effect=iam.Effect.ALLOW, + actions=[ + 'ecr:GetAuthorizationToken', + 'ecr:BatchCheckLayerAvailability', + 'ecr:GetDownloadUrlForLayer', + 'ecr:BatchGetImage', + 'servicecatalog:ListAcceptedPortfolioShares', + 'servicecatalog:ListPrincipalsForPortfolio', + ], + resources=['*'] + ) + ] diff --git 
a/backend/dataall/modules/notebooks/cdk/notebook_stack.py b/backend/dataall/modules/notebooks/cdk/notebook_stack.py new file mode 100644 index 000000000..29ed53bf1 --- /dev/null +++ b/backend/dataall/modules/notebooks/cdk/notebook_stack.py @@ -0,0 +1,169 @@ +"""" +Creates a CloudFormation stack for SageMaker notebooks using cdk +""" +import logging +import os + +from aws_cdk import ( + aws_sagemaker as sagemaker, + aws_ec2 as ec2, + aws_kms as kms, + aws_iam as iam, + Stack, + CfnOutput, +) + +from dataall.base.aws.sts import SessionHelper +from dataall.base.cdkproxy.stacks.manager import stack +from dataall.core.environment.db.environment_models import EnvironmentGroup +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.stacks.services.runtime_stacks_tagging import TagsUtil +from dataall.base.db import Engine, get_engine +from dataall.modules.notebooks.db.notebook_models import SagemakerNotebook +from dataall.base.utils.cdk_nag_utils import CDKNagUtil + +logger = logging.getLogger(__name__) + + +@stack(stack='notebook') +class NotebookStack(Stack): + """ + Creation of a notebook stack. + Having imported the notebook module, the class registers itself using @stack + Then it will be reachable by HTTP request / using SQS from GraphQL lambda + """ + + module_name = __file__ + + def get_engine(self) -> Engine: + envname = os.environ.get('envname', 'local') + engine = get_engine(envname=envname) + return engine + + def get_target(self, target_uri) -> SagemakerNotebook: + engine = self.get_engine() + with engine.scoped_session() as session: + notebook = session.query(SagemakerNotebook).get(target_uri) + return notebook + + def get_env_group( + self, notebook: SagemakerNotebook + ) -> EnvironmentGroup: + engine = self.get_engine() + with engine.scoped_session() as session: + env_group = EnvironmentService.get_environment_group( + session, notebook.SamlAdminGroupName, notebook.environmentUri + ) + return env_group + + def __init__(self, scope, id: str, target_uri: str = None, **kwargs) -> None: + super().__init__(scope, + id, + description="Cloud formation stack of NOTEBOOK: {}; URI: {}; DESCRIPTION: {}".format( + self.get_target(target_uri=target_uri).label, + target_uri, + self.get_target(target_uri=target_uri).description, + )[:1024], + **kwargs) + + # Required for dynamic stack tagging + self.target_uri = target_uri + + notebook: SagemakerNotebook = self.get_target(target_uri=target_uri) + + env_group = self.get_env_group(notebook) + + cdk_exec_role = SessionHelper.get_cdk_exec_role_arn(notebook.AWSAccountId, notebook.region) + + notebook_key = kms.Key( + self, + 'NotebookKmsKey', + alias=notebook.NotebookInstanceName, + enable_key_rotation=True, + admins=[ + iam.ArnPrincipal(cdk_exec_role), + ], + policy=iam.PolicyDocument( + assign_sids=True, + statements=[ + iam.PolicyStatement( + resources=['*'], + effect=iam.Effect.ALLOW, + principals=[ + iam.ArnPrincipal(notebook.RoleArn) + ], + actions=[ + "kms:Encrypt", + "kms:Decrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*", + "kms:DescribeKey" + ], + conditions={ + "StringEquals": {"kms:ViaService": f"sagemaker.{notebook.region}.amazonaws.com"} + } + ), + iam.PolicyStatement( + resources=['*'], + effect=iam.Effect.ALLOW, + principals=[ + iam.ArnPrincipal(notebook.RoleArn) + ], + actions=[ + "kms:DescribeKey", + "kms:List*", + "kms:GetKeyPolicy", + ] + ) + ], + ), + ) + + if not (notebook.VpcId and notebook.SubnetId): + sagemaker.CfnNotebookInstance( + self, + f'Notebook{target_uri}', + 
instance_type=notebook.InstanceType, + role_arn=notebook.RoleArn, + direct_internet_access='Enabled', + notebook_instance_name=notebook.NotebookInstanceName, + kms_key_id=notebook_key.key_id, + ) + else: + vpc = ec2.Vpc.from_lookup(self, 'NotebookVPC', vpc_id=notebook.VpcId) + security_group = ec2.SecurityGroup( + self, + f'sgNotebook{target_uri}', + vpc=vpc, + allow_all_outbound=True, + security_group_name=notebook.NotebookInstanceName, + ) + security_group.connections.allow_from( + ec2.Peer.ipv4(vpc.vpc_cidr_block), + ec2.Port.tcp(443), + 'Allow inbound HTTPS', + ) + + sagemaker.CfnNotebookInstance( + self, + f'Notebook{target_uri}', + instance_type=notebook.InstanceType, + role_arn=notebook.RoleArn, + direct_internet_access='Disabled', + subnet_id=notebook.SubnetId, + security_group_ids=[security_group.security_group_id], + notebook_instance_name=notebook.NotebookInstanceName, + kms_key_id=notebook_key.key_id, + volume_size_in_gb=notebook.VolumeSizeInGB, + ) + + CfnOutput( + self, + 'NotebookInstanceName', + export_name=f'{notebook.notebookUri}-NotebookInstanceName', + value=notebook.NotebookInstanceName, + ) + + TagsUtil.add_tags(stack=self, model=SagemakerNotebook, target_type="notebook") + + CDKNagUtil.check_rules(self) diff --git a/backend/dataall/modules/notebooks/cdk/pivot_role_notebooks_policy.py b/backend/dataall/modules/notebooks/cdk/pivot_role_notebooks_policy.py new file mode 100644 index 000000000..c3dfe178f --- /dev/null +++ b/backend/dataall/modules/notebooks/cdk/pivot_role_notebooks_policy.py @@ -0,0 +1,49 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class NotebooksPivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with AWS SageMaker. + It allows pivot role to: + - .... 
+ """ + def get_statements(self): + statements = [ + iam.PolicyStatement( + sid='SageMakerNotebookActions', + effect=iam.Effect.ALLOW, + actions=[ + 'sagemaker:ListTags', + 'sagemaker:StopNotebookInstance', + 'sagemaker:CreatePresignedNotebookInstanceUrl', + 'sagemaker:DescribeNotebookInstance', + 'sagemaker:StartNotebookInstance', + 'sagemaker:AddTags', + ], + resources=[ + f'arn:aws:sagemaker:*:{self.account}:notebook-instance/{self.env_resource_prefix}*', + ], + ), + iam.PolicyStatement( + sid='SageMakerNotebookList', + effect=iam.Effect.ALLOW, + actions=[ + 'sagemaker:ListNotebookInstances', + ], + resources=['*'], + ), + iam.PolicyStatement( + sid='EC2SGNotebooks', + effect=iam.Effect.ALLOW, + actions=[ + 'ec2:DescribeSubnets', + 'ec2:DescribeSecurityGroups', + 'ec2:DescribeVpcs', + 'ec2:DescribeInstances', + 'ec2:DescribeNetworkInterfaces', + ], + resources=['*'], + ), + ] + return statements diff --git a/backend/dataall/modules/notebooks/db/__init__.py b/backend/dataall/modules/notebooks/db/__init__.py new file mode 100644 index 000000000..86631d191 --- /dev/null +++ b/backend/dataall/modules/notebooks/db/__init__.py @@ -0,0 +1 @@ +"""Contains a code to that interacts with the database""" diff --git a/backend/dataall/modules/notebooks/db/notebook_models.py b/backend/dataall/modules/notebooks/db/notebook_models.py new file mode 100644 index 000000000..bfa7baff7 --- /dev/null +++ b/backend/dataall/modules/notebooks/db/notebook_models.py @@ -0,0 +1,26 @@ +"""ORM models for sagemaker notebooks""" + +from sqlalchemy import Column, String, Integer, ForeignKey + +from dataall.base.db import Base +from dataall.base.db import Resource, utils + + +class SagemakerNotebook(Resource, Base): + """Describes ORM model for sagemaker notebooks""" + + __tablename__ = 'sagemaker_notebook' + environmentUri = Column(String, ForeignKey("environment.environmentUri"), nullable=False) + notebookUri = Column(String, primary_key=True, default=utils.uuid('notebook')) + NotebookInstanceName = Column( + String, nullable=False, default=utils.slugifier('label') + ) + NotebookInstanceStatus = Column(String, nullable=False) + AWSAccountId = Column(String, nullable=False) + RoleArn = Column(String, nullable=False) + region = Column(String, default='eu-west-1') + SamlAdminGroupName = Column(String, nullable=True) + VpcId = Column(String, nullable=True) + SubnetId = Column(String, nullable=True) + VolumeSizeInGB = Column(Integer, nullable=True) + InstanceType = Column(String, nullable=True) diff --git a/backend/dataall/modules/notebooks/db/notebook_repository.py b/backend/dataall/modules/notebooks/db/notebook_repository.py new file mode 100644 index 000000000..cfb451857 --- /dev/null +++ b/backend/dataall/modules/notebooks/db/notebook_repository.py @@ -0,0 +1,67 @@ +""" +DAO layer that encapsulates the logic and interaction with the database for notebooks +Provides the API to retrieve / update / delete notebooks +""" +from sqlalchemy import or_ +from sqlalchemy.sql import and_ +from sqlalchemy.orm import Query + +from dataall.base.db import paginate +from dataall.modules.notebooks.db.notebook_models import SagemakerNotebook +from dataall.core.environment.services.environment_resource_manager import EnvironmentResource + + +class NotebookRepository(EnvironmentResource): + """DAO layer for notebooks""" + _DEFAULT_PAGE = 1 + _DEFAULT_PAGE_SIZE = 10 + + def __init__(self, session): + self._session = session + + def save_notebook(self, notebook): + """Save notebook to the database""" + self._session.add(notebook) + 
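For orientation, a small usage sketch of this repository (the pagination helper is defined just below): the notebook service calls it with the caller's username and group list, and the filter dict mirrors the SagemakerNotebookFilter GraphQL input. The engine setup and the values shown are placeholders, not part of this diff.

from dataall.base.db import get_engine
from dataall.modules.notebooks.db.notebook_repository import NotebookRepository

engine = get_engine(envname='local')
with engine.scoped_session() as session:
    page = NotebookRepository(session).paginated_user_notebooks(
        username='alice',
        groups=['science-team'],
        filter={'term': 'fraud', 'page': 1, 'pageSize': 10},
    )
    # paginate(...).to_dict() exposes count/page/pages/hasNext/hasPrevious/nodes,
    # mirroring the SagemakerNotebookSearchResult GraphQL type.
    print(page['count'])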
self._session.commit() + + def find_notebook(self, uri) -> SagemakerNotebook: + """Finds a notebook. Returns None if the notebook doesn't exist""" + return self._session.query(SagemakerNotebook).get(uri) + + def paginated_user_notebooks(self, username, groups, filter=None) -> dict: + """Returns a page of user notebooks""" + return paginate( + query=self._query_user_notebooks(username, groups, filter), + page=filter.get('page', NotebookRepository._DEFAULT_PAGE), + page_size=filter.get('pageSize', NotebookRepository._DEFAULT_PAGE_SIZE), + ).to_dict() + + def _query_user_notebooks(self, username, groups, filter) -> Query: + query = self._session.query(SagemakerNotebook).filter( + or_( + SagemakerNotebook.owner == username, + SagemakerNotebook.SamlAdminGroupName.in_(groups), + ) + ) + if filter and filter.get('term'): + query = query.filter( + or_( + SagemakerNotebook.description.ilike( + filter.get('term') + '%%' + ), + SagemakerNotebook.label.ilike(filter.get('term') + '%%'), + ) + ) + return query + + def count_resources(self, environment_uri, group_uri): + return ( + self._session.query(SagemakerNotebook) + .filter( + and_( + SagemakerNotebook.environmentUri == environment_uri, + SagemakerNotebook.SamlAdminGroupName == group_uri + ) + ) + .count() + ) diff --git a/backend/dataall/modules/notebooks/services/__init__.py b/backend/dataall/modules/notebooks/services/__init__.py new file mode 100644 index 000000000..5f13a14f1 --- /dev/null +++ b/backend/dataall/modules/notebooks/services/__init__.py @@ -0,0 +1,7 @@ +""" +Contains the code needed for service layer. +The service layer is a layer where all business logic is aggregated +""" +from dataall.modules.notebooks.services import notebook_service, notebook_permissions + +__all__ = ["notebook_service", "notebook_permissions"] diff --git a/backend/dataall/modules/notebooks/services/notebook_permissions.py b/backend/dataall/modules/notebooks/services/notebook_permissions.py new file mode 100644 index 000000000..9084e89c0 --- /dev/null +++ b/backend/dataall/modules/notebooks/services/notebook_permissions.py @@ -0,0 +1,42 @@ +""" +Add module's permissions to the global permissions. 
+Contains permissions for sagemaker notebooks +""" + +from dataall.core.permissions.permissions import ( + ENVIRONMENT_ALL, + ENVIRONMENT_INVITED, + RESOURCES_ALL_WITH_DESC, + RESOURCES_ALL, + ENVIRONMENT_INVITATION_REQUEST, + TENANT_ALL, + TENANT_ALL_WITH_DESC +) + +GET_NOTEBOOK = "GET_NOTEBOOK" +UPDATE_NOTEBOOK = "UPDATE_NOTEBOOK" +DELETE_NOTEBOOK = "DELETE_NOTEBOOK" +CREATE_NOTEBOOK = "CREATE_NOTEBOOK" +MANAGE_NOTEBOOKS = "MANAGE_NOTEBOOKS" + +NOTEBOOK_ALL = [ + GET_NOTEBOOK, + DELETE_NOTEBOOK, + UPDATE_NOTEBOOK, +] + +ENVIRONMENT_ALL.append(CREATE_NOTEBOOK) +ENVIRONMENT_INVITED.append(CREATE_NOTEBOOK) +ENVIRONMENT_INVITATION_REQUEST.append(CREATE_NOTEBOOK) + +TENANT_ALL.append(MANAGE_NOTEBOOKS) +TENANT_ALL_WITH_DESC[MANAGE_NOTEBOOKS] = "Manage notebooks" + + +RESOURCES_ALL.append(CREATE_NOTEBOOK) +RESOURCES_ALL.extend(NOTEBOOK_ALL) + +RESOURCES_ALL_WITH_DESC[CREATE_NOTEBOOK] = "Create notebooks on this environment" +RESOURCES_ALL_WITH_DESC[GET_NOTEBOOK] = "General permission to get a notebook" +RESOURCES_ALL_WITH_DESC[DELETE_NOTEBOOK] = "Permission to delete a notebook" +RESOURCES_ALL_WITH_DESC[UPDATE_NOTEBOOK] = "Permission to edit a notebook" diff --git a/backend/dataall/modules/notebooks/services/notebook_service.py b/backend/dataall/modules/notebooks/services/notebook_service.py new file mode 100644 index 000000000..17e738fe9 --- /dev/null +++ b/backend/dataall/modules/notebooks/services/notebook_service.py @@ -0,0 +1,230 @@ +""" +A service layer for sagemaker notebooks +Central part for working with notebooks +""" +import dataclasses +import logging +from dataclasses import dataclass, field +from typing import List, Dict + +from dataall.base.context import get_context as context +from dataall.core.environment.db.environment_models import Environment +from dataall.core.environment.env_permission_checker import has_group_permission +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.permissions.permission_checker import has_resource_permission, has_tenant_permission +from dataall.core.stacks.api import stack_helper +from dataall.core.stacks.db.keyvaluetag_repositories import KeyValueTag +from dataall.core.stacks.db.stack_repositories import Stack +from dataall.base.db import exceptions +from dataall.modules.notebooks.aws.sagemaker_notebook_client import client +from dataall.modules.notebooks.db.notebook_models import SagemakerNotebook +from dataall.modules.notebooks.db.notebook_repository import NotebookRepository +from dataall.modules.notebooks.services.notebook_permissions import MANAGE_NOTEBOOKS, CREATE_NOTEBOOK, NOTEBOOK_ALL, \ + GET_NOTEBOOK, UPDATE_NOTEBOOK, DELETE_NOTEBOOK +from dataall.base.utils.naming_convention import ( + NamingConventionService, + NamingConventionPattern, +) +from dataall.base.utils import slugify + +logger = logging.getLogger(__name__) + + +@dataclass +class NotebookCreationRequest: + """A request dataclass for notebook creation. 
Adds default values for missed parameters""" + label: str + VpcId: str + SubnetId: str + SamlAdminGroupName: str + environment: Dict = field(default_factory=dict) + description: str = "No description provided" + VolumeSizeInGB: int = 32 + InstanceType: str = "ml.t3.medium" + tags: List[str] = field(default_factory=list) + + @classmethod + def from_dict(cls, env): + """Copies only required fields from the dictionary and creates an instance of class""" + fields = set([f.name for f in dataclasses.fields(cls)]) + return cls(**{ + k: v for k, v in env.items() + if k in fields + }) + + +class NotebookService: + """ + Encapsulate the logic of interactions with sagemaker notebooks. + """ + + _NOTEBOOK_RESOURCE_TYPE = "notebook" + + @staticmethod + @has_tenant_permission(MANAGE_NOTEBOOKS) + @has_resource_permission(CREATE_NOTEBOOK) + @has_group_permission(CREATE_NOTEBOOK) + def create_notebook(*, uri: str, admin_group: str, request: NotebookCreationRequest) -> SagemakerNotebook: + """ + Creates a notebook and attach policies to it + Throws an exception if notebook are not enabled for the environment + """ + + with _session() as session: + env = EnvironmentService.get_environment_by_uri(session, uri) + enabled = EnvironmentService.get_boolean_env_param(session, env, "notebooksEnabled") + + if not enabled: + raise exceptions.UnauthorizedOperation( + action=CREATE_NOTEBOOK, + message=f'Notebooks feature is disabled for the environment {env.label}', + ) + + env_group = request.environment + if not env_group: + env_group = EnvironmentService.get_environment_group( + session, + group_uri=admin_group, + environment_uri=env.environmentUri, + ) + + notebook = SagemakerNotebook( + label=request.label, + environmentUri=env.environmentUri, + description=request.description, + NotebookInstanceName=slugify(request.label, separator=''), + NotebookInstanceStatus='NotStarted', + AWSAccountId=env.AwsAccountId, + region=env.region, + RoleArn=env_group.environmentIAMRoleArn, + owner=context().username, + SamlAdminGroupName=admin_group, + tags=request.tags, + VpcId=request.VpcId, + SubnetId=request.SubnetId, + VolumeSizeInGB=request.VolumeSizeInGB, + InstanceType=request.InstanceType, + ) + + NotebookRepository(session).save_notebook(notebook) + + notebook.NotebookInstanceName = NamingConventionService( + target_uri=notebook.notebookUri, + target_label=notebook.label, + pattern=NamingConventionPattern.NOTEBOOK, + resource_prefix=env.resourcePrefix, + ).build_compliant_name() + + ResourcePolicy.attach_resource_policy( + session=session, + group=request.SamlAdminGroupName, + permissions=NOTEBOOK_ALL, + resource_uri=notebook.notebookUri, + resource_type=SagemakerNotebook.__name__, + ) + + if env.SamlGroupName != admin_group: + ResourcePolicy.attach_resource_policy( + session=session, + group=env.SamlGroupName, + permissions=NOTEBOOK_ALL, + resource_uri=notebook.notebookUri, + resource_type=SagemakerNotebook.__name__, + ) + + Stack.create_stack( + session=session, + environment_uri=notebook.environmentUri, + target_type='notebook', + target_uri=notebook.notebookUri, + target_label=notebook.label, + ) + + stack_helper.deploy_stack(targetUri=notebook.notebookUri) + + return notebook + + @staticmethod + def list_user_notebooks(filter) -> dict: + """List existed user notebooks. 
Filters only required notebooks by the filter param""" + with _session() as session: + return NotebookRepository(session).paginated_user_notebooks( + username=context().username, + groups=context().groups, + filter=filter + ) + + @staticmethod + @has_resource_permission(GET_NOTEBOOK) + def get_notebook(*, uri) -> SagemakerNotebook: + """Gets a notebook by uri""" + with _session() as session: + return NotebookService._get_notebook(session, uri) + + @staticmethod + @has_resource_permission(UPDATE_NOTEBOOK) + def start_notebook(*, uri): + """Starts notebooks instance""" + notebook = NotebookService.get_notebook(uri=uri) + client(notebook).start_instance() + + @staticmethod + @has_resource_permission(UPDATE_NOTEBOOK) + def stop_notebook(*, uri: str) -> None: + """Stop notebook instance""" + notebook = NotebookService.get_notebook(uri=uri) + client(notebook).stop_instance() + + @staticmethod + @has_resource_permission(GET_NOTEBOOK) + def get_notebook_presigned_url(*, uri: str) -> str: + """Creates and returns a presigned url for a notebook""" + notebook = NotebookService.get_notebook(uri=uri) + return client(notebook).presigned_url() + + @staticmethod + @has_resource_permission(GET_NOTEBOOK) + def get_notebook_status(*, uri) -> str: + """Retrieves notebook status""" + notebook = NotebookService.get_notebook(uri=uri) + return client(notebook).get_notebook_instance_status() + + @staticmethod + @has_resource_permission(DELETE_NOTEBOOK) + def delete_notebook(*, uri: str, delete_from_aws: bool): + """Deletes notebook from the database and if delete_from_aws is True from AWS as well""" + with _session() as session: + notebook = NotebookService._get_notebook(session, uri) + KeyValueTag.delete_key_value_tags(session, notebook.notebookUri, 'notebook') + session.delete(notebook) + + ResourcePolicy.delete_resource_policy( + session=session, + resource_uri=notebook.notebookUri, + group=notebook.SamlAdminGroupName, + ) + + env: Environment = EnvironmentService.get_environment_by_uri( + session, notebook.environmentUri + ) + + if delete_from_aws: + stack_helper.delete_stack( + target_uri=uri, + accountid=env.AwsAccountId, + cdk_role_arn=env.CDKRoleArn, + region=env.region + ) + + @staticmethod + def _get_notebook(session, uri) -> SagemakerNotebook: + notebook = NotebookRepository(session).find_notebook(uri) + + if not notebook: + raise exceptions.ObjectNotFound('SagemakerNotebook', uri) + return notebook + + +def _session(): + return context().db_engine.scoped_session() diff --git a/backend/dataall/modules/vote/__init__.py b/backend/dataall/modules/vote/__init__.py new file mode 100644 index 000000000..e39fb7624 --- /dev/null +++ b/backend/dataall/modules/vote/__init__.py @@ -0,0 +1,19 @@ +from typing import Set, List, Type + +from dataall.base.loader import ModuleInterface, ImportMode + + +class VoteApiModuleInterface(ModuleInterface): + + @staticmethod + def is_supported(modes: Set[ImportMode]) -> bool: + return ImportMode.API in modes + + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + from dataall.modules.catalog import CatalogApiModuleInterface + + return [CatalogApiModuleInterface] + + def __init__(self): + import dataall.modules.vote.api diff --git a/backend/dataall/modules/vote/api/__init__.py b/backend/dataall/modules/vote/api/__init__.py new file mode 100644 index 000000000..a2f3d4f65 --- /dev/null +++ b/backend/dataall/modules/vote/api/__init__.py @@ -0,0 +1,9 @@ +from . 
import ( + input_types, + queries, + resolvers, + mutations, + types, +) + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations'] diff --git a/backend/dataall/modules/vote/api/input_types.py b/backend/dataall/modules/vote/api/input_types.py new file mode 100644 index 000000000..9e225ca0e --- /dev/null +++ b/backend/dataall/modules/vote/api/input_types.py @@ -0,0 +1,10 @@ +from dataall.base.api import gql + +VoteInput = gql.InputType( + name='VoteInput', + arguments=[ + gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), + gql.Argument(name='upvote', type=gql.NonNullableType(gql.Boolean)), + ], +) diff --git a/backend/dataall/modules/vote/api/mutations.py b/backend/dataall/modules/vote/api/mutations.py new file mode 100644 index 000000000..548a0f2ed --- /dev/null +++ b/backend/dataall/modules/vote/api/mutations.py @@ -0,0 +1,12 @@ +from dataall.base.api import gql +from dataall.modules.vote.api.resolvers import upvote + + +upVote = gql.MutationField( + name='upVote', + type=gql.Ref('Vote'), + args=[ + gql.Argument(name='input', type=gql.NonNullableType(gql.Ref('VoteInput'))), + ], + resolver=upvote, +) diff --git a/backend/dataall/modules/vote/api/queries.py b/backend/dataall/modules/vote/api/queries.py new file mode 100644 index 000000000..86788986b --- /dev/null +++ b/backend/dataall/modules/vote/api/queries.py @@ -0,0 +1,24 @@ +from dataall.base.api import gql +from dataall.modules.vote.api.resolvers import count_upvotes, get_vote + + +countUpVotes = gql.QueryField( + name='countUpVotes', + type=gql.Integer, + args=[ + gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), + ], + resolver=count_upvotes, +) + + +getVote = gql.QueryField( + name='getVote', + type=gql.Ref('Vote'), + args=[ + gql.Argument(name='targetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='targetType', type=gql.NonNullableType(gql.String)), + ], + resolver=get_vote, +) diff --git a/backend/dataall/modules/vote/api/resolvers.py b/backend/dataall/modules/vote/api/resolvers.py new file mode 100644 index 000000000..231d35d96 --- /dev/null +++ b/backend/dataall/modules/vote/api/resolvers.py @@ -0,0 +1,42 @@ +from typing import Dict, Type + +from dataall.modules.vote.db.vote_repositories import Vote +from dataall.modules.catalog.indexers.base_indexer import BaseIndexer + +_VOTE_TYPES: Dict[str, Type[BaseIndexer]] = {} + + +def add_vote_type(target_type: str, indexer: Type[BaseIndexer]): + _VOTE_TYPES[target_type] = indexer + + +def count_upvotes( + context, source, targetUri: str = None, targetType: str = None +): + with context.engine.scoped_session() as session: + return Vote.count_upvotes( + session=session, + uri=targetUri, + target_type=targetType + ) + + +def upvote(context, source, input=None): + with context.engine.scoped_session() as session: + vote = Vote.upvote( + session=session, + uri=input['targetUri'], + data=input, + ) + + _VOTE_TYPES[vote.targetType].upsert(session, vote.targetUri) + return vote + + +def get_vote(context, source, targetUri: str = None, targetType: str = None): + with context.engine.scoped_session() as session: + return Vote.find_vote( + session=session, + target_uri=targetUri, + target_type=targetType + ) diff --git a/backend/dataall/modules/vote/api/types.py b/backend/dataall/modules/vote/api/types.py new file mode 100644 index 000000000..7a0600984 --- /dev/null +++ 
b/backend/dataall/modules/vote/api/types.py @@ -0,0 +1,12 @@ +from dataall.base.api import gql + +Vote = gql.ObjectType( + name='Vote', + fields=[ + gql.Field(name='voteUri', type=gql.ID), + gql.Field(name='targetType', type=gql.String), + gql.Field(name='targetUri', type=gql.String), + gql.Field(name='upvote', type=gql.Boolean), + gql.Field(name='created', type=gql.String), + ], +) diff --git a/backend/dataall/modules/vote/db/__init__.py b/backend/dataall/modules/vote/db/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/vote/db/vote_models.py b/backend/dataall/modules/vote/db/vote_models.py new file mode 100644 index 000000000..6ee977df1 --- /dev/null +++ b/backend/dataall/modules/vote/db/vote_models.py @@ -0,0 +1,23 @@ +import datetime + +from sqlalchemy import Column, String, Boolean, DateTime + +from dataall.base.db import Base, utils + + +class Vote(Base): + __tablename__ = 'vote' + voteUri = Column(String, primary_key=True, default=utils.uuid('vote')) + username = Column(String, nullable=False) + targetUri = Column(String, nullable=False) + targetType = Column(String, nullable=False) + upvote = Column(Boolean, nullable=True) + created = Column(DateTime, default=datetime.datetime.now) + updated = Column(DateTime, onupdate=datetime.datetime.now) + + def __repr__(self): + if self.upvote: + vote = 'Up' + else: + vote = 'Down' + return f'' diff --git a/backend/dataall/modules/vote/db/vote_repositories.py b/backend/dataall/modules/vote/db/vote_repositories.py new file mode 100644 index 000000000..570fc2ec5 --- /dev/null +++ b/backend/dataall/modules/vote/db/vote_repositories.py @@ -0,0 +1,79 @@ +import logging +from datetime import datetime + +from dataall.base.db import exceptions +from dataall.modules.vote.db import vote_models as models +from dataall.base.context import get_context + +logger = logging.getLogger(__name__) + + +class Vote: + @staticmethod + def upvote(session, uri: str, data: dict = None) -> [models.Vote]: + if not uri: + raise exceptions.RequiredParameter('targetUri') + if not data: + raise exceptions.RequiredParameter('data') + if not data.get('targetType'): + raise exceptions.RequiredParameter('targetType') + if 'upvote' not in data: + raise exceptions.RequiredParameter('upvote') + + vote: models.Vote = ( + session.query(models.Vote) + .filter( + models.Vote.targetUri == uri, + models.Vote.targetType == data['targetType'], + ) + .first() + ) + if vote: + vote.upvote = data['upvote'] + vote.updated = datetime.now() + + else: + vote: models.Vote = models.Vote( + username=get_context().username, + targetUri=uri, + targetType=data['targetType'], + upvote=data['upvote'], + ) + session.add(vote) + + session.commit() + return vote + + @staticmethod + def count_upvotes(session, uri, target_type) -> dict: + return ( + session.query(models.Vote) + .filter( + models.Vote.targetUri == uri, + models.Vote.targetType == target_type, + models.Vote.upvote == True, + ) + .count() + ) + + @staticmethod + def find_vote(session, target_uri, target_type) -> [models.Vote]: + return ( + session.query(models.Vote) + .filter( + models.Vote.targetUri == target_uri, + models.Vote.targetType == target_type, + ) + .first() + ) + + @staticmethod + def delete_votes(session, target_uri, target_type) -> [models.Vote]: + return ( + session.query(models.Vote) + .filter( + models.Vote.targetUri == target_uri, + models.Vote.targetType == target_type, + ) + .delete() + ) diff --git a/backend/dataall/modules/worksheets/__init__.py 
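Vote.upvote above behaves as an update-or-insert keyed on (targetUri, targetType), and delete_votes gives the owning module a way to clean up when the voted resource is removed. A minimal sketch of that cleanup, assuming an open session and a dataset-like resource; the surrounding delete flow is illustrative, not part of this diff:

    from dataall.modules.vote.db.vote_repositories import Vote

    # called from the owning module's delete path so no orphaned votes remain
    Vote.delete_votes(session, target_uri=dataset.datasetUri, target_type='dataset')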
b/backend/dataall/modules/worksheets/__init__.py new file mode 100644 index 000000000..fb766284d --- /dev/null +++ b/backend/dataall/modules/worksheets/__init__.py @@ -0,0 +1,38 @@ +"""Contains the code related to worksheets""" +import logging + +from dataall.core.environment.services.environment_resource_manager import EnvironmentResourceManager +from dataall.base.loader import ImportMode, ModuleInterface +from dataall.modules.worksheets.db.worksheet_models import Worksheet +from dataall.modules.worksheets.db.worksheet_repositories import WorksheetRepository + +log = logging.getLogger(__name__) + + +class WorksheetApiModuleInterface(ModuleInterface): + """Implements ModuleInterface for worksheet GraphQl lambda""" + + @staticmethod + def is_supported(modes): + return ImportMode.API in modes + + def __init__(self): + + import dataall.modules.worksheets.api + + EnvironmentResourceManager.register(WorksheetRepository()) + + log.info("API of worksheets has been imported") + + +class WorksheetCdkModuleInterface(ModuleInterface): + """Implements ModuleInterface for worksheet""" + + @staticmethod + def is_supported(modes): + return ImportMode.CDK in modes + + def __init__(self): + import dataall.modules.worksheets.cdk + + log.info("CDK module of worksheets has been imported") diff --git a/backend/dataall/modules/worksheets/api/__init__.py b/backend/dataall/modules/worksheets/api/__init__.py new file mode 100644 index 000000000..1251b301b --- /dev/null +++ b/backend/dataall/modules/worksheets/api/__init__.py @@ -0,0 +1,10 @@ +from dataall.modules.worksheets.api import ( + input_types, + mutations, + queries, + resolvers, + types, + enums, +) + +__all__ = ['resolvers', 'types', 'input_types', 'queries', 'mutations', 'enums'] diff --git a/backend/dataall/modules/worksheets/api/enums.py b/backend/dataall/modules/worksheets/api/enums.py new file mode 100644 index 000000000..3e9549f2a --- /dev/null +++ b/backend/dataall/modules/worksheets/api/enums.py @@ -0,0 +1,7 @@ +from dataall.base.api.constants import GraphQLEnumMapper + + +class WorksheetRole(GraphQLEnumMapper): + Creator = '950' + Admin = '900' + NoPermission = '000' diff --git a/backend/dataall/modules/worksheets/api/input_types.py b/backend/dataall/modules/worksheets/api/input_types.py new file mode 100644 index 000000000..c64ec2bdb --- /dev/null +++ b/backend/dataall/modules/worksheets/api/input_types.py @@ -0,0 +1,81 @@ +from dataall.base.api import gql + +NewWorksheetInput = gql.InputType( + name='NewWorksheetInput', + arguments=[ + gql.Argument(name='label', type=gql.String), + gql.Argument(name='description', type=gql.String), + gql.Argument(name='tags', type=gql.ArrayType(gql.String)), + gql.Argument(name='SamlAdminGroupName', type=gql.NonNullableType(gql.String)), + ], +) + +UpdateWorksheetInput = gql.InputType( + name='UpdateWorksheetInput', + arguments=[ + gql.Argument(name='label', type=gql.String), + gql.Argument(name='description', type=gql.String), + gql.Argument(name='tags', type=gql.ArrayType(gql.String)), + gql.Argument(name='sqlBody', type=gql.String), + gql.Argument(name='chartConfig', type=gql.Ref('WorksheetChartConfigInput')), + ], +) + + +WorksheetChartInput = gql.InputType( + name='WorksheetChartInput', + arguments=[ + gql.Argument(name='chartConfig', type=gql.String), + gql.Argument(name='label', type=gql.String), + gql.Argument(name='description', type=gql.String), + ], +) + +WorksheetQueryInput = gql.InputType( + name='WorksheetQueryInput', + arguments=[ + gql.Argument(name='sqlBody', type=gql.String), + 
gql.Argument(name='AthenaQueryId', type=gql.String), + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + ], +) + + +WorksheetFilter = gql.InputType( + name='WorksheetFilter', + arguments=[ + gql.Argument(name='term', type=gql.String), + gql.Argument(name='page', type=gql.Integer), + gql.Argument(name='pageSize', type=gql.Integer), + ], +) + + +WorksheetDimensionInput = gql.InputType( + name='WorksheetDimensionInput', + arguments=[ + gql.Argument(name='columnName', type=gql.String), + ], +) + +WorksheetMeasureInput = gql.InputType( + name='WorksheetMeasureInput', + arguments=[ + gql.Argument(name='columnName', type=gql.String), + gql.Argument(name='aggregationName', type=gql.String), + ], +) + + +WorksheetChartConfigInput = gql.InputType( + name='WorksheetChartConfigInput', + arguments=[ + gql.Argument(name='chartType', type=gql.String), + gql.Argument( + name='dimensions', type=gql.ArrayType(gql.Ref('WorksheetDimensionInput')) + ), + gql.Argument( + name='measures', type=gql.ArrayType(gql.Ref('WorksheetMeasureInput')) + ), + ], +) diff --git a/backend/dataall/modules/worksheets/api/mutations.py b/backend/dataall/modules/worksheets/api/mutations.py new file mode 100644 index 000000000..611e6a691 --- /dev/null +++ b/backend/dataall/modules/worksheets/api/mutations.py @@ -0,0 +1,29 @@ +from dataall.base.api import gql +from dataall.modules.worksheets.api.resolvers import * + + +createWorksheet = gql.MutationField( + name='createWorksheet', + args=[gql.Argument(name='input', type=gql.Ref('NewWorksheetInput'))], + type=gql.Ref('Worksheet'), + resolver=create_worksheet, +) + +updateWorksheet = gql.MutationField( + name='updateWorksheet', + resolver=update_worksheet, + args=[ + gql.Argument(name='worksheetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='input', type=gql.Ref('UpdateWorksheetInput')), + ], + type=gql.Ref('Worksheet'), +) + +deleteWorksheet = gql.MutationField( + name='deleteWorksheet', + resolver=delete_worksheet, + args=[ + gql.Argument(name='worksheetUri', type=gql.NonNullableType(gql.String)), + ], + type=gql.Boolean, +) diff --git a/backend/dataall/modules/worksheets/api/queries.py b/backend/dataall/modules/worksheets/api/queries.py new file mode 100644 index 000000000..d73de0c1c --- /dev/null +++ b/backend/dataall/modules/worksheets/api/queries.py @@ -0,0 +1,30 @@ +from dataall.base.api import gql +from dataall.modules.worksheets.api.resolvers import * + + +getWorksheet = gql.QueryField( + name='getWorksheet', + type=gql.Ref('Worksheet'), + resolver=get_worksheet, + args=[gql.Argument(name='worksheetUri', type=gql.NonNullableType(gql.String))], +) + + +listWorksheets = gql.QueryField( + name='listWorksheets', + resolver=list_worksheets, + args=[gql.Argument(name='filter', type=gql.Ref('WorksheetFilter'))], + type=gql.Ref('Worksheets'), +) + + +runAthenaSqlQuery = gql.QueryField( + name='runAthenaSqlQuery', + type=gql.Ref('AthenaQueryResult'), + args=[ + gql.Argument(name='environmentUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='worksheetUri', type=gql.NonNullableType(gql.String)), + gql.Argument(name='sqlQuery', type=gql.NonNullableType(gql.String)), + ], + resolver=run_sql_query, +) diff --git a/backend/dataall/modules/worksheets/api/resolvers.py b/backend/dataall/modules/worksheets/api/resolvers.py new file mode 100644 index 000000000..a2733d206 --- /dev/null +++ b/backend/dataall/modules/worksheets/api/resolvers.py @@ -0,0 +1,85 @@ +from dataall.base.db import exceptions +from 
dataall.modules.worksheets.api.enums import WorksheetRole +from dataall.modules.worksheets.db.worksheet_models import Worksheet +from dataall.modules.worksheets.db.worksheet_repositories import WorksheetRepository +from dataall.modules.worksheets.services.worksheet_service import WorksheetService +from dataall.base.api.context import Context + + +def create_worksheet(context: Context, source, input: dict = None): + if not input: + raise exceptions.RequiredParameter(input) + if not input.get('SamlAdminGroupName'): + raise exceptions.RequiredParameter('groupUri') + if not input.get('label'): + raise exceptions.RequiredParameter('label') + + with context.engine.scoped_session() as session: + return WorksheetService.create_worksheet( + session=session, + username=context.username, + uri=None, + data=input, + ) + + +def update_worksheet( + context: Context, source, worksheetUri: str = None, input: dict = None +): + with context.engine.scoped_session() as session: + return WorksheetService.update_worksheet( + session=session, + username=context.username, + uri=worksheetUri, + data=input + ) + + +def get_worksheet(context: Context, source, worksheetUri: str = None): + with context.engine.scoped_session() as session: + return WorksheetService.get_worksheet( + session=session, + uri=worksheetUri, + ) + + +def resolve_user_role(context: Context, source: Worksheet): + if context.username and source.owner == context.username: + return WorksheetRole.Creator.value + elif context.groups and source.SamlAdminGroupName in context.groups: + return WorksheetRole.Admin.value + return WorksheetRole.NoPermission.value + + +def list_worksheets(context, source, filter: dict = None): + if not filter: + filter = {} + with context.engine.scoped_session() as session: + return WorksheetRepository.paginated_user_worksheets( + session=session, + username=context.username, + groups=context.groups, + uri=None, + data=filter, + check_perm=True, + ) + + +def run_sql_query( + context: Context, source, environmentUri: str = None, worksheetUri: str = None, sqlQuery: str = None +): + with context.engine.scoped_session() as session: + return WorksheetService.run_sql_query( + session=session, + uri=environmentUri, + worksheetUri=worksheetUri, + sqlQuery=sqlQuery + ) + + +def delete_worksheet(context, source, worksheetUri: str = None): + with context.engine.scoped_session() as session: + return WorksheetService.delete_worksheet( + session=session, + uri=worksheetUri + ) diff --git a/backend/dataall/modules/worksheets/api/types.py b/backend/dataall/modules/worksheets/api/types.py new file mode 100644 index 000000000..eb27ed019 --- /dev/null +++ b/backend/dataall/modules/worksheets/api/types.py @@ -0,0 +1,130 @@ +from dataall.base.api import gql +from dataall.modules.worksheets.api.resolvers import resolve_user_role + +AthenaResultColumnDescriptor = gql.ObjectType( + name='AthenaResultColumnDescriptor', + fields=[ + gql.Field(name='columnName', type=gql.NonNullableType(gql.String)), + gql.Field(name='typeName', type=gql.NonNullableType(gql.String)), + ], +) + + +AthenaResultRecordCell = gql.ObjectType( + name='AthenaResultRecordCell', + fields=[ + gql.Field(name='value', type=gql.String), + gql.Field(name='typeName', type=gql.NonNullableType(gql.String)), + gql.Field(name='columnName', type=gql.NonNullableType(gql.String)), + ], +) + +AthenaResultRecord = gql.ObjectType( + name='AthenaResultRecord', + fields=[ + gql.Field(name='cells', type=gql.ArrayType(gql.Ref('AthenaResultRecordCell'))) + ], +) + + +AthenaQueryResult = 
gql.ObjectType( + name='AthenaQueryResult', + fields=[ + gql.Field(name='Error', type=gql.String), + gql.Field(name='OutputLocation', type=gql.String), + gql.Field(name='AthenaQueryId', type=gql.String), + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='region', type=gql.String), + gql.Field(name='ElapsedTimeInMs', type=gql.Integer), + gql.Field(name='DataScannedInBytes', type=gql.Integer), + gql.Field(name='Status', type=gql.String), + gql.Field( + name='columns', type=gql.ArrayType(gql.Ref('AthenaResultColumnDescriptor')) + ), + gql.Field(name='rows', type=gql.ArrayType(gql.Ref('AthenaResultRecord'))), + ], +) + + +Worksheet = gql.ObjectType( + name='Worksheet', + fields=[ + gql.Field(name='worksheetUri', type=gql.ID), + gql.Field(name='label', type=gql.String), + gql.Field(name='name', type=gql.String), + gql.Field(name='tags', type=gql.ArrayType(gql.String)), + gql.Field(name='description', type=gql.String), + gql.Field(name='sqlBody', type=gql.String), + gql.Field(name='chartConfig', type=gql.Ref('WorksheetChartConfig')), + gql.Field(name='created', type=gql.NonNullableType(gql.String)), + gql.Field(name='updated', type=gql.String), + gql.Field(name='owner', type=gql.NonNullableType(gql.String)), + gql.Field(name='SamlAdminGroupName', type=gql.String), + gql.Field( + name='lastSavedQueryResult', + type=gql.Ref('AthenaQueryResult'), + ), + gql.Field( + name='userRoleForWorksheet', + type=gql.Ref('WorksheetRole'), + resolver=resolve_user_role, + ), + ], +) + + +Worksheets = gql.ObjectType( + name='Worksheets', + fields=[ + gql.Field(name='count', type=gql.Integer), + gql.Field(name='page', type=gql.Integer), + gql.Field(name='pages', type=gql.Integer), + gql.Field(name='hasNext', type=gql.Boolean), + gql.Field(name='hasPrevious', type=gql.Boolean), + gql.Field(name='nodes', type=gql.ArrayType(gql.Ref('Worksheet'))), + ], +) + + +WorksheetQueryResult = gql.ObjectType( + name='WorksheetQueryResult', + fields=[ + gql.Field(name='worksheetQueryResultUri', type=gql.ID), + gql.Field(name='queryType', type=gql.NonNullableType(gql.String)), + gql.Field(name='sqlBody', type=gql.NonNullableType(gql.String)), + gql.Field(name='AthenaQueryId', type=gql.NonNullableType(gql.String)), + gql.Field(name='region', type=gql.NonNullableType(gql.String)), + gql.Field(name='AwsAccountId', type=gql.NonNullableType(gql.String)), + gql.Field(name='AthenaOutputBucketName', type=gql.NonNullableType(gql.String)), + gql.Field(name='AthenaOutputKey', type=gql.NonNullableType(gql.String)), + gql.Field(name='timeElapsedInSecond', type=gql.NonNullableType(gql.Integer)), + gql.Field(name='created', type=gql.NonNullableType(gql.String)), + ], +) + + +WorksheetChartDimension = gql.ObjectType( + name='WorksheetChartDimension', + fields=[gql.Field(name='columnName', type=gql.NonNullableType(gql.String))], +) + +WorksheetChartMeasure = gql.ObjectType( + name='WorksheetChartMeasure', + fields=[ + gql.Field(name='columnName', type=gql.NonNullableType(gql.String)), + gql.Field(name='aggregationName', type=gql.String), + ], +) + +WorksheetChartConfig = gql.ObjectType( + name='WorksheetChartConfig', + fields=[ + gql.Field(name='AthenaQueryId', type=gql.String), + gql.Field( + name='dimensions', type=gql.ArrayType(gql.Ref('WorksheetChartDimension')) + ), + gql.Field( + name='measures', type=gql.ArrayType(gql.Ref('WorksheetChartMeasure')) + ), + ], +) diff --git a/backend/dataall/modules/worksheets/aws/__init__.py b/backend/dataall/modules/worksheets/aws/__init__.py new file mode 100644 index 000000000..1315c5b1f 
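Taken together, the worksheet input types, resolvers and the service added later in this change accept a plain dict for worksheet creation. A minimal sketch of the call the createWorksheet resolver ends up making, assuming an open scoped session and a request context from which the permission decorators can resolve the caller's MANAGE_WORKSHEETS tenant permission; all values are illustrative:

    from dataall.modules.worksheets.services.worksheet_service import WorksheetService

    data = {
        'label': 'sales-analysis',            # required by the resolver
        'description': 'Ad-hoc Athena queries over the sales database',
        'tags': ['sales'],
        'SamlAdminGroupName': 'data-team',    # required; this group receives WORKSHEET_ALL on the new worksheet
    }
    worksheet = WorksheetService.create_worksheet(
        session=session, username='alice', uri=None, data=data
    )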
--- /dev/null +++ b/backend/dataall/modules/worksheets/aws/__init__.py @@ -0,0 +1 @@ +"""Contains code that send requests to AWS Athena """ diff --git a/backend/dataall/modules/worksheets/aws/athena_client.py b/backend/dataall/modules/worksheets/aws/athena_client.py new file mode 100644 index 000000000..c0d9e54ab --- /dev/null +++ b/backend/dataall/modules/worksheets/aws/athena_client.py @@ -0,0 +1,48 @@ +from pyathena import connect +from dataall.base.aws.sts import SessionHelper + + +class AthenaClient: + """ Makes requests to AWS Athena """ + + @staticmethod + def run_athena_query(aws_account_id, env_group, s3_staging_dir, region, sql=None): + base_session = SessionHelper.remote_session(accountid=aws_account_id) + boto3_session = SessionHelper.get_session(base_session=base_session, role_arn=env_group.environmentIAMRoleArn) + creds = boto3_session.get_credentials() + connection = connect( + aws_access_key_id=creds.access_key, + aws_secret_access_key=creds.secret_key, + aws_session_token=creds.token, + work_group=env_group.environmentAthenaWorkGroup, + s3_staging_dir=s3_staging_dir, + region_name=region, + ) + cursor = connection.cursor() + cursor.execute(sql) + + return cursor + + @staticmethod + def convert_query_output(cursor): + columns = [] + for f in cursor.description: + columns.append({'columnName': f[0], 'typeName': 'String'}) + + rows = [] + for row in cursor: + record = {'cells': []} + for col_position, column in enumerate(columns): + cell = {} + cell['columnName'] = column['columnName'] + cell['typeName'] = column['typeName'] + cell['value'] = str(row[col_position]) + record['cells'].append(cell) + rows.append(record) + return { + 'error': None, + 'AthenaQueryId': cursor.query_id, + 'ElapsedTime': cursor.total_execution_time_in_millis, + 'rows': rows, + 'columns': columns, + } diff --git a/backend/dataall/modules/worksheets/cdk/__init__.py b/backend/dataall/modules/worksheets/cdk/__init__.py new file mode 100644 index 000000000..5acbc1b00 --- /dev/null +++ b/backend/dataall/modules/worksheets/cdk/__init__.py @@ -0,0 +1,3 @@ +from dataall.modules.worksheets.cdk import pivot_role_worksheets_policy + +__all__ = ["pivot_role_worksheets_policy"] diff --git a/backend/dataall/modules/worksheets/cdk/pivot_role_worksheets_policy.py b/backend/dataall/modules/worksheets/cdk/pivot_role_worksheets_policy.py new file mode 100644 index 000000000..ee8aed4a6 --- /dev/null +++ b/backend/dataall/modules/worksheets/cdk/pivot_role_worksheets_policy.py @@ -0,0 +1,25 @@ +from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet +from aws_cdk import aws_iam as iam + + +class WorksheetsPivotRole(PivotRoleStatementSet): + """ + Class including all permissions needed by the pivot role to work with Athena + It allows pivot role to: + - .... 
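AthenaClient above chains the pivot role into the environment group's IAM role before connecting with pyathena. A minimal usage sketch, assuming an env_group record exposing the two fields the client reads (environmentIAMRoleArn, environmentAthenaWorkGroup) and credentials that can assume the pivot role; account, region and staging prefix are illustrative:

    from dataall.modules.worksheets.aws.athena_client import AthenaClient

    cursor = AthenaClient.run_athena_query(
        aws_account_id='111122223333',
        env_group=env_group,
        s3_staging_dir='s3://example-env-bucket/athenaqueries/example-workgroup/',
        region='eu-west-1',
        sql='SELECT 1',
    )
    # dict with 'columns', 'rows', 'AthenaQueryId', 'ElapsedTime', ...
    result = AthenaClient.convert_query_output(cursor)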
+ """ + def get_statements(self): + statements = [ + iam.PolicyStatement( + sid='AthenaWorkgroups', + effect=iam.Effect.ALLOW, + actions=[ + "athena:GetQueryExecution", + "athena:GetQueryResults", + "athena:GetWorkGroup", + "athena:StartQueryExecution" + ], + resources=[f'arn:aws:athena:*:{self.account}:workgroup/{self.env_resource_prefix}*'], + ) + ] + return statements diff --git a/backend/dataall/modules/worksheets/db/__init__.py b/backend/dataall/modules/worksheets/db/__init__.py new file mode 100644 index 000000000..dbd4edb44 --- /dev/null +++ b/backend/dataall/modules/worksheets/db/__init__.py @@ -0,0 +1 @@ +"""Contains code to interact with the database""" diff --git a/backend/dataall/modules/worksheets/db/worksheet_models.py b/backend/dataall/modules/worksheets/db/worksheet_models.py new file mode 100644 index 000000000..6549cb96c --- /dev/null +++ b/backend/dataall/modules/worksheets/db/worksheet_models.py @@ -0,0 +1,41 @@ +import datetime +import enum + +from sqlalchemy import Column, DateTime, Integer, Enum, String +from sqlalchemy.dialects import postgresql +from sqlalchemy.orm import query_expression + +from dataall.base.db import Base +from dataall.base.db import Resource, utils + + +class QueryType(enum.Enum): + chart = 'chart' + data = 'data' + + +class Worksheet(Resource, Base): + __tablename__ = 'worksheet' + worksheetUri = Column(String, primary_key=True, default=utils.uuid('_')) + SamlAdminGroupName = Column(String, nullable=False) + sqlBody = Column(String, nullable=True) + chartConfig = Column(postgresql.JSON, nullable=True) + userRoleForWorksheet = query_expression() + lastSavedAthenaQueryIdForQuery = Column(String, nullable=True) + lastSavedAthenaQueryIdForChart = Column(String, nullable=True) + + +class WorksheetQueryResult(Base): + __tablename__ = 'worksheet_query_result' + worksheetUri = Column(String, nullable=False) + AthenaQueryId = Column(String, primary_key=True) + status = Column(String, nullable=False) + queryType = Column(Enum(QueryType), nullable=False, default=True) + sqlBody = Column(String, nullable=False) + AwsAccountId = Column(String, nullable=False) + region = Column(String, nullable=False) + OutputLocation = Column(String, nullable=False) + error = Column(String, nullable=True) + ElapsedTimeInMs = Column(Integer, nullable=True) + DataScannedInBytes = Column(Integer, nullable=True) + created = Column(DateTime, default=datetime.datetime.now) diff --git a/backend/dataall/modules/worksheets/db/worksheet_repositories.py b/backend/dataall/modules/worksheets/db/worksheet_repositories.py new file mode 100644 index 000000000..eb2239c1e --- /dev/null +++ b/backend/dataall/modules/worksheets/db/worksheet_repositories.py @@ -0,0 +1,57 @@ +""" +DAO layer that encapsulates the logic and interaction with the database for worksheets +""" +from sqlalchemy import or_ +from sqlalchemy.orm import Query + +from dataall.core.environment.services.environment_resource_manager import EnvironmentResource +from dataall.base.db import paginate +from dataall.modules.worksheets.db.worksheet_models import Worksheet, WorksheetQueryResult + + +class WorksheetRepository(EnvironmentResource): + """DAO layer for worksheets""" + _DEFAULT_PAGE = 1 + _DEFAULT_PAGE_SIZE = 10 + + @staticmethod + def count_resources(session, environment, group_uri) -> int: + return ( + session.query(WorksheetQueryResult) + .filter( + WorksheetQueryResult.AwsAccountId == environment.AwsAccountId + ) + .count() + ) + + @staticmethod + def find_worksheet_by_uri(session, uri) -> Worksheet: + return 
session.query(Worksheet).get(uri) + + @staticmethod + def query_user_worksheets(session, username, groups, filter) -> Query: + query = session.query(Worksheet).filter( + or_( + Worksheet.owner == username, + Worksheet.SamlAdminGroupName.in_(groups), + ) + ) + if filter and filter.get('term'): + query = query.filter( + or_( + Worksheet.label.ilike('%' + filter.get('term') + '%'), + Worksheet.description.ilike('%' + filter.get('term') + '%'), + Worksheet.tags.contains(f"{{{filter.get('term')}}}"), + ) + ) + return query + + @staticmethod + def paginated_user_worksheets( + session, username, groups, uri, data=None, check_perm=None + ) -> dict: + return paginate( + query=WorksheetRepository.query_user_worksheets(session, username, groups, data), + page=data.get('page', WorksheetRepository._DEFAULT_PAGE), + page_size=data.get('pageSize', WorksheetRepository._DEFAULT_PAGE_SIZE), + ).to_dict() diff --git a/backend/dataall/modules/worksheets/services/__init__.py b/backend/dataall/modules/worksheets/services/__init__.py new file mode 100644 index 000000000..e7f8e1bdc --- /dev/null +++ b/backend/dataall/modules/worksheets/services/__init__.py @@ -0,0 +1,7 @@ +""" +Contains the code needed for service layer. +The service layer is a layer where all business logic is aggregated +""" +from dataall.modules.worksheets.services import worksheet_service, worksheet_permissions + +__all__ = ["worksheet_service", "worksheet_permissions"] diff --git a/backend/dataall/modules/worksheets/services/worksheet_permissions.py b/backend/dataall/modules/worksheets/services/worksheet_permissions.py new file mode 100644 index 000000000..b64678805 --- /dev/null +++ b/backend/dataall/modules/worksheets/services/worksheet_permissions.py @@ -0,0 +1,40 @@ +from dataall.core.permissions.permissions import TENANT_ALL, TENANT_ALL_WITH_DESC, RESOURCES_ALL, \ + RESOURCES_ALL_WITH_DESC, ENVIRONMENT_INVITED, ENVIRONMENT_INVITATION_REQUEST, ENVIRONMENT_ALL + +MANAGE_WORKSHEETS = 'MANAGE_WORKSHEETS' + +TENANT_ALL.append(MANAGE_WORKSHEETS) +TENANT_ALL_WITH_DESC[MANAGE_WORKSHEETS] = 'Manage worksheets' + +""" +WORKSHEETS +""" +GET_WORKSHEET = 'GET_WORKSHEET' +UPDATE_WORKSHEET = 'UPDATE_WORKSHEET' +DELETE_WORKSHEET = 'DELETE_WORKSHEET' +RUN_WORKSHEET_QUERY = 'RUN_WORKSHEET_QUERY' +WORKSHEET_ALL = [ + GET_WORKSHEET, + UPDATE_WORKSHEET, + DELETE_WORKSHEET, + RUN_WORKSHEET_QUERY, +] + +RESOURCES_ALL.extend(WORKSHEET_ALL) + +for perm in WORKSHEET_ALL: + RESOURCES_ALL_WITH_DESC[perm] = perm + +""" +RUN ATHENA QUERY +""" +RUN_ATHENA_QUERY = 'RUN_ATHENA_QUERY' + +ENVIRONMENT_INVITED.append(RUN_ATHENA_QUERY) + +ENVIRONMENT_INVITATION_REQUEST.append(RUN_ATHENA_QUERY) + +ENVIRONMENT_ALL.append(RUN_ATHENA_QUERY) + +RESOURCES_ALL.append(RUN_ATHENA_QUERY) +RESOURCES_ALL_WITH_DESC[RUN_ATHENA_QUERY] = "Run Athena queries on this environment" diff --git a/backend/dataall/modules/worksheets/services/worksheet_service.py b/backend/dataall/modules/worksheets/services/worksheet_service.py new file mode 100644 index 000000000..024d2ad6f --- /dev/null +++ b/backend/dataall/modules/worksheets/services/worksheet_service.py @@ -0,0 +1,118 @@ +import logging + +from dataall.core.activity.db.activity_models import Activity +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.permissions.permission_checker import has_tenant_permission, has_resource_permission +from dataall.base.db import exceptions +from 
dataall.modules.worksheets.aws.athena_client import AthenaClient +from dataall.modules.worksheets.db.worksheet_models import Worksheet +from dataall.modules.worksheets.db.worksheet_repositories import WorksheetRepository +from dataall.modules.worksheets.services.worksheet_permissions import MANAGE_WORKSHEETS, UPDATE_WORKSHEET, \ + WORKSHEET_ALL, GET_WORKSHEET, DELETE_WORKSHEET, RUN_ATHENA_QUERY + + +logger = logging.getLogger(__name__) + + +class WorksheetService: + @staticmethod + def get_worksheet_by_uri(session, uri: str) -> Worksheet: + if not uri: + raise exceptions.RequiredParameter(param_name='worksheetUri') + worksheet = WorksheetRepository.find_worksheet_by_uri(session, uri) + if not worksheet: + raise exceptions.ObjectNotFound('Worksheet', uri) + return worksheet + + @staticmethod + @has_tenant_permission(MANAGE_WORKSHEETS) + def create_worksheet(session, username, uri, data=None) -> Worksheet: + worksheet = Worksheet( + owner=username, + label=data.get('label'), + description=data.get('description', 'No description provided'), + tags=data.get('tags'), + chartConfig={'dimensions': [], 'measures': [], 'chartType': 'bar'}, + SamlAdminGroupName=data['SamlAdminGroupName'], + ) + + session.add(worksheet) + session.commit() + + activity = Activity( + action='WORKSHEET:CREATE', + label='WORKSHEET:CREATE', + owner=username, + summary=f'{username} created worksheet {worksheet.name} ', + targetUri=worksheet.worksheetUri, + targetType='worksheet', + ) + session.add(activity) + + ResourcePolicy.attach_resource_policy( + session=session, + group=data['SamlAdminGroupName'], + permissions=WORKSHEET_ALL, + resource_uri=worksheet.worksheetUri, + resource_type=Worksheet.__name__, + ) + return worksheet + + @staticmethod + @has_resource_permission(UPDATE_WORKSHEET) + def update_worksheet(session, username, uri, data=None): + worksheet = WorksheetService.get_worksheet_by_uri(session, uri) + for field in data.keys(): + setattr(worksheet, field, data.get(field)) + session.commit() + + activity = Activity( + action='WORKSHEET:UPDATE', + label='WORKSHEET:UPDATE', + owner=username, + summary=f'{username} updated worksheet {worksheet.name} ', + targetUri=worksheet.worksheetUri, + targetType='worksheet', + ) + session.add(activity) + return worksheet + + @staticmethod + @has_resource_permission(GET_WORKSHEET) + def get_worksheet(session, uri): + worksheet = WorksheetService.get_worksheet_by_uri(session, uri) + return worksheet + + @staticmethod + @has_resource_permission(DELETE_WORKSHEET) + def delete_worksheet(session, uri) -> bool: + worksheet = WorksheetService.get_worksheet_by_uri(session, uri) + session.delete(worksheet) + ResourcePolicy.delete_resource_policy( + session=session, + group=worksheet.SamlAdminGroupName, + resource_uri=uri, + resource_type=Worksheet.__name__, + ) + return True + + @staticmethod + @has_resource_permission(RUN_ATHENA_QUERY) + def run_sql_query(session, uri, worksheetUri, sqlQuery): + environment = EnvironmentService.get_environment_by_uri(session, uri) + worksheet = WorksheetService.get_worksheet_by_uri(session, worksheetUri) + + env_group = EnvironmentService.get_environment_group( + session, worksheet.SamlAdminGroupName, environment.environmentUri + ) + + cursor = AthenaClient.run_athena_query( + aws_account_id=environment.AwsAccountId, + env_group=env_group, + s3_staging_dir=f's3://{environment.EnvironmentDefaultBucketName}/athenaqueries/{env_group.environmentAthenaWorkGroup}/', + region=environment.region, + sql=sqlQuery + ) + + return 
AthenaClient.convert_query_output(cursor) diff --git a/backend/dataall/searchproxy/__init__.py b/backend/dataall/searchproxy/__init__.py deleted file mode 100644 index 1a69dac6c..000000000 --- a/backend/dataall/searchproxy/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -from .connect import connect -from .indexers import upsert_dataset -from .indexers import upsert_table -from .indexers import upsert_dataset_tables -from .search import run_query -from .upsert import upsert - -__all__ = [ - 'connect', - 'run_query', - 'upsert', - 'upsert_dataset', - 'upsert_table', - 'upsert_dataset_tables', -] diff --git a/backend/dataall/searchproxy/indexers.py b/backend/dataall/searchproxy/indexers.py deleted file mode 100644 index 78886716d..000000000 --- a/backend/dataall/searchproxy/indexers.py +++ /dev/null @@ -1,363 +0,0 @@ -import logging - -from sqlalchemy import and_ -from sqlalchemy.orm import with_expression - -from .upsert import upsert -from .. import db -from ..db import models - -log = logging.getLogger(__name__) - - -def get_target_glossary_terms(session, targetUri): - q = ( - session.query(models.TermLink) - .options( - with_expression(models.TermLink.path, models.GlossaryNode.path), - with_expression(models.TermLink.label, models.GlossaryNode.label), - with_expression(models.TermLink.readme, models.GlossaryNode.readme), - ) - .join( - models.GlossaryNode, models.GlossaryNode.nodeUri == models.TermLink.nodeUri - ) - .filter( - and_( - models.TermLink.targetUri == targetUri, - models.TermLink.approvedBySteward.is_(True), - ) - ) - ) - return [t.path for t in q] - - -def upsert_dataset(session, es, datasetUri: str): - dataset = ( - session.query( - models.Dataset.datasetUri.label('datasetUri'), - models.Dataset.name.label('name'), - models.Dataset.owner.label('owner'), - models.Dataset.label.label('label'), - models.Dataset.description.label('description'), - models.Dataset.confidentiality.label('classification'), - models.Dataset.tags.label('tags'), - models.Dataset.topics.label('topics'), - models.Dataset.region.label('region'), - models.Organization.organizationUri.label('orgUri'), - models.Organization.name.label('orgName'), - models.Environment.environmentUri.label('envUri'), - models.Environment.name.label('envName'), - models.Dataset.SamlAdminGroupName.label('admins'), - models.Dataset.GlueDatabaseName.label('database'), - models.Dataset.S3BucketName.label('source'), - models.Dataset.created, - models.Dataset.updated, - models.Dataset.deleted, - ) - .join( - models.Organization, - models.Dataset.organizationUri == models.Organization.organizationUri, - ) - .join( - models.Environment, - models.Dataset.environmentUri == models.Environment.environmentUri, - ) - .filter(models.Dataset.datasetUri == datasetUri) - .first() - ) - count_tables = db.api.Dataset.count_dataset_tables(session, datasetUri) - count_folders = db.api.Dataset.count_dataset_locations(session, datasetUri) - count_upvotes = db.api.Vote.count_upvotes( - session, None, None, datasetUri, {'targetType': 'dataset'} - ) - - if dataset: - glossary = get_target_glossary_terms(session, datasetUri) - upsert( - es=es, - index='dataall-index', - id=datasetUri, - doc={ - 'name': dataset.name, - 'owner': dataset.owner, - 'label': dataset.label, - 'admins': dataset.admins, - 'database': dataset.database, - 'source': dataset.source, - 'resourceKind': 'dataset', - 'description': dataset.description, - 'classification': dataset.classification, - 'tags': [t.replace('-', '') for t in dataset.tags or []], - 'topics': dataset.topics, - 
'region': dataset.region.replace('-', ''), - 'environmentUri': dataset.envUri, - 'environmentName': dataset.envName, - 'organizationUri': dataset.orgUri, - 'organizationName': dataset.orgName, - 'created': dataset.created, - 'updated': dataset.updated, - 'deleted': dataset.deleted, - 'glossary': glossary, - 'tables': count_tables, - 'folders': count_folders, - 'upvotes': count_upvotes, - }, - ) - return dataset - - -def upsert_table(session, es, tableUri: str): - table = ( - session.query( - models.DatasetTable.datasetUri.label('datasetUri'), - models.DatasetTable.tableUri.label('uri'), - models.DatasetTable.name.label('name'), - models.DatasetTable.owner.label('owner'), - models.DatasetTable.label.label('label'), - models.DatasetTable.description.label('description'), - models.Dataset.confidentiality.label('classification'), - models.DatasetTable.tags.label('tags'), - models.Dataset.topics.label('topics'), - models.Dataset.region.label('region'), - models.Organization.organizationUri.label('orgUri'), - models.Organization.name.label('orgName'), - models.Environment.environmentUri.label('envUri'), - models.Environment.name.label('envName'), - models.Dataset.SamlAdminGroupName.label('admins'), - models.Dataset.GlueDatabaseName.label('database'), - models.Dataset.S3BucketName.label('source'), - models.DatasetTable.created, - models.DatasetTable.updated, - models.DatasetTable.deleted, - ) - .join( - models.Dataset, - models.Dataset.datasetUri == models.DatasetTable.datasetUri, - ) - .join( - models.Organization, - models.Dataset.organizationUri == models.Organization.organizationUri, - ) - .join( - models.Environment, - models.Dataset.environmentUri == models.Environment.environmentUri, - ) - .filter(models.DatasetTable.tableUri == tableUri) - .first() - ) - - if table: - glossary = get_target_glossary_terms(session, tableUri) - tags = table.tags if table.tags else [] - upsert( - es=es, - index='dataall-index', - id=tableUri, - doc={ - 'name': table.name, - 'admins': table.admins, - 'owner': table.owner, - 'label': table.label, - 'resourceKind': 'table', - 'description': table.description, - 'database': table.database, - 'source': table.source, - 'classification': table.classification, - 'tags': [t.replace('-', '') for t in tags or []], - 'topics': table.topics, - 'region': table.region.replace('-', ''), - 'datasetUri': table.datasetUri, - 'environmentUri': table.envUri, - 'environmentName': table.envName, - 'organizationUri': table.orgUri, - 'organizationName': table.orgName, - 'created': table.created, - 'updated': table.updated, - 'deleted': table.deleted, - 'glossary': glossary, - }, - ) - upsert_dataset(session, es, table.datasetUri) - return table - - -def upsert_folder(session, es, locationUri: str): - folder = ( - session.query( - models.DatasetStorageLocation.datasetUri.label('datasetUri'), - models.DatasetStorageLocation.locationUri.label('uri'), - models.DatasetStorageLocation.name.label('name'), - models.DatasetStorageLocation.owner.label('owner'), - models.DatasetStorageLocation.label.label('label'), - models.DatasetStorageLocation.description.label('description'), - models.DatasetStorageLocation.tags.label('tags'), - models.DatasetStorageLocation.region.label('region'), - models.Organization.organizationUri.label('orgUri'), - models.Organization.name.label('orgName'), - models.Environment.environmentUri.label('envUri'), - models.Environment.name.label('envName'), - models.Dataset.SamlAdminGroupName.label('admins'), - models.Dataset.S3BucketName.label('source'), - 
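These module-agnostic upsert_* helpers are not moved verbatim: per-resource indexing now hangs off BaseIndexer in dataall.modules.catalog.indexers.base_indexer, the same class the new vote resolvers register against. A minimal sketch of the shape a module-level indexer takes under that assumption; the class name and body are illustrative, not part of this diff:

    from dataall.modules.catalog.indexers.base_indexer import BaseIndexer

    class DashboardIndexer(BaseIndexer):
        # illustrative: build the catalog document for one dashboard and upsert it into the search index
        @classmethod
        def upsert(cls, session, target_uri: str):
            ...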
models.Dataset.topics.label('topics'), - models.Dataset.confidentiality.label('classification'), - models.DatasetStorageLocation.created, - models.DatasetStorageLocation.updated, - models.DatasetStorageLocation.deleted, - ) - .join( - models.Dataset, - models.Dataset.datasetUri == models.DatasetStorageLocation.datasetUri, - ) - .join( - models.Organization, - models.Dataset.organizationUri == models.Organization.organizationUri, - ) - .join( - models.Environment, - models.Dataset.environmentUri == models.Environment.environmentUri, - ) - .filter(models.DatasetStorageLocation.locationUri == locationUri) - .first() - ) - if folder: - glossary = get_target_glossary_terms(session, locationUri) - upsert( - es=es, - index='dataall-index', - id=locationUri, - doc={ - 'name': folder.name, - 'admins': folder.admins, - 'owner': folder.owner, - 'label': folder.label, - 'resourceKind': 'folder', - 'description': folder.description, - 'source': folder.source, - 'classification': folder.classification, - 'tags': [f.replace('-', '') for f in folder.tags or []], - 'topics': folder.topics, - 'region': folder.region.replace('-', ''), - 'datasetUri': folder.datasetUri, - 'environmentUri': folder.envUri, - 'environmentName': folder.envName, - 'organizationUri': folder.orgUri, - 'organizationName': folder.orgName, - 'created': folder.created, - 'updated': folder.updated, - 'deleted': folder.deleted, - 'glossary': glossary, - }, - ) - upsert_dataset(session, es, folder.datasetUri) - return folder - - -def upsert_dashboard(session, es, dashboardUri: str): - dashboard = ( - session.query( - models.Dashboard.dashboardUri.label('uri'), - models.Dashboard.name.label('name'), - models.Dashboard.owner.label('owner'), - models.Dashboard.label.label('label'), - models.Dashboard.description.label('description'), - models.Dashboard.tags.label('tags'), - models.Dashboard.region.label('region'), - models.Organization.organizationUri.label('orgUri'), - models.Organization.name.label('orgName'), - models.Environment.environmentUri.label('envUri'), - models.Environment.name.label('envName'), - models.Dashboard.SamlGroupName.label('admins'), - models.Dashboard.created, - models.Dashboard.updated, - models.Dashboard.deleted, - ) - .join( - models.Organization, - models.Dashboard.organizationUri == models.Dashboard.organizationUri, - ) - .join( - models.Environment, - models.Dashboard.environmentUri == models.Environment.environmentUri, - ) - .filter(models.Dashboard.dashboardUri == dashboardUri) - .first() - ) - if dashboard: - glossary = get_target_glossary_terms(session, dashboardUri) - count_upvotes = db.api.Vote.count_upvotes( - session, None, None, dashboardUri, {'targetType': 'dashboard'} - ) - upsert( - es=es, - index='dataall-index', - id=dashboardUri, - doc={ - 'name': dashboard.name, - 'admins': dashboard.admins, - 'owner': dashboard.owner, - 'label': dashboard.label, - 'resourceKind': 'dashboard', - 'description': dashboard.description, - 'tags': [f.replace('-', '') for f in dashboard.tags or []], - 'topics': [], - 'region': dashboard.region.replace('-', ''), - 'environmentUri': dashboard.envUri, - 'environmentName': dashboard.envName, - 'organizationUri': dashboard.orgUri, - 'organizationName': dashboard.orgName, - 'created': dashboard.created, - 'updated': dashboard.updated, - 'deleted': dashboard.deleted, - 'glossary': glossary, - 'upvotes': count_upvotes, - }, - ) - return dashboard - - -def upsert_dataset_tables(session, es, datasetUri: str): - tables = ( - session.query(models.DatasetTable) - .filter( - and_( - 
models.DatasetTable.datasetUri == datasetUri, - models.DatasetTable.LastGlueTableStatus != 'Deleted', - ) - ) - .all() - ) - for table in tables: - upsert_table(session, es, table.tableUri) - return tables - - -def remove_deleted_tables(session, es, datasetUri: str): - tables = ( - session.query(models.DatasetTable) - .filter( - and_( - models.DatasetTable.datasetUri == datasetUri, - models.DatasetTable.LastGlueTableStatus == 'Deleted', - ) - ) - .all() - ) - for table in tables: - delete_doc(es, doc_id=table.tableUri) - return tables - - -def upsert_dataset_folders(session, es, datasetUri: str): - folders = ( - session.query(models.DatasetStorageLocation) - .filter(models.DatasetStorageLocation.datasetUri == datasetUri) - .all() - ) - for folder in folders: - upsert_folder(session, es, folder.locationUri) - return folders - - -def delete_doc(es, doc_id, index='dataall-index'): - es.delete(index=index, id=doc_id, ignore=[400, 404]) - return True diff --git a/backend/dataall/searchproxy/upsert.py b/backend/dataall/searchproxy/upsert.py deleted file mode 100644 index 0fd9735e5..000000000 --- a/backend/dataall/searchproxy/upsert.py +++ /dev/null @@ -1,15 +0,0 @@ -import logging -from datetime import datetime - -log = logging.getLogger(__name__) - - -def upsert(es, index, id, doc): - doc['_indexed'] = datetime.now() - if es: - res = es.index(index=index, id=id, body=doc) - log.info(f'doc {doc} for id {id} indexed with response {res}') - return True - else: - log.error(f'ES config is missing doc {doc} for id {id} was not indexed') - return False diff --git a/backend/dataall/tasks/__init__.py b/backend/dataall/tasks/__init__.py deleted file mode 100644 index 02ccaaa8b..000000000 --- a/backend/dataall/tasks/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .tables_syncer import sync_tables -from .catalog_indexer import index_objects diff --git a/backend/dataall/tasks/bucket_policy_updater.py b/backend/dataall/tasks/bucket_policy_updater.py deleted file mode 100644 index 5b8f322be..000000000 --- a/backend/dataall/tasks/bucket_policy_updater.py +++ /dev/null @@ -1,324 +0,0 @@ -import json -import logging -import os -import sys -import typing - -from botocore.exceptions import ClientError -from sqlalchemy import and_ - -from ..aws.handlers.sts import SessionHelper -from ..db import get_engine -from ..db import models, api - -root = logging.getLogger() -root.setLevel(logging.INFO) -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) -log = logging.getLogger(__name__) - - -class BucketPoliciesUpdater: - def __init__(self, engine, event=None): - self.engine = engine - self.event = event - self.reports = [] - - def sync_imported_datasets_bucket_policies(self): - with self.engine.scoped_session() as session: - imported_datasets = ( - session.query(models.Dataset) - .filter( - and_( - models.Dataset.imported == True, - models.Dataset.deleted.is_(None), - ) - ) - .all() - ) - log.info(f'Found {len(imported_datasets)} imported datasets') - - for dataset in imported_datasets: - account_prefixes = {} - - shared_tables = self.get_shared_tables(dataset) - log.info( - f'Found {len(shared_tables)} shared tables with dataset {dataset.S3BucketName}' - ) - - shared_folders = self.get_shared_folders(dataset) - log.info( - f'Found {len(shared_folders)} shared folders with dataset {dataset.S3BucketName}' - ) - - for table in shared_tables: - data_prefix = self.clear_table_location_from_delta_path(table) - prefix = data_prefix.rstrip('/') + '/*' - accountid = table.TargetAwsAccountId - - prefix 
= f"arn:aws:s3:::{prefix.split('s3://')[1]}" - self.group_prefixes_by_accountid( - accountid, prefix, account_prefixes - ) - - bucket = ( - f"arn:aws:s3:::{prefix.split('arn:aws:s3:::')[1].split('/')[0]}" - ) - self.group_prefixes_by_accountid( - accountid, bucket, account_prefixes - ) - - for folder in shared_folders: - prefix = f'arn:aws:s3:::{folder.S3Prefix}' + '/*' - accountid = folder.AwsAccountId - self.group_prefixes_by_accountid( - accountid, prefix, account_prefixes - ) - bucket = ( - f"arn:aws:s3:::{prefix.split('arn:aws:s3:::')[1].split('/')[0]}" - ) - self.group_prefixes_by_accountid( - accountid, bucket, account_prefixes - ) - - client = self.init_s3_client(dataset) - - policy = self.get_bucket_policy(client, dataset) - - BucketPoliciesUpdater.update_policy(account_prefixes, policy) - - report = self.put_bucket_policy(client, dataset, policy) - - self.reports.append(report) - - if any(r['status'] == 'FAILED' for r in self.reports): - raise Exception( - 'Failed to update one or more bucket policies' - f'Check the reports: {self.reports}' - ) - return self.reports - - @staticmethod - def clear_table_location_from_delta_path(table): - data_prefix = ( - table.S3Prefix - if '/packages.delta' not in table.S3Prefix - else table.S3Prefix.replace('/packages.delta', '') - ) - data_prefix = ( - data_prefix - if '/_symlink_format_manifest' not in data_prefix - else data_prefix.replace('/_symlink_format_manifest', '') - ) - return data_prefix - - @staticmethod - def update_policy(account_prefixes, policy): - log.info('Updating Policy') - statements = policy['Statement'] - for key, value in account_prefixes.items(): - added = False - for s in statements: - if key in s.get('Principal').get('AWS') and 'DA' in s.get('Sid'): - log.info(f'Principal already on the policy {key}') - added = True - for v in value: - if v not in s.get('Resource'): - existing_resources = ( - list(s.get('Resource')) - if not isinstance(s.get('Resource'), list) - else s.get('Resource') - ) - existing_resources.append(v) - s['Resource'] = existing_resources - break - if not added: - log.info( - f'Principal {key} with permissions {value} ' - f'Not on the policy adding it' - ) - statements.append( - { - 'Sid': f'DA{key}', - 'Effect': 'Allow', - 'Action': ['s3:Get*', 's3:List*'], - 'Resource': value - if isinstance(value, list) and len(value) > 1 - else value, - 'Principal': {'AWS': key}, - } - ) - policy.update({'Statement': statements}) - log.info(f'Final Policy --> {policy}') - return policy - - @classmethod - def group_prefixes_by_accountid(cls, accountid, prefix, account_prefixes): - if account_prefixes.get(accountid): - prefixes = account_prefixes[accountid] - if prefix not in prefixes: - prefixes.append(prefix) - account_prefixes[accountid] = prefixes - else: - account_prefixes[accountid] = [prefix] - return account_prefixes - - def get_shared_tables(self, dataset) -> typing.List[models.ShareObjectItem]: - with self.engine.scoped_session() as session: - tables = ( - session.query( - models.DatasetTable.GlueDatabaseName.label('GlueDatabaseName'), - models.DatasetTable.GlueTableName.label('GlueTableName'), - models.DatasetTable.S3Prefix.label('S3Prefix'), - models.DatasetTable.AWSAccountId.label('SourceAwsAccountId'), - models.DatasetTable.region.label('SourceRegion'), - models.Environment.AwsAccountId.label('TargetAwsAccountId'), - models.Environment.region.label('TargetRegion'), - ) - .join( - models.ShareObjectItem, - and_( - models.ShareObjectItem.itemUri == models.DatasetTable.tableUri - ), - ) - .join( - 
models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, - ) - .join( - models.Environment, - models.Environment.environmentUri - == models.ShareObject.environmentUri, - ) - .filter( - and_( - models.DatasetTable.datasetUri == dataset.datasetUri, - models.DatasetTable.deleted.is_(None), - models.ShareObjectItem.status - == models.Enums.ShareObjectStatus.Approved.value, - ) - ) - ).all() - return tables - - def get_shared_folders(self, dataset) -> typing.List[models.DatasetStorageLocation]: - with self.engine.scoped_session() as session: - locations = ( - session.query( - models.DatasetStorageLocation.locationUri.label('locationUri'), - models.DatasetStorageLocation.S3BucketName.label('S3BucketName'), - models.DatasetStorageLocation.S3Prefix.label('S3Prefix'), - models.Environment.AwsAccountId.label('AwsAccountId'), - models.Environment.region.label('region'), - ) - .join( - models.ShareObjectItem, - and_( - models.ShareObjectItem.itemUri - == models.DatasetStorageLocation.locationUri - ), - ) - .join( - models.ShareObject, - models.ShareObject.shareUri == models.ShareObjectItem.shareUri, - ) - .join( - models.Environment, - models.Environment.environmentUri - == models.ShareObject.environmentUri, - ) - .filter( - and_( - models.DatasetStorageLocation.datasetUri == dataset.datasetUri, - models.DatasetStorageLocation.deleted.is_(None), - models.ShareObjectItem.status - == models.Enums.ShareObjectStatus.Approved.value, - ) - ) - ).all() - return locations - - @classmethod - def init_s3_client(cls, dataset): - session = SessionHelper.remote_session(accountid=dataset.AwsAccountId) - client = session.client('s3') - return client - - @classmethod - def get_bucket_policy(cls, client, dataset): - try: - policy = client.get_bucket_policy(Bucket=dataset.S3BucketName)['Policy'] - log.info(f'Current bucket policy---->:{policy}') - policy = json.loads(policy) - except ClientError as err: - if err.response['Error']['Code'] == 'NoSuchBucketPolicy': - log.info(f"No policy attached to '{dataset.S3BucketName}'") - - elif err.response['Error']['Code'] == 'NoSuchBucket': - log.error(f'Bucket deleted {dataset.S3BucketName}') - - elif err.response['Error']['Code'] == 'AccessDenied': - log.error( - f'Access denied in {dataset.AwsAccountId} ' - f'(s3:{err.operation_name}, ' - f"resource='{dataset.S3BucketName}')" - ) - else: - log.exception( - f"Failed to get '{dataset.S3BucketName}' policy in {dataset.AwsAccountId}" - ) - policy = { - 'Version': '2012-10-17', - 'Statement': [ - { - 'Sid': 'OwnerAccount', - 'Effect': 'Allow', - 'Action': ['s3:*'], - 'Resource': [ - f'arn:aws:s3:::{dataset.S3BucketName}', - f'arn:aws:s3:::{dataset.S3BucketName}/*', - ], - 'Principal': { - 'AWS': f'arn:aws:iam::{dataset.AwsAccountId}:root' - }, - } - ], - } - - return policy - - @staticmethod - def put_bucket_policy(s3_client, dataset, policy): - update_policy_report = { - 'datasetUri': dataset.datasetUri, - 'bucketName': dataset.S3BucketName, - 'accountId': dataset.AwsAccountId, - } - try: - policy_json = json.dumps(policy) if isinstance(policy, dict) else policy - log.info( - f"Putting new bucket policy on '{dataset.S3BucketName}' policy {policy_json}" - ) - response = s3_client.put_bucket_policy( - Bucket=dataset.S3BucketName, Policy=policy_json - ) - log.info(f'Bucket Policy updated: {response}') - update_policy_report.update({'status': 'SUCCEEDED'}) - except ClientError as e: - log.error( - f'Failed to update bucket policy ' - f"on '{dataset.S3BucketName}' policy {policy} " - f'due to {e} ' - ) - 
update_policy_report.update({'status': 'FAILED'}) - - return update_policy_report - - -if __name__ == '__main__': - ENVNAME = os.environ.get('envname', 'local') - ENGINE = get_engine(envname=ENVNAME) - log.info('Updating bucket policies for shared datasets...') - service = BucketPoliciesUpdater(engine=ENGINE) - service.sync_imported_datasets_bucket_policies() - log.info('Bucket policies for shared datasets update successfully...') diff --git a/backend/dataall/tasks/catalog_indexer.py b/backend/dataall/tasks/catalog_indexer.py deleted file mode 100644 index 2a53880c8..000000000 --- a/backend/dataall/tasks/catalog_indexer.py +++ /dev/null @@ -1,62 +0,0 @@ -import logging -import os -import sys - -from .. import db -from ..db import get_engine, exceptions -from ..db import models -from ..searchproxy import indexers -from ..searchproxy.connect import ( - connect, -) -from ..utils.alarm_service import AlarmService - -root = logging.getLogger() -root.setLevel(logging.INFO) -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) -log = logging.getLogger(__name__) - - -def index_objects(engine, es): - try: - if not es: - raise exceptions.AWSResourceNotFound( - action='CATALOG_INDEXER_TASK', message='ES configuration not found' - ) - indexed_objects_counter = 0 - with engine.scoped_session() as session: - - all_datasets: [models.Dataset] = db.api.Dataset.list_all_active_datasets( - session - ) - log.info(f'Found {len(all_datasets)} datasets') - dataset: models.Dataset - for dataset in all_datasets: - tables = indexers.upsert_dataset_tables(session, es, dataset.datasetUri) - folders = indexers.upsert_dataset_folders( - session, es, dataset.datasetUri - ) - indexed_objects_counter = ( - indexed_objects_counter + len(tables) + len(folders) + 1 - ) - - all_dashboards: [models.Dashboard] = session.query(models.Dashboard).all() - log.info(f'Found {len(all_dashboards)} dashboards') - dashboard: models.Dashboard - for dashboard in all_dashboards: - indexers.upsert_dashboard(session, es, dashboard.dashboardUri) - indexed_objects_counter = indexed_objects_counter + 1 - - log.info(f'Successfully indexed {indexed_objects_counter} objects') - return indexed_objects_counter - except Exception as e: - AlarmService().trigger_catalog_indexing_failure_alarm(error=str(e)) - raise e - - -if __name__ == '__main__': - ENVNAME = os.environ.get('envname', 'local') - ENGINE = get_engine(envname=ENVNAME) - ES = connect(envname=ENVNAME) - index_objects(engine=ENGINE, es=ES) diff --git a/backend/dataall/tasks/data_sharing/data_sharing_service.py b/backend/dataall/tasks/data_sharing/data_sharing_service.py deleted file mode 100644 index 40b79f1c1..000000000 --- a/backend/dataall/tasks/data_sharing/data_sharing_service.py +++ /dev/null @@ -1,299 +0,0 @@ -import logging -import os - -from .share_processors.lf_process_cross_account_share import ProcessLFCrossAccountShare -from .share_processors.lf_process_same_account_share import ProcessLFSameAccountShare -from .share_processors.s3_process_share import ProcessS3Share - -from ...aws.handlers.ram import Ram -from ...aws.handlers.sts import SessionHelper -from ...db import api, models, Engine -from ...utils import Parameter - -log = logging.getLogger(__name__) - - -class DataSharingService: - def __init__(self): - pass - - @classmethod - def approve_share(cls, engine: Engine, share_uri: str) -> bool: - """ - 1) Updates share object State Machine with the Action: Start - 2) Retrieves share data and items in Share_Approved state - 3) Calls sharing folders 
processor to grant share - 4) Calls sharing tables processor for same or cross account sharing to grant share - 5) Updates share object State Machine with the Action: Finish - - Parameters - ---------- - engine : db.engine - share_uri : share uri - - Returns - ------- - True if sharing succeeds, - False if folder or table sharing failed - """ - with engine.scoped_session() as session: - ( - source_env_group, - env_group, - dataset, - share, - source_environment, - target_environment, - ) = api.ShareObject.get_share_data(session, share_uri) - - Share_SM = api.ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(models.Enums.ShareObjectActions.Start.value) - Share_SM.update_state(session, share, new_share_state) - - ( - shared_tables, - shared_folders - ) = api.ShareObject.get_share_data_items(session, share_uri, models.ShareItemStatus.Share_Approved.value) - - log.info(f'Granting permissions to folders: {shared_folders}') - - approved_folders_succeed = ProcessS3Share.process_approved_shares( - session, - dataset, - share, - shared_folders, - source_environment, - target_environment, - source_env_group, - env_group - ) - log.info(f'sharing folders succeeded = {approved_folders_succeed}') - - if source_environment.AwsAccountId != target_environment.AwsAccountId: - processor = ProcessLFCrossAccountShare( - session, - dataset, - share, - shared_tables, - [], - source_environment, - target_environment, - env_group, - ) - else: - processor = ProcessLFSameAccountShare( - session, - dataset, - share, - shared_tables, - [], - source_environment, - target_environment, - env_group - ) - - log.info(f'Granting permissions to tables: {shared_tables}') - approved_tables_succeed = processor.process_approved_shares() - log.info(f'sharing tables succeeded = {approved_tables_succeed}') - - new_share_state = Share_SM.run_transition(models.Enums.ShareObjectActions.Finish.value) - Share_SM.update_state(session, share, new_share_state) - - return approved_tables_succeed if approved_folders_succeed else False - - @classmethod - def revoke_share(cls, engine: Engine, share_uri: str): - """ - 1) Updates share object State Machine with the Action: Start - 2) Retrieves share data and items in Revoke_Approved state - 3) Calls sharing folders processor to revoke share - 4) Checks if remaining folders are shared and effectuates clean up with folders processor - 5) Calls sharing tables processor for same or cross account sharing to revoke share - 6) Checks if remaining tables are shared and effectuates clean up with tables processor - 7) Updates share object State Machine with the Action: Finish - - Parameters - ---------- - engine : db.engine - share_uri : share uri - - Returns - ------- - True if revoke succeeds - False if folder or table revoking failed - """ - - with engine.scoped_session() as session: - ( - source_env_group, - env_group, - dataset, - share, - source_environment, - target_environment, - ) = api.ShareObject.get_share_data(session, share_uri) - - Share_SM = api.ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(models.Enums.ShareObjectActions.Start.value) - Share_SM.update_state(session, share, new_share_state) - - revoked_item_SM = api.ShareItemSM(models.ShareItemStatus.Revoke_Approved.value) - - ( - revoked_tables, - revoked_folders - ) = api.ShareObject.get_share_data_items(session, share_uri, models.ShareItemStatus.Revoke_Approved.value) - - new_state = revoked_item_SM.run_transition(models.ShareObjectActions.Start.value) - revoked_item_SM.update_state(session, 
share_uri, new_state) - - log.info(f'Revoking permissions to folders: {revoked_folders}') - - revoked_folders_succeed = ProcessS3Share.process_revoked_shares( - session, - dataset, - share, - revoked_folders, - source_environment, - target_environment, - source_env_group, - env_group, - ) - log.info(f'revoking folders succeeded = {revoked_folders_succeed}') - existing_shared_items = api.ShareObject.check_existing_shared_items_of_type( - session, - share_uri, - models.ShareableType.StorageLocation.value - ) - log.info(f'Still remaining S3 resources shared = {existing_shared_items}') - if not existing_shared_items and revoked_folders: - log.info("Clean up S3 access points...") - clean_up_folders = ProcessS3Share.clean_up_share( - dataset=dataset, - share=share, - target_environment=target_environment - ) - log.info(f"Clean up S3 successful = {clean_up_folders}") - - if source_environment.AwsAccountId != target_environment.AwsAccountId: - processor = ProcessLFCrossAccountShare( - session, - dataset, - share, - [], - revoked_tables, - source_environment, - target_environment, - env_group, - ) - else: - processor = ProcessLFSameAccountShare( - session, - dataset, - share, - [], - revoked_tables, - source_environment, - target_environment, - env_group) - - log.info(f'Revoking permissions to tables: {revoked_tables}') - revoked_tables_succeed = processor.process_revoked_shares() - log.info(f'revoking tables succeeded = {revoked_tables_succeed}') - - existing_shared_items = api.ShareObject.check_existing_shared_items_of_type( - session, - share_uri, - models.ShareableType.Table.value - ) - log.info(f'Still remaining LF resources shared = {existing_shared_items}') - if not existing_shared_items and revoked_tables: - log.info("Clean up LF remaining resources...") - clean_up_tables = processor.clean_up_share() - log.info(f"Clean up LF successful = {clean_up_tables}") - - existing_pending_items = api.ShareObject.check_pending_share_items(session, share_uri) - if existing_pending_items: - new_share_state = Share_SM.run_transition(models.Enums.ShareObjectActions.FinishPending.value) - else: - new_share_state = Share_SM.run_transition(models.Enums.ShareObjectActions.Finish.value) - Share_SM.update_state(session, share, new_share_state) - - return revoked_tables_succeed and revoked_folders_succeed - - @classmethod - def clean_lfv1_ram_resources(cls, environment: models.Environment): - """ - Deletes LFV1 resource shares for an environment - Parameters - ---------- - environment : models.Environment - - Returns - ------- - None - """ - return Ram.delete_lakeformation_v1_resource_shares( - SessionHelper.remote_session(accountid=environment.AwsAccountId).client( - 'ram', region_name=environment.region - ) - ) - - @classmethod - def refresh_shares(cls, engine: Engine) -> bool: - """ - Refreshes the shares at scheduled frequency. 
- If a share is in 'Approve' state it triggers an approve ECS sharing task - If a share is in 'Revoked' state it triggers a revoke ECS sharing task - Also cleans up LFV1 ram resource shares if enabled on SSM - Parameters - ---------- - engine : db.engine - - Returns - ------- - true if refresh succeeds - """ - share_object_refreshable_states = api.ShareObjectSM.get_share_object_refreshable_states() - with engine.scoped_session() as session: - environments = session.query(models.Environment).all() - shares = ( - session.query(models.ShareObject) - .filter(models.ShareObject.status.in_(share_object_refreshable_states)) - .all() - ) - - # Feature toggle: default value is False - if ( - Parameter().get_parameter( - os.getenv('envname', 'local'), 'shares/cleanlfv1ram' - ) - == 'True' - ): - log.info('LFV1 Cleanup toggle is enabled') - for e in environments: - log.info( - f'Cleaning LFV1 ram resource for environment: {e.AwsAccountId}/{e.region}...' - ) - cls.clean_lfv1_ram_resources(e) - - if not shares: - log.info('No Approved nor Revoked shares found. Nothing to do...') - return True - - for share in shares: - try: - log.info( - f'Refreshing share {share.shareUri} with {share.status} status...' - ) - if share.status in [models.ShareObjectStatus.Approved.value]: - cls.approve_share(engine, share.shareUri) - else: - cls.revoke_share(engine, share.shareUri) - - except Exception as e: - log.error( - f'Failed refreshing share {share.shareUri} with {share.status}. ' - f'due to: {e}' - ) - return True diff --git a/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py b/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py deleted file mode 100644 index 6940d3392..000000000 --- a/backend/dataall/tasks/data_sharing/share_processors/s3_process_share.py +++ /dev/null @@ -1,196 +0,0 @@ -import logging - -from ....db import models, api -from ..share_managers import S3ShareManager - - -log = logging.getLogger(__name__) - - -class ProcessS3Share(S3ShareManager): - def __init__( - self, - session, - dataset: models.Dataset, - share: models.ShareObject, - share_folder: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, - source_env_group: models.EnvironmentGroup, - env_group: models.EnvironmentGroup, - ): - - super().__init__( - session, - dataset, - share, - share_folder, - source_environment, - target_environment, - source_env_group, - env_group, - ) - - @classmethod - def process_approved_shares( - cls, - session, - dataset: models.Dataset, - share: models.ShareObject, - share_folders: [models.DatasetStorageLocation], - source_environment: models.Environment, - target_environment: models.Environment, - source_env_group: models.EnvironmentGroup, - env_group: models.EnvironmentGroup - ) -> bool: - """ - 1) update_share_item_status with Start action - 2) (one time only) manage_bucket_policy - grants permission in the bucket policy - 3) grant_target_role_access_policy - 4) manage_access_point_and_policy - 5) update_dataset_bucket_key_policy - 6) update_share_item_status with Finish action - - Returns - ------- - True if share is granted successfully - """ - log.info( - '##### Starting Sharing folders #######' - ) - success = True - for folder in share_folders: - log.info(f'sharing folder: {folder}') - sharing_item = api.ShareObject.find_share_item_by_folder( - session, - share, - folder, - ) - shared_item_SM = api.ShareItemSM(models.ShareItemStatus.Share_Approved.value) - new_state = 
shared_item_SM.run_transition(models.Enums.ShareObjectActions.Start.value) - shared_item_SM.update_state_single_item(session, sharing_item, new_state) - - sharing_folder = cls( - session, - dataset, - share, - folder, - source_environment, - target_environment, - source_env_group, - env_group, - ) - - try: - sharing_folder.manage_bucket_policy() - sharing_folder.grant_target_role_access_policy() - sharing_folder.manage_access_point_and_policy() - sharing_folder.update_dataset_bucket_key_policy() - - new_state = shared_item_SM.run_transition(models.Enums.ShareItemActions.Success.value) - shared_item_SM.update_state_single_item(session, sharing_item, new_state) - - except Exception as e: - sharing_folder.handle_share_failure(e) - new_state = shared_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) - shared_item_SM.update_state_single_item(session, sharing_item, new_state) - success = False - - return success - - @classmethod - def process_revoked_shares( - cls, - session, - dataset: models.Dataset, - share: models.ShareObject, - revoke_folders: [models.DatasetStorageLocation], - source_environment: models.Environment, - target_environment: models.Environment, - source_env_group: models.EnvironmentGroup, - env_group: models.EnvironmentGroup - ) -> bool: - """ - 1) update_share_item_status with Start action - 2) delete_access_point_policy for folder - 3) update_share_item_status with Finish action - - Returns - ------- - True if share is revoked successfully - """ - - log.info( - '##### Starting Revoking folders #######' - ) - success = True - for folder in revoke_folders: - log.info(f'revoking access to folder: {folder}') - removing_item = api.ShareObject.find_share_item_by_folder( - session, - share, - folder, - ) - - revoked_item_SM = api.ShareItemSM(models.ShareItemStatus.Revoke_Approved.value) - new_state = revoked_item_SM.run_transition(models.Enums.ShareObjectActions.Start.value) - revoked_item_SM.update_state_single_item(session, removing_item, new_state) - - removing_folder = cls( - session, - dataset, - share, - folder, - source_environment, - target_environment, - source_env_group, - env_group, - ) - - try: - removing_folder.delete_access_point_policy() - - new_state = revoked_item_SM.run_transition(models.Enums.ShareItemActions.Success.value) - revoked_item_SM.update_state_single_item(session, removing_item, new_state) - - except Exception as e: - removing_folder.handle_revoke_failure(e) - new_state = revoked_item_SM.run_transition(models.Enums.ShareItemActions.Failure.value) - revoked_item_SM.update_state_single_item(session, removing_item, new_state) - success = False - - return success - - @staticmethod - def clean_up_share( - dataset: models.Dataset, - share: models.ShareObject, - target_environment: models.Environment - ): - """ - 1) deletes S3 access point for this share in this Dataset S3 Bucket - 2) delete_target_role_access_policy to access the above deleted access point - 3) delete_dataset_bucket_key_policy to remove access to the requester IAM role - - Returns - ------- - True if share is cleaned-up successfully - """ - - clean_up = S3ShareManager.delete_access_point( - share=share, - dataset=dataset - ) - if clean_up: - S3ShareManager.delete_target_role_access_policy( - share=share, - dataset=dataset, - target_environment=target_environment - ) - S3ShareManager.delete_dataset_bucket_key_policy( - share=share, - dataset=dataset, - target_environment=target_environment - ) - - return True diff --git a/backend/dataall/tasks/share_manager.py 
b/backend/dataall/tasks/share_manager.py deleted file mode 100644 index 000f5d808..000000000 --- a/backend/dataall/tasks/share_manager.py +++ /dev/null @@ -1,37 +0,0 @@ -import logging -import os -import sys - -from .data_sharing.data_sharing_service import DataSharingService -from ..db import get_engine - -root = logging.getLogger() -root.setLevel(logging.INFO) -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) -log = logging.getLogger(__name__) - - -if __name__ == '__main__': - - try: - ENVNAME = os.environ.get('envname', 'local') - ENGINE = get_engine(envname=ENVNAME) - - share_uri = os.getenv('shareUri') - share_item_uri = os.getenv('shareItemUri') - handler = os.getenv('handler') - - if handler == 'approve_share': - log.info(f'Starting processing task for share : {share_uri}...') - DataSharingService.approve_share(engine=ENGINE, share_uri=share_uri) - - elif handler == 'revoke_share': - log.info(f'Starting revoking task for share : {share_uri}...') - DataSharingService.revoke_share(engine=ENGINE, share_uri=share_uri) - - log.info('Sharing task finished successfully') - - except Exception as e: - log.error(f'Sharing task failed due to: {e}') - raise e diff --git a/backend/dataall/tasks/shares_refresh.py b/backend/dataall/tasks/shares_refresh.py deleted file mode 100644 index d1957bc74..000000000 --- a/backend/dataall/tasks/shares_refresh.py +++ /dev/null @@ -1,28 +0,0 @@ -import logging -import os -import sys - -from .data_sharing.data_sharing_service import DataSharingService -from ..db import get_engine - -root = logging.getLogger() -root.setLevel(logging.INFO) -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) -log = logging.getLogger(__name__) - - -if __name__ == '__main__': - - try: - ENVNAME = os.environ.get('envname', 'local') - ENGINE = get_engine(envname=ENVNAME) - - log.info('Starting refresh shares task...') - DataSharingService.refresh_shares(engine=ENGINE) - - log.info('Sharing task finished successfully') - - except Exception as e: - log.error(f'Sharing task failed due to: {e}') - raise e diff --git a/backend/dataall/tasks/stacks_updater.py b/backend/dataall/tasks/stacks_updater.py deleted file mode 100644 index d9f33332e..000000000 --- a/backend/dataall/tasks/stacks_updater.py +++ /dev/null @@ -1,67 +0,0 @@ -import logging -import os -import sys -import time - -from .. 
import db -from ..db import models -from ..aws.handlers.ecs import Ecs -from ..db import get_engine -from ..utils import Parameter - -root = logging.getLogger() -root.setLevel(logging.INFO) -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) -log = logging.getLogger(__name__) - -RETRIES = 30 -SLEEP_TIME = 30 - - -def update_stacks(engine, envname): - with engine.scoped_session() as session: - - all_datasets: [models.Dataset] = db.api.Dataset.list_all_active_datasets(session) - all_environments: [models.Environment] = db.api.Environment.list_all_active_environments(session) - - log.info(f'Found {len(all_environments)} environments, triggering update stack tasks...') - environment: models.Environment - for environment in all_environments: - update_stack(session=session, envname=envname, target_uri=environment.environmentUri, wait=True) - - log.info(f'Found {len(all_datasets)} datasets') - dataset: models.Dataset - for dataset in all_datasets: - update_stack(session=session, envname=envname, target_uri=dataset.datasetUri, wait=False) - - return all_environments, all_datasets - - -def update_stack(session, envname, target_uri, wait=False): - stack: models.Stack = db.api.Stack.get_stack_by_target_uri( - session, target_uri=target_uri - ) - cluster_name = Parameter().get_parameter(env=envname, path='ecs/cluster/name') - if not Ecs.is_task_running(cluster_name=cluster_name, started_by=f'awsworker-{stack.stackUri}'): - stack.EcsTaskArn = Ecs.run_cdkproxy_task(stack_uri=stack.stackUri) - if wait: - retries = 1 - while Ecs.is_task_running(cluster_name=cluster_name, started_by=f'awsworker-{stack.stackUri}'): - log.info(f"Update for {stack.name}//{stack.stackUri} is not complete, waiting for {SLEEP_TIME} seconds...") - time.sleep(SLEEP_TIME) - retries = retries + 1 - if retries > RETRIES: - log.info(f"Maximum number of retries exceeded ({RETRIES} retries), continuing task...") - break - log.info(f"Update for {stack.name}//{stack.stackUri} COMPLETE or maximum number of retries exceeded ({RETRIES} retries)") - else: - log.info( - f'Stack update is already running... Skipping stack {stack.name}//{stack.stackUri}' - ) - - -if __name__ == '__main__': - envname = os.environ.get('envname', 'local') - engine = get_engine(envname=envname) - update_stacks(engine=engine, envname=envname) diff --git a/backend/dataall/tasks/subscriptions/__init__.py b/backend/dataall/tasks/subscriptions/__init__.py deleted file mode 100644 index f60ca5310..000000000 --- a/backend/dataall/tasks/subscriptions/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .sqs_poller import poll_queues -from .subscription_service import SubscriptionService diff --git a/backend/dataall/tasks/subscriptions/subscription_service.py b/backend/dataall/tasks/subscriptions/subscription_service.py deleted file mode 100644 index 52aeb4e40..000000000 --- a/backend/dataall/tasks/subscriptions/subscription_service.py +++ /dev/null @@ -1,336 +0,0 @@ -import json -import logging -import os -import sys - -from botocore.exceptions import ClientError -from sqlalchemy import and_ - -from ... 
import db -from ...aws.handlers.service_handlers import Worker -from ...aws.handlers.sts import SessionHelper -from ...aws.handlers.sqs import SqsQueue -from ...db import get_engine -from ...db import models -from ...tasks.subscriptions import poll_queues -from ...utils import json_utils - -root = logging.getLogger() -root.setLevel(logging.INFO) -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) -log = logging.getLogger(__name__) - - -class SubscriptionService: - def __init__(self): - pass - - @staticmethod - def get_environments(engine): - with engine.scoped_session() as session: - return db.api.Environment.list_all_active_environments(session) - - @staticmethod - def get_queues(environments: [models.Environment]): - queues = [] - for env in environments: - queues.append( - { - 'url': f'https://sqs.{env.region}.amazonaws.com/{env.AwsAccountId}/{env.resourcePrefix}-producers-queue-{env.environmentUri}', - 'region': env.region, - 'accountid': env.AwsAccountId, - 'arn': f'arn:aws:sqs:{env.region}:{env.AwsAccountId}:ProducersSubscriptionsQueue-{env.environmentUri}', - 'name': f'{env.resourcePrefix}-producers-queue-{env.environmentUri}', - } - ) - return queues - - @staticmethod - def notify_consumers(engine, messages): - - log.info(f'Notifying consumers with messages {messages}') - - with engine.scoped_session() as session: - - for message in messages: - - SubscriptionService.publish_table_update_message(engine, message) - - SubscriptionService.publish_location_update_message(session, message) - - return True - - @staticmethod - def publish_table_update_message(engine, message): - with engine.scoped_session() as session: - table: models.DatasetTable = db.api.DatasetTable.get_table_by_s3_prefix( - session, - message.get('prefix'), - message.get('accountid'), - message.get('region'), - ) - if not table: - log.info(f'No table for message {message}') - else: - log.info( - f'Found table {table.tableUri}|{table.GlueTableName}|{table.S3Prefix}' - ) - - dataset: models.Dataset = session.query(models.Dataset).get( - table.datasetUri - ) - log.info( - f'Found dataset {dataset.datasetUri}|{dataset.environmentUri}|{dataset.AwsAccountId}' - ) - share_items: [models.ShareObjectItem] = ( - session.query(models.ShareObjectItem) - .filter(models.ShareObjectItem.itemUri == table.tableUri) - .all() - ) - log.info(f'Found shared items for table {share_items}') - - return SubscriptionService.publish_sns_message( - engine, - message, - dataset, - share_items, - table.S3Prefix, - table=table, - ) - - @staticmethod - def publish_location_update_message(session, message): - location: models.DatasetStorageLocation = ( - db.api.DatasetStorageLocation.get_location_by_s3_prefix( - session, - message.get('prefix'), - message.get('accountid'), - message.get('region'), - ) - ) - if not location: - log.info(f'No location found for message {message}') - - else: - log.info(f'Found location {location.locationUri}|{location.S3Prefix}') - - dataset: models.Dataset = session.query(models.Dataset).get( - location.datasetUri - ) - log.info( - f'Found dataset {dataset.datasetUri}|{dataset.environmentUri}|{dataset.AwsAccountId}' - ) - share_items: [models.ShareObjectItem] = ( - session.query(models.ShareObjectItem) - .filter(models.ShareObjectItem.itemUri == location.locationUri) - .all() - ) - log.info(f'Found shared items for location {share_items}') - - return SubscriptionService.publish_sns_message( - session, message, dataset, share_items, location.S3Prefix - ) - - @staticmethod - def 
store_dataquality_results(session, message): - - table: models.DatasetTable = db.api.DatasetTable.get_table_by_s3_prefix( - session, - message.get('prefix'), - message.get('accountid'), - message.get('region'), - ) - - run = db.api.DatasetProfilingRun.start_profiling( - session=session, - datasetUri=table.datasetUri, - GlueTableName=table.GlueTableName, - tableUri=table.tableUri, - ) - - run.status = 'SUCCEEDED' - run.GlueTableName = table.GlueTableName - quality_results = message.get('dataQuality') - - if message.get('datasetRegionId'): - quality_results['regionId'] = message.get('datasetRegionId') - - if message.get('rows'): - quality_results['table_nb_rows'] = message.get('rows') - - SubscriptionService.set_columns_type(quality_results, message) - - data_types = SubscriptionService.set_data_types(message) - - quality_results['dataTypes'] = data_types - - quality_results['integrationDateTime'] = message.get('integrationDateTime') - - results = json.dumps(json_utils.to_json(quality_results)) - - log.info( - '>>> Stored dataQuality results received from the SNS notification: %s', - results, - ) - - run.results = results - - session.commit() - return True - - @staticmethod - def set_data_types(message): - data_types = [] - for field in message.get('fields'): - added = False - for d in data_types: - if d.get('type').lower() == field[1].lower(): - d['count'] = d['count'] + 1 - added = True - break - if not added: - data_types.append({'type': field[1], 'count': 1}) - return data_types - - @staticmethod - def set_columns_type(quality_results, message): - for c in quality_results.get('columns'): - if not c.get('Type'): - for field in message.get('fields'): - if field[0].lower() == c['Name'].lower(): - c['Type'] = field[1] - - @staticmethod - def publish_sns_message( - engine, message, dataset, share_items, prefix, table: models.DatasetTable = None - ): - with engine.scoped_session() as session: - for item in share_items: - - share_object = SubscriptionService.get_approved_share_object( - session, item - ) - - if not share_object or not share_object.principalId: - log.error( - f'Share Item with no share object or no principalId ? {item.shareItemUri}' - ) - else: - environment = session.query(models.Environment).get( - share_object.principalId - ) - if not environment: - log.error( - f'Environment of share owner was deleted ? 
{share_object.principalId}' - ) - else: - log.info(f'Notifying share owner {share_object.owner}') - - log.info( - f'found environment {environment.environmentUri}|{environment.AwsAccountId} of share owner {share_object.owner}' - ) - - try: - - if table: - message['table'] = table.GlueTableName - - log.info( - f'Producer message before notifications: {message}' - ) - - SubscriptionService.redshift_copy( - engine, message, dataset, environment, table - ) - - message = { - 'location': prefix, - 'owner': dataset.owner, - 'message': f'Dataset owner {dataset.owner} ' - f'has updated the table shared with you {prefix}', - } - - response = SubscriptionService.sns_call( - message, environment - ) - - log.info(f'SNS update publish response {response}') - - notifications = db.api.Notification.notify_new_data_available_from_owners( - session=session, - dataset=dataset, - share=share_object, - s3_prefix=prefix, - ) - log.info(f'Notifications for share owners {notifications}') - - except ClientError as e: - log.error( - f'Failed to deliver message {message} due to: {e}' - ) - - @staticmethod - def sns_call(message, environment): - aws_session = SessionHelper.remote_session(environment.AwsAccountId) - sns = aws_session.client('sns', region_name=environment.region) - response = sns.publish( - TopicArn=f'arn:aws:sns:{environment.region}:{environment.AwsAccountId}:{environment.subscriptionsConsumersTopicName}', - Message=json.dumps(message), - ) - return response - - @staticmethod - def redshift_copy( - engine, - message, - dataset: models.Dataset, - environment: models.Environment, - table: models.DatasetTable, - ): - log.info( - f'Redshift copy starting ' - f'{environment.environmentUri}|{dataset.datasetUri}' - f'|{json_utils.to_json(message)}' - ) - with engine.scoped_session() as session: - task = models.Task( - action='redshift.subscriptions.copy', - targetUri=environment.environmentUri, - payload={ - 'datasetUri': dataset.datasetUri, - 'message': json_utils.to_json(message), - 'tableUri': table.tableUri, - }, - ) - session.add(task) - session.commit() - - response = Worker.queue(engine, [task.taskUri]) - return response - - @staticmethod - def get_approved_share_object(session, item): - share_object: models.ShareObject = ( - session.query(models.ShareObject) - .filter( - and_( - models.ShareObject.shareUri == item.shareUri, - models.ShareObject.status == 'Approved', - ) - ) - .first() - ) - return share_object - - -if __name__ == '__main__': - ENVNAME = os.environ.get('envname', 'local') - ENGINE = get_engine(envname=ENVNAME) - Worker.queue = SqsQueue.send - log.info('Polling datasets updates...') - service = SubscriptionService() - queues = service.get_queues(service.get_environments(ENGINE)) - messages = poll_queues(queues) - service.notify_consumers(ENGINE, messages) - log.info('Datasets updates shared successfully') diff --git a/backend/dataall/tasks/tables_syncer.py b/backend/dataall/tasks/tables_syncer.py deleted file mode 100644 index a441ff197..000000000 --- a/backend/dataall/tasks/tables_syncer.py +++ /dev/null @@ -1,115 +0,0 @@ -import logging -import os -import sys -from operator import and_ - -from .. 
import db -from ..aws.handlers.glue import Glue -from ..aws.handlers.sts import SessionHelper -from ..db import get_engine -from ..db import models -from ..searchproxy import indexers -from ..searchproxy.connect import ( - connect, -) -from ..utils.alarm_service import AlarmService - -root = logging.getLogger() -root.setLevel(logging.INFO) -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) -log = logging.getLogger(__name__) - - -def sync_tables(engine, es=None): - with engine.scoped_session() as session: - processed_tables = [] - all_datasets: [models.Dataset] = db.api.Dataset.list_all_active_datasets( - session - ) - log.info(f'Found {len(all_datasets)} datasets for tables sync') - dataset: models.Dataset - for dataset in all_datasets: - log.info( - f'Synchronizing dataset {dataset.name}|{dataset.datasetUri} tables' - ) - env: models.Environment = ( - session.query(models.Environment) - .filter( - and_( - models.Environment.environmentUri == dataset.environmentUri, - models.Environment.deleted.is_(None), - ) - ) - .first() - ) - env_group: models.EnvironmentGroup = ( - db.api.Environment.get_environment_group( - session, dataset.SamlAdminGroupName, env.environmentUri - ) - ) - try: - if not env or not is_assumable_pivot_role(env): - log.info( - f'Dataset {dataset.GlueDatabaseName} has an invalid environment' - ) - else: - - tables = Glue.list_glue_database_tables( - dataset.AwsAccountId, dataset.GlueDatabaseName, dataset.region - ) - - log.info( - f'Found {len(tables)} tables on Glue database {dataset.GlueDatabaseName}' - ) - - db.api.DatasetTable.sync( - session, dataset.datasetUri, glue_tables=tables - ) - - tables = ( - session.query(models.DatasetTable) - .filter(models.DatasetTable.datasetUri == dataset.datasetUri) - .all() - ) - - log.info('Updating tables permissions on Lake Formation...') - - for table in tables: - Glue.grant_principals_all_table_permissions( - table, - principals=[ - SessionHelper.get_delegation_role_arn(env.AwsAccountId), - env_group.environmentIAMRoleArn, - ], - ) - - processed_tables.extend(tables) - - if es: - indexers.upsert_dataset_tables(session, es, dataset.datasetUri) - except Exception as e: - log.error( - f'Failed to sync tables for dataset ' - f'{dataset.AwsAccountId}/{dataset.GlueDatabaseName} ' - f'due to: {e}' - ) - AlarmService().trigger_dataset_sync_failure_alarm(dataset, str(e)) - return processed_tables - - -def is_assumable_pivot_role(env: models.Environment): - aws_session = SessionHelper.remote_session(accountid=env.AwsAccountId) - if not aws_session: - log.error( - f'Failed to assume dataall pivot role in environment {env.AwsAccountId}' - ) - return False - return True - - -if __name__ == '__main__': - ENVNAME = os.environ.get('envname', 'local') - ENGINE = get_engine(envname=ENVNAME) - ES = connect(envname=ENVNAME) - sync_tables(engine=ENGINE, es=ES) diff --git a/backend/dataall/utils/__init__.py b/backend/dataall/utils/__init__.py deleted file mode 100644 index 14595b7ad..000000000 --- a/backend/dataall/utils/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .parameter import Parameter -from .secrets_manager import Secrets -from .slugify import slugify diff --git a/backend/dataall/utils/alarm_service.py b/backend/dataall/utils/alarm_service.py deleted file mode 100644 index 838029d3e..000000000 --- a/backend/dataall/utils/alarm_service.py +++ /dev/null @@ -1,213 +0,0 @@ -# This module is a wrapper for the cdk cli -# Python native subprocess package is used to spawn cdk [deploy|destroy] commands with appropriate 
parameters. -# Additionally, it uses the cdk plugin cdk-assume-role-credential-plugin to run cdk commands on target accounts -# see : https://github.com/aws-samples/cdk-assume-role-credential-plugin - -import logging -import os -from datetime import datetime - -from botocore.exceptions import ClientError - -from ..aws.handlers.sts import SessionHelper -from ..db import models - -logger = logging.getLogger(__name__) - - -class AlarmService: - def __init__(self): - self.envname = os.getenv('envname', 'local') - self.region = os.environ.get('AWS_REGION', 'eu-west-1') - - def trigger_stack_deployment_failure_alarm(self, stack: models.Stack): - logger.info('Triggering deployment failure alarm...') - subject = f'ALARM: DATAALL Stack {stack.name} Deployment Failure Notification' - message = f""" -You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to deploy one of its resource CloudFormation stacks {stack.name} - -View the ECS task logs in the AWS Management Console: -https://{self.region}.console.aws.amazon.com/cloudwatch/deeplink.js?region=eu-west-1#logsV2:log-groups/log-group/$252Fdataall$252F{self.envname}$252Fecs$252Fcdkproxy/log-events/task$252Fcontainer$252F{stack.EcsTaskArn.split('/')[-1]} - -Alarm Details: -- Stack Name: {stack.name} -- AWS Account: {stack.accountid} -- Region: {stack.region} -- State Change: OK -> ALARM -- Reason for State Change: Stack Deployment Failure -- Timestamp: {datetime.now()} -- CW Log Group: {f"/dataall/{self.envname}/cdkproxy/{stack.EcsTaskArn.split('/')[-1]}"} -""" - return self.publish_message_to_alarms_topic(subject, message) - - def trigger_table_sharing_failure_alarm( - self, - table: models.DatasetTable, - share: models.ShareObject, - target_environment: models.Environment, - ): - logger.info('Triggering share failure alarm...') - subject = ( - f'ALARM: DATAALL Table {table.GlueTableName} Sharing Failure Notification' - ) - message = f""" -You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the table {table.GlueTableName} with Lake Formation. - -Alarm Details: - - State Change: OK -> ALARM - - Reason for State Change: Lake Formation sharing failure - - Timestamp: {datetime.now()} - - Share Source - - Dataset URI: {share.datasetUri} - - AWS Account: {table.AWSAccountId} - - Region: {table.region} - - Glue Database: {table.GlueDatabaseName} - - Glue Table: {table.GlueTableName} - - Share Target - - AWS Account: {target_environment.AwsAccountId} - - Region: {target_environment.region} - - Glue Database: {table.GlueDatabaseName}shared -""" - return self.publish_message_to_alarms_topic(subject, message) - - def trigger_folder_sharing_failure_alarm( - self, - folder: models.DatasetStorageLocation, - share: models.ShareObject, - target_environment: models.Environment, - ): - logger.info('Triggering share failure alarm...') - subject = ( - f'ALARM: DATAALL Folder {folder.S3Prefix} Sharing Failure Notification' - ) - message = f""" -You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the folder {folder.S3Prefix} with S3 Access Point. 
-Alarm Details: - - State Change: OK -> ALARM - - Reason for State Change: S3 Folder sharing failure - - Timestamp: {datetime.now()} - Share Source - - Dataset URI: {share.datasetUri} - - AWS Account: {folder.AWSAccountId} - - Region: {folder.region} - - S3 Bucket: {folder.S3BucketName} - - S3 Folder: {folder.S3Prefix} - Share Target - - AWS Account: {target_environment.AwsAccountId} - - Region: {target_environment.region} -""" - - def trigger_revoke_folder_sharing_failure_alarm( - self, - folder: models.DatasetStorageLocation, - share: models.ShareObject, - target_environment: models.Environment, - ): - logger.info('Triggering share failure alarm...') - subject = ( - f'ALARM: DATAALL Folder {folder.S3Prefix} Sharing Revoke Failure Notification' - ) - message = f""" -You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the folder {folder.S3Prefix} with S3 Access Point. -Alarm Details: - - State Change: OK -> ALARM - - Reason for State Change: S3 Folder sharing Revoke failure - - Timestamp: {datetime.now()} - Share Source - - Dataset URI: {share.datasetUri} - - AWS Account: {folder.AWSAccountId} - - Region: {folder.region} - - S3 Bucket: {folder.S3BucketName} - - S3 Folder: {folder.S3Prefix} - Share Target - - AWS Account: {target_environment.AwsAccountId} - - Region: {target_environment.region} -""" - - def trigger_revoke_table_sharing_failure_alarm( - self, - table: models.DatasetTable, - share: models.ShareObject, - target_environment: models.Environment, - ): - logger.info('Triggering share failure alarm...') - subject = f'ALARM: DATAALL Table {table.GlueTableName} Revoking LF permissions Failure Notification' - message = f""" -You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to revoke Lake Formation permissions for table {table.GlueTableName} with Lake Formation. - -Alarm Details: - - State Change: OK -> ALARM - - Reason for State Change: Lake Formation sharing failure - - Timestamp: {datetime.now()} - - Share Source - - Dataset URI: {share.datasetUri} - - AWS Account: {table.AWSAccountId} - - Region: {table.region} - - Glue Database: {table.GlueDatabaseName} - - Glue Table: {table.GlueTableName} - - Share Target - - AWS Account: {target_environment.AwsAccountId} - - Region: {target_environment.region} - - Glue Database: {table.GlueDatabaseName}shared -""" - return self.publish_message_to_alarms_topic(subject, message) - - def trigger_catalog_indexing_failure_alarm(self, error: str): - logger.info('Triggering catalog indexing failure alarm...') - subject = 'ALARM: DATAALL Catalog Indexing Failure Notification' - message = f""" -You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to index new items into OpenSearch. 
- -Alarm Details: - - State Change: OK -> ALARM - - Reason for State Change: {error} - - Timestamp: {datetime.now()} -""" - return self.publish_message_to_alarms_topic(subject, message) - - def trigger_dataset_sync_failure_alarm(self, dataset: models.Dataset, error: str): - logger.info(f'Triggering dataset {dataset.name} tables sync failure alarm...') - subject = ( - f'ALARM: DATAALL Dataset {dataset.name} Tables Sync Failure Notification' - ) - message = f""" -You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to synchronize Dataset {dataset.name} tables from AWS Glue to the Search Catalog. - -Alarm Details: - - State Change: OK -> ALARM - - Reason for State Change: {error} - - Timestamp: {datetime.now()} - Dataset - - Dataset URI: {dataset.datasetUri} - - AWS Account: {dataset.AwsAccountId} - - Region: {dataset.region} - - Glue Database: {dataset.GlueDatabaseName} - """ - return self.publish_message_to_alarms_topic(subject, message) - - def publish_message_to_alarms_topic(self, subject, message): - if self.envname in ['local', 'pytest', 'dkrcompose']: - logger.debug('Running in local mode...SNS topic not available') - else: - region = os.getenv('AWS_REGION', 'eu-west-1') - session = SessionHelper.get_session() - ssm = session.client('ssm', region_name=region) - sns = session.client('sns', region_name=region) - alarms_topic_arn = ssm.get_parameter( - Name=f'/dataall/{self.envname}/sns/alarmsTopic' - )['Parameter']['Value'] - try: - logger.info('Sending deployment failure notification') - response = sns.publish( - TopicArn=alarms_topic_arn, - Subject=subject, - Message=message, - ) - return response - except ClientError as e: - logger.error(f'Failed to deliver message due to: {e} ') - raise e diff --git a/backend/dataall/utils/runtime_stacks_tagging.py b/backend/dataall/utils/runtime_stacks_tagging.py deleted file mode 100644 index d95dc7d8a..000000000 --- a/backend/dataall/utils/runtime_stacks_tagging.py +++ /dev/null @@ -1,176 +0,0 @@ -import os -import typing -from enum import Enum - -from aws_cdk import Stack, Tags - -from .. 
import db -from ..db import models - - -# Tag keys for Stacks -class StackTagName(Enum): - def __str__(self): - return str(self.value) - - CREATOR = 'Creator' - ORGANISATION = 'Organization' - ENVIRONMENT = 'Environment' - TARGET = 'Target' - TEAM = 'Team' - DATAALL = 'dataall' - - -# Tags adding class -class TagsUtil: - def __init__(self, stack): - self.stack = stack - - @classmethod - def add_tags(cls, stack: Stack) -> [tuple]: - """ - A class method that adds tags to a Stack - """ - - # Get the list of tags to be added from the tag factory - stack_tags_to_add = cls.tag_factory(stack) - - # Add the tags to the Stack - for tag in stack_tags_to_add: - Tags.of(stack).add(str(tag[0]), str(tag[1])) - - return stack_tags_to_add - - @classmethod - def tag_factory(cls, stack: Stack) -> typing.List[typing.Tuple]: - """ - A class method that returns tags to be added to a Stack (based on Stack type) - """ - - _stack_tags = [] - - # Dictionary that resolves the Stack class name to the GraphQL model - stack_model = dict( - Dataset=models.Dataset, - EnvironmentSetup=models.Environment, - SagemakerStudioDomain=models.SagemakerStudioUserProfile, - SagemakerStudioUserProfile=models.SagemakerStudioUserProfile, - SagemakerNotebook=models.SagemakerNotebook, - PipelineStack=models.DataPipeline, - CDKPipelineStack=models.DataPipeline, - RedshiftStack=models.RedshiftCluster, - ) - - engine = cls.get_engine() - - # Initialize references to stack's environment and organisation - with engine.scoped_session() as session: - model_name = stack_model[stack.__class__.__name__] - target_stack = cls.get_target(session, stack, model_name) - environment = cls.get_environment(session, target_stack) - organisation = cls.get_organization(session, environment) - key_value_tags: [models.KeyValueTag] = cls.get_model_key_value_tags( - session, stack, model_name - ) - cascaded_tags: [models.KeyValueTag] = cls.get_environment_cascade_key_value_tags( - session, environment.environmentUri - ) - - # Build a list of tuples with tag keys and values based on the collected up to this point - # ex. target_stack, organisation etc. - _common_stack_tags = [ - (StackTagName.CREATOR.value, target_stack.owner), - ( - StackTagName.ORGANISATION.value, - organisation.name + '_' + organisation.organizationUri, - ), - ( - StackTagName.ENVIRONMENT.value, - environment.name + '_' + environment.environmentUri, - ), - ( - StackTagName.TEAM.value, - ( - target_stack.SamlGroupName - if hasattr(target_stack, 'SamlGroupName') - else target_stack.SamlAdminGroupName - ), - ), - ( - StackTagName.TARGET.value, - model_name.__name__ + '_' + stack.target_uri, - ), - ( - StackTagName.DATAALL.value, - 'true', - ), - ] - - # Build the final tag list with common tags - _stack_tags.extend(_common_stack_tags) - - # ..and any additional key value tags - _stack_tags.extend(key_value_tags) - - # .. and cascade tags inherited form the environment - _stack_tags.extend(cascaded_tags) - - # Duplicate tag keys are not allowed on CloudFormation. 
Also Tag keys are case insensitive - _stack_tags = list(cls.remove_duplicate_tag_keys(_stack_tags).values()) - - return _stack_tags - - @classmethod - def get_engine(cls): - envname = os.environ.get('envname', 'local') - engine = db.get_engine(envname=envname) - return engine - - @classmethod - def get_target(cls, session, stack, model_name): - return session.query(model_name).get(stack.target_uri) - - @classmethod - def get_organization(cls, session, environment): - organisation: models.Organization = db.api.Organization.get_organization_by_uri( - session, environment.organizationUri - ) - return organisation - - @classmethod - def get_environment(cls, session, target_stack): - environment: models.Environment = db.api.Environment.get_environment_by_uri( - session, target_stack.environmentUri - ) - return environment - - @classmethod - def get_model_key_value_tags(cls, session, stack, model_name): - return [ - (kv.key, kv.value) - for kv in db.api.KeyValueTag.find_key_value_tags( - session, - stack.target_uri, - db.api.TargetType.get_target_type(model_name), - ) - ] - - @classmethod - def get_environment_cascade_key_value_tags(cls, session, environmentUri): - return [ - (kv.key, kv.value) - for kv in db.api.KeyValueTag.find_environment_cascade_key_value_tags( - session, - environmentUri, - ) - ] - - @classmethod - def remove_duplicate_tag_keys(cls, _stack_tags): - compare_dict = dict() - results_dict = dict() - for key, value in reversed(_stack_tags): - if key.lower() not in compare_dict: # we see this key for the first time - compare_dict[key.lower()] = (key, value) - results_dict[key] = (key, value) - return results_dict diff --git a/backend/dataall/utils/secrets_manager.py b/backend/dataall/utils/secrets_manager.py deleted file mode 100644 index 0a7c41dd6..000000000 --- a/backend/dataall/utils/secrets_manager.py +++ /dev/null @@ -1,25 +0,0 @@ -import logging -import os - -import boto3 - -log = logging.getLogger('utils:Secrets') - - -class Secrets: - prefix = 'dataall' - - @classmethod - def get_secret(cls, env, secret_name): - print('will get secret', env, secret_name) - if not secret_name: - raise Exception('Secret name is None') - secret_name = f'/{cls.prefix}/{env}/{secret_name}' - secret_name = secret_name.replace('//', '/') - print(secret_name) - client = boto3.client( - 'secretsmanager', region_name=os.getenv('AWS_REGION', 'eu-west-1') - ) - secret = client.get_secret_value(SecretId=secret_name).get('SecretString') - print('secret = ', secret) - return secret diff --git a/backend/docker/dev/Dockerfile b/backend/docker/dev/Dockerfile index 486c42c0f..7810eb947 100644 --- a/backend/docker/dev/Dockerfile +++ b/backend/docker/dev/Dockerfile @@ -50,7 +50,7 @@ $PATH" >> ~/.bashrc && \ RUN /bin/bash -c ". 
~/.nvm/nvm.sh && cdk --version" COPY ./requirements.txt dh.requirements.txt -COPY ./dataall/cdkproxy/requirements.txt cdk.requirements.txt +COPY ./dataall/base/cdkproxy/requirements.txt cdk.requirements.txt RUN /bin/bash -c "${PYTHON_VERSION} -m pip install -U pip " RUN /bin/bash -c "${PYTHON_VERSION} -m pip install -r dh.requirements.txt" diff --git a/backend/docker/prod/ecs/Dockerfile b/backend/docker/prod/ecs/Dockerfile index 334fc043a..aadf853ab 100644 --- a/backend/docker/prod/ecs/Dockerfile +++ b/backend/docker/prod/ecs/Dockerfile @@ -46,7 +46,7 @@ RUN $PYTHON_VERSION -m pip install -U pip # App specific ADD backend/requirements.txt /dh.requirements.txt -ADD backend/dataall/cdkproxy/requirements.txt /cdk.requirements.txt +ADD backend/dataall/base/cdkproxy/requirements.txt /cdk.requirements.txt RUN /bin/bash -c "pip3.8 install -r /dh.requirements.txt" \ && /bin/bash -c "pip3.8 install -r /cdk.requirements.txt" @@ -55,8 +55,11 @@ ADD backend/dataall /dataall VOLUME ["/dataall"] ADD backend/cdkproxymain.py /cdkproxymain.py -RUN mkdir -p dataall/cdkproxy/assets/glueprofilingjob/jars -ADD https://repo1.maven.org/maven2/com/amazon/deequ/deequ/$DEEQU_VERSION/deequ-$DEEQU_VERSION.jar /dataall/cdkproxy/assets/glueprofilingjob/jars/ +ENV config_location="/config.json" +COPY config.json /config.json + +RUN mkdir -p dataall/modules/datasets/cdk/assets/glueprofilingjob/jars/ +ADD https://repo1.maven.org/maven2/com/amazon/deequ/deequ/$DEEQU_VERSION/deequ-$DEEQU_VERSION.jar /dataall/modules/datasets/cdk/assets/glueprofilingjob/jars/ WORKDIR / diff --git a/backend/docker/prod/lambda/Dockerfile b/backend/docker/prod/lambda/Dockerfile index 42b98d65a..74609e98c 100644 --- a/backend/docker/prod/lambda/Dockerfile +++ b/backend/docker/prod/lambda/Dockerfile @@ -23,6 +23,9 @@ RUN $PYTHON_VERSION -m pip install -r requirements.txt -t . COPY backend/. ./ +ENV config_location="config.json" +COPY config.json ./config.json + ## You must add the Lambda Runtime Interface Client (RIC) for your runtime. 
RUN $PYTHON_VERSION -m pip install awslambdaric --target ${FUNCTION_DIR} diff --git a/backend/local.graphql.server.py b/backend/local.graphql.server.py deleted file mode 100644 index 094e53052..000000000 --- a/backend/local.graphql.server.py +++ /dev/null @@ -1,156 +0,0 @@ -import os - -import boto3 -import jwt -from ariadne import graphql_sync -from ariadne.constants import PLAYGROUND_HTML -from flask import Flask, request, jsonify -from flask_cors import CORS - -from dataall import db - -sts = boto3.client('sts', region_name='eu-west-1') -from dataall.api import get_executable_schema -from dataall.aws.handlers.service_handlers import Worker -from dataall.db import get_engine, Base, create_schema_and_tables, init_permissions, api -from dataall.searchproxy import connect, run_query - -import logging - -logger = logging.getLogger('graphql') -logger.propagate = False -logger.setLevel(logging.INFO) -Worker.queue = Worker.process -ENVNAME = os.getenv('envname', 'local') -logger.warning(f'Connecting to database `{ENVNAME}`') -engine = get_engine(envname=ENVNAME) -es = connect(envname=ENVNAME) -logger.info('Connected') -# create_schema_and_tables(engine, envname=ENVNAME) -Base.metadata.create_all(engine.engine) -CDKPROXY_URL = ( - 'http://cdkproxy:2805' if ENVNAME == 'dkrcompose' else 'http://localhost:2805' -) - -init_permissions(engine) - - -class Context: - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - - -schema = get_executable_schema() -# app = GraphQL(schema, debug=True) - -app = Flask(__name__) -CORS(app) - - -def request_context(headers, mock=False): - if mock: - username = headers.get('username', 'anonymous@amazon.com') - groups = headers.get('groups', ['Scientists', 'DAAdministrators', 'Engineers', 'Other']) - else: - if not headers.get('Authorization'): - raise Exception('Missing Authorization header') - try: - decoded = jwt.decode(headers.get('Authorization'), options={"verify_signature": False}) - username = decoded.get('email', 'anonymous') - groups = [] - saml_groups = decoded.get('custom:saml.groups', []) - if len(saml_groups): - groups: list = ( - saml_groups.replace('[', '') - .replace(']', '') - .replace(', ', ',') - .split(',') - ) - cognito_groups = decoded.get('cognito:groups', []) - groups.extend(cognito_groups) - except Exception as e: - logger.error(str(e)) - raise e - - for group in groups: - with engine.scoped_session() as session: - api.TenantPolicy.attach_group_tenant_policy( - session=session, - group=group, - permissions=db.permissions.TENANT_ALL, - tenant_name='dataall', - ) - context = Context( - engine=engine, - es=es, - schema=schema, - username=username, - groups=groups, - cdkproxyurl=CDKPROXY_URL, - ) - return context.__dict__ - - -@app.route('/graphql', methods=['OPTIONS']) -def opt(): - # On GET request serve GraphQL Playground - # You don't need to provide Playground if you don't want to - # but keep on mind this will not prohibit clients from - # exploring your API using desktop GraphQL Playground app. - return '
Hello
', 200 - - -@app.route('/esproxy', methods=['OPTIONS']) -def esproxyopt(): - # On GET request serve GraphQL Playground - # You don't need to provide Playground if you don't want to - # but keep on mind this will not prohibit clients from - # exploring your API using desktop GraphQL Playground app. - return '
Hello
', 200 - - -@app.route('/graphql', methods=['GET']) -def graphql_playground(): - # On GET request serve GraphQL Playground - # You don't need to provide Playground if you don't want to - # but keep on mind this will not prohibit clients from - # exploring your API using desktop GraphQL Playground app. - return PLAYGROUND_HTML, 200 - - -@app.route('/esproxy', methods=['POST']) -def esproxy(): - body = request.data.decode('utf-8') - print(body) - return run_query(es=es, index='dataall-index', body=body) - - -@app.route('/graphql', methods=['POST']) -def graphql_server(): - print('.............................') - # GraphQL queries are always sent as POST - print(request.data) - data = request.get_json() - print(request_context(request.headers, mock=True)) - - # Note: Passing the request to the context is optional. - # In Flask, the current request is always accessible as flask.request - success, result = graphql_sync( - schema, - data, - context_value=request_context(request.headers, mock=True), - debug=app.debug, - ) - - status_code = 200 if success else 400 - return jsonify(result), status_code - - -if __name__ == '__main__': - logger.info('Starting dataall flask local application') - app.run( - debug=True, # nosec - threaded=False, - host='0.0.0.0', - port=5000, - ) diff --git a/backend/local.cdkapi.server.py b/backend/local_cdkapi_server.py similarity index 100% rename from backend/local.cdkapi.server.py rename to backend/local_cdkapi_server.py diff --git a/backend/local_graphql_server.py b/backend/local_graphql_server.py new file mode 100644 index 000000000..f150b4edb --- /dev/null +++ b/backend/local_graphql_server.py @@ -0,0 +1,168 @@ +import os + +import boto3 +import jwt +from ariadne import graphql_sync +from ariadne.constants import PLAYGROUND_HTML +from flask import Flask, request, jsonify +from flask_cors import CORS + +from dataall.base.api import get_executable_schema +from dataall.core.tasks.service_handlers import Worker +from dataall.core.permissions import permissions +from dataall.core.permissions.db import save_permissions_with_tenant +from dataall.core.permissions.db.tenant_policy_repositories import TenantPolicy +from dataall.base.db import get_engine, Base +from dataall.base.searchproxy import connect, run_query +from dataall.base.loader import load_modules, ImportMode +from dataall.base.config import config +from dataall.base.context import set_context, dispose_context, RequestContext + +import logging + +logger = logging.getLogger('graphql') +logger.propagate = False +logger.setLevel(logging.INFO) + +sts = boto3.client('sts', region_name='eu-west-1') +Worker.queue = Worker.process +ENVNAME = os.getenv('envname', 'local') +logger.warning(f'Connecting to database `{ENVNAME}`') +engine = get_engine(envname=ENVNAME) +es = connect(envname=ENVNAME) +logger.info('Connected') +# create_schema_and_tables(engine, envname=ENVNAME) +load_modules(modes={ImportMode.API, ImportMode.HANDLERS}) +Base.metadata.create_all(engine.engine) +CDKPROXY_URL = ( + 'http://cdkproxy:2805' if ENVNAME == 'dkrcompose' else 'http://localhost:2805' +) +config.set_property("cdk_proxy_url", CDKPROXY_URL) + +save_permissions_with_tenant(engine) + + +class Context: + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + +schema = get_executable_schema() +# app = GraphQL(schema, debug=True) + +app = Flask(__name__) +CORS(app) + + +def request_context(headers, mock=False): + if mock: + username = headers.get('username', 'anonymous@amazon.com') + groups = headers.get('groups', ['Scientists', 
'DAAdministrators', 'Engineers', 'Other']) + else: + if not headers.get('Authorization'): + raise Exception('Missing Authorization header') + try: + decoded = jwt.decode(headers.get('Authorization'), options={"verify_signature": False}) + username = decoded.get('email', 'anonymous') + groups = [] + saml_groups = decoded.get('custom:saml.groups', []) + if len(saml_groups): + groups: list = ( + saml_groups.replace('[', '') + .replace(']', '') + .replace(', ', ',') + .split(',') + ) + cognito_groups = decoded.get('cognito:groups', []) + groups.extend(cognito_groups) + except Exception as e: + logger.error(str(e)) + raise e + + for group in groups: + with engine.scoped_session() as session: + TenantPolicy.attach_group_tenant_policy( + session=session, + group=group, + permissions=permissions.TENANT_ALL, + tenant_name='dataall', + ) + + set_context(RequestContext(engine, username, groups)) + + # TODO: remove when the migration to a new RequestContext API is complete. Used only for backward compatibility + context = Context( + engine=engine, + schema=schema, + username=username, + groups=groups, + ) + return context.__dict__ + + +@app.route('/graphql', methods=['OPTIONS']) +def opt(): + # On GET request serve GraphQL Playground + # You don't need to provide Playground if you don't want to + # but keep on mind this will not prohibit clients from + # exploring your API using desktop GraphQL Playground app. + return '
Hello
', 200 + + +@app.route('/esproxy', methods=['OPTIONS']) +def esproxyopt(): + # On GET request serve GraphQL Playground + # You don't need to provide Playground if you don't want to + # but keep on mind this will not prohibit clients from + # exploring your API using desktop GraphQL Playground app. + return '
Hello
', 200 + + +@app.route('/graphql', methods=['GET']) +def graphql_playground(): + # On GET request serve GraphQL Playground + # You don't need to provide Playground if you don't want to + # but keep on mind this will not prohibit clients from + # exploring your API using desktop GraphQL Playground app. + return PLAYGROUND_HTML, 200 + + +@app.route('/esproxy', methods=['POST']) +def esproxy(): + body = request.data.decode('utf-8') + print(body) + return run_query(es=es, index='dataall-index', body=body) + + +@app.route('/graphql', methods=['POST']) +def graphql_server(): + print('.............................') + # GraphQL queries are always sent as POST + logger.debug(request.data) + data = request.get_json() + + context = request_context(request.headers, mock=True) + logger.debug(context) + + # Note: Passing the request to the context is optional. + # In Flask, the current request is always accessible as flask.request + success, result = graphql_sync( + schema, + data, + context_value=context, + debug=app.debug, + ) + + dispose_context() + status_code = 200 if success else 400 + return jsonify(result), status_code + + +if __name__ == '__main__': + logger.info('Starting dataall flask local application') + app.run( + debug=True, # nosec + threaded=False, + host='0.0.0.0', + port=5000, + ) diff --git a/backend/migrations/drop_tables.py b/backend/migrations/drop_tables.py index ac708900e..898660ec1 100644 --- a/backend/migrations/drop_tables.py +++ b/backend/migrations/drop_tables.py @@ -1,6 +1,6 @@ import os -from dataall.db.connection import ( +from dataall.base.db.connection import ( ENVNAME, get_engine, drop_schema_if_exists, diff --git a/backend/migrations/env.py b/backend/migrations/env.py index 0a1b939ca..277278357 100644 --- a/backend/migrations/env.py +++ b/backend/migrations/env.py @@ -4,8 +4,8 @@ # this is the Alembic Config object, which provides # access to the values within the .ini file in use. -from dataall.db.base import Base -from dataall.db.connection import ENVNAME, get_engine +from dataall.base.db.base import Base +from dataall.base.db.connection import ENVNAME, get_engine config = context.config diff --git a/backend/migrations/versions/033c3d6c1849_init_permissions.py b/backend/migrations/versions/033c3d6c1849_init_permissions.py index bc48176f9..09c90a57d 100644 --- a/backend/migrations/versions/033c3d6c1849_init_permissions.py +++ b/backend/migrations/versions/033c3d6c1849_init_permissions.py @@ -11,7 +11,7 @@ import sqlalchemy as sa from sqlalchemy import orm -from dataall.db import api, get_engine, has_table +from dataall.core.permissions.db import Permission # revision identifiers, used by Alembic. 
revision = '033c3d6c1849' @@ -26,7 +26,7 @@ def upgrade(): bind = op.get_bind() session = orm.Session(bind=bind) print('Initializing permissions...') - api.Permission.init_permissions(session) + Permission.init_permissions(session) print('Permissions initialized successfully') except Exception as e: print(f'Failed to init permissions due to: {e}') diff --git a/backend/migrations/versions/04d92886fabe_add_consumption_roles.py b/backend/migrations/versions/04d92886fabe_add_consumption_roles.py index 2cda5e189..8bcef8749 100644 --- a/backend/migrations/versions/04d92886fabe_add_consumption_roles.py +++ b/backend/migrations/versions/04d92886fabe_add_consumption_roles.py @@ -11,10 +11,16 @@ from sqlalchemy.dialects import postgresql from sqlalchemy.ext.declarative import declarative_base -from dataall.db import api, models, permissions, utils -from dataall.db.models.Enums import ShareObjectStatus, ShareableType, PrincipalType +from dataall.core.environment.db.environment_models import Environment +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.permissions.db.permission_repositories import Permission +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.base.db import utils +from dataall.core.permissions import permissions from datetime import datetime +from dataall.modules.dataset_sharing.db.enums import ShareObjectStatus + # revision identifiers, used by Alembic. revision = '04d92886fabe' down_revision = 'd922057f0d91' @@ -108,7 +114,7 @@ def upgrade(): bind = op.get_bind() session = orm.Session(bind=bind) print('Re-Initializing permissions...') - api.Permission.init_permissions(session) + Permission.init_permissions(session) print('Permissions re-initialized successfully') except Exception as e: print(f'Failed to init permissions due to: {e}') @@ -118,18 +124,18 @@ def upgrade(): bind = op.get_bind() session = orm.Session(bind=bind) print('Back-filling consumer role permissions for environments...') - envs = api.Environment.list_all_active_environments(session=session) + envs = EnvironmentService.list_all_active_environments(session=session) for env in envs: - groups = api.Environment.query_all_environment_groups( + groups = EnvironmentService.query_all_environment_groups( session=session, uri=env.environmentUri, filter=None ) for group in groups: - api.ResourcePolicy.attach_resource_policy( + ResourcePolicy.attach_resource_policy( session=session, resource_uri=env.environmentUri, group=group.groupUri, permissions=permissions.CONSUMPTION_ENVIRONMENT_ROLE_ALL, - resource_type=models.Environment.__name__, + resource_type=Environment.__name__, ) print('Consumer Role Permissions created successfully') except Exception as e: diff --git a/backend/migrations/versions/166af5c0355b_release_3_7_1.py b/backend/migrations/versions/166af5c0355b_release_3_7_1.py index 46277fc3c..d9bef70a5 100644 --- a/backend/migrations/versions/166af5c0355b_release_3_7_1.py +++ b/backend/migrations/versions/166af5c0355b_release_3_7_1.py @@ -9,13 +9,12 @@ from alembic import op from sqlalchemy import Boolean, Column, String, DateTime, orm -from sqlalchemy.dialects import postgresql from sqlalchemy.ext.declarative import declarative_base -from dataall.db import utils, Resource +from dataall.base.db import utils, Resource # revision identifiers, used by Alembic. 
-from dataall.utils.naming_convention import ( +from dataall.base.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, ) diff --git a/backend/migrations/versions/3ae3eeca475c_release_3_6_1.py b/backend/migrations/versions/3ae3eeca475c_release_3_6_1.py index 657434e00..bf956903d 100644 --- a/backend/migrations/versions/3ae3eeca475c_release_3_6_1.py +++ b/backend/migrations/versions/3ae3eeca475c_release_3_6_1.py @@ -14,7 +14,7 @@ from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import query_expression -from dataall.db import utils +from dataall.base.db import utils # revision identifiers, used by Alembic. diff --git a/backend/migrations/versions/4a0618805341_rename_sgm_studio_permissions.py b/backend/migrations/versions/4a0618805341_rename_sgm_studio_permissions.py new file mode 100644 index 000000000..7cc3ab5df --- /dev/null +++ b/backend/migrations/versions/4a0618805341_rename_sgm_studio_permissions.py @@ -0,0 +1,189 @@ +"""rename_sgm_studio_permissions + +Revision ID: 4a0618805341 +Revises: 92bdf9efb1aa +Create Date: 2023-05-17 13:39:00.974409 + +""" +from alembic import op +from sqlalchemy import String, orm, and_ + +from dataall.core.permissions.db.permission_repositories import Permission as PermissionService +from dataall.core.permissions.db.permission_models import Permission, TenantPolicyPermission, PermissionType +from dataall.modules.notebooks.services.notebook_permissions import MANAGE_NOTEBOOKS +from dataall.modules.mlstudio.services.mlstudio_permissions import ( + MANAGE_SGMSTUDIO_USERS, +) + + +# revision identifiers, used by Alembic. +revision = '4a0618805341' +down_revision = '92bdf9efb1aa' +branch_labels = None +depends_on = None + +# Define constants +CREATE_SGMSTUDIO_NOTEBOOK = 'CREATE_SGMSTUDIO_NOTEBOOK' +LIST_ENVIRONMENT_SGMSTUDIO_NOTEBOOKS = 'LIST_ENVIRONMENT_SGMSTUDIO_NOTEBOOKS' + +GET_SGMSTUDIO_NOTEBOOK = 'GET_SGMSTUDIO_NOTEBOOK' +UPDATE_SGMSTUDIO_NOTEBOOK = 'UPDATE_SGMSTUDIO_NOTEBOOK' +DELETE_SGMSTUDIO_NOTEBOOK = 'DELETE_SGMSTUDIO_NOTEBOOK' +SGMSTUDIO_NOTEBOOK_URL = 'SGMSTUDIO_NOTEBOOK_URL' + +OLD_PERMISSIONS = [ + CREATE_SGMSTUDIO_NOTEBOOK, + LIST_ENVIRONMENT_SGMSTUDIO_NOTEBOOKS, + GET_SGMSTUDIO_NOTEBOOK, + UPDATE_SGMSTUDIO_NOTEBOOK, + DELETE_SGMSTUDIO_NOTEBOOK, + SGMSTUDIO_NOTEBOOK_URL +] +old_permissions = {k: k for k in OLD_PERMISSIONS} +old_permissions[CREATE_SGMSTUDIO_NOTEBOOK] = 'Create ML Studio profiles on this environment' + + +CREATE_SGMSTUDIO_USER = 'CREATE_SGMSTUDIO_USER' +LIST_ENVIRONMENT_SGMSTUDIO_USERS = 'LIST_ENVIRONMENT_SGMSTUDIO_USERS' + +GET_SGMSTUDIO_USER = 'GET_SGMSTUDIO_USER' +UPDATE_SGMSTUDIO_USER = 'UPDATE_SGMSTUDIO_USER' +DELETE_SGMSTUDIO_USER = 'DELETE_SGMSTUDIO_USER' +SGMSTUDIO_USER_URL = 'SGMSTUDIO_USER_URL' + +NEW_PERMISSIONS = [ + CREATE_SGMSTUDIO_USER, + LIST_ENVIRONMENT_SGMSTUDIO_USERS, + GET_SGMSTUDIO_USER, + UPDATE_SGMSTUDIO_USER, + DELETE_SGMSTUDIO_USER, + SGMSTUDIO_USER_URL +] +new_permissions = {k: k for k in NEW_PERMISSIONS} +new_permissions[CREATE_SGMSTUDIO_USER] = 'Create SageMaker Studio users on this environment' + + +def upgrade(): + """ + The script does the following migration: + 1) create missing permissions MANAGE_SGMSTUDIO_USERS from MANAGE_NOTEBOOKS tenant permission + 2) Rename SageMaker Studio permissions from SGMSTUDIO_NOTEBOOK to SGMSTUDIO_USER + 3) Rename sagemaker_studio_user_profile column names + """ + try: + bind = op.get_bind() + session = orm.Session(bind=bind) + + print("Creating new permission MANAGE_SGMSTUDIO_USERS to distinguish from 
MANAGE_NOTEBOOKS...") + + manage_mlstudio_permission = PermissionService.save_permission( + session=session, name=MANAGE_SGMSTUDIO_USERS, description="Allow MANAGE_SGMSTUDIO_USERS", permission_type=PermissionType.TENANT.name + ) + session.commit() + print(f"manage_mlstudio_permission_uri = {manage_mlstudio_permission.permissionUri}") + manage_notebooks_permission = ( + session.query(Permission) + .filter(and_( + Permission.name == MANAGE_NOTEBOOKS, + Permission.type == PermissionType.TENANT.name + )) + .first() + ) + print(f"manage_notebooks_permission_uri = {manage_notebooks_permission.permissionUri}") + tenant_permissions = ( + session.query(TenantPolicyPermission) + .filter(TenantPolicyPermission.permissionUri == manage_notebooks_permission.permissionUri) + .all() + ) + for permission in tenant_permissions: + print(permission.permissionUri) + existing_tenant_permissions = ( + session.query(TenantPolicyPermission) + .filter(and_( + TenantPolicyPermission.permissionUri == manage_mlstudio_permission.permissionUri, + TenantPolicyPermission.sid == permission.sid + )) + .first() + ) + + if existing_tenant_permissions: + print(f"Permission already exists {existing_tenant_permissions.permissionUri}, skipping...") + else: + print("Permission does not exist, adding it...") + session.add(TenantPolicyPermission( + sid=permission.sid, + permissionUri=manage_mlstudio_permission.permissionUri, + )) + + session.commit() + + print("Renaming SageMaker Studio permissions from SGMSTUDIO_NOTEBOOK to SGMSTUDIO_USER...") + + for old, new in zip(list(old_permissions.items()), list(new_permissions.items())): + print(f"Updating permission table {old[0]} to {new[0]}, description:{new[1]}") + session.query(Permission).filter(Permission.name == old[0]).update({Permission.name: new[0], Permission.description: new[1]}, synchronize_session=False) + session.commit() + + print("Renaming columns of sagemaker_studio_user_profile...") + op.alter_column('sagemaker_studio_user_profile', 'sagemakerStudioUserProfileUri', nullable=False, + new_column_name='sagemakerStudioUserUri', existing_type=String) + op.alter_column('sagemaker_studio_user_profile', 'sagemakerStudioUserProfileStatus', nullable=False, + new_column_name='sagemakerStudioUserStatus', existing_type=String) + op.alter_column('sagemaker_studio_user_profile', 'sagemakerStudioUserProfileName', nullable=False, + new_column_name='sagemakerStudioUserName', existing_type=String) + op.alter_column('sagemaker_studio_user_profile', 'sagemakerStudioUserProfileNameSlugify', nullable=False, + new_column_name='sagemakerStudioUserNameSlugify', existing_type=String) + except Exception as e: + print(f"Failed to execute the migration script due to: {e}") + + +def downgrade(): + try: + bind = op.get_bind() + session = orm.Session(bind=bind) + + print("Dropping new permission added to MANAGE_SGMSTUDIO_USERS to distinguish from MANAGE_NOTEBOOKS...") + manage_mlstudio_permission = ( + session.query(Permission) + .filter(and_( + Permission.name == MANAGE_SGMSTUDIO_USERS, + Permission.type == PermissionType.TENANT.name + )) + .first() + ) + print(f"manage_mlstudio_permission_uri = {manage_mlstudio_permission.permissionUri}") + tenant_permissions = ( + session.query(TenantPolicyPermission) + .filter(TenantPolicyPermission.permissionUri == manage_mlstudio_permission.permissionUri) + .delete() + ) + + manage_mlstudio_permission = ( + session.query(Permission) + .filter(and_( + Permission.name == MANAGE_SGMSTUDIO_USERS, + Permission.type == PermissionType.TENANT.name + )) + .delete() + ) + 
session.commit() + + print("Renaming SageMaker Studio permissions from SGMSTUDIO_USER to SGMSTUDIO_NOTEBOOK...") + for old, new in zip(list(old_permissions.items()), list(new_permissions.items())): + print(f"Updating permission table {new[0]} to name={old[0]}, description={old[1]}") + session.query(Permission).filter(Permission.name == new[0]).update( + {Permission.name: old[0], Permission.description: old[1]}, synchronize_session=False) + session.commit() + + print("Renaming columns of sagemaker_studio_user_profile...") + op.alter_column('sagemaker_studio_user_profile', 'sagemakerStudioUserUri', nullable=False, + new_column_name='sagemakerStudioUserProfileUri', existing_type=String) + op.alter_column('sagemaker_studio_user_profile', 'sagemakerStudioUserStatus', nullable=False, + new_column_name='sagemakerStudioUserProfileStatus', existing_type=String) + op.alter_column('sagemaker_studio_user_profile', 'sagemakerStudioUserName', nullable=False, + new_column_name='sagemakerStudioUserProfileName', existing_type=String) + op.alter_column('sagemaker_studio_user_profile', 'sagemakerStudioUserNameSlugify', nullable=False, + new_column_name='sagemakerStudioUserProfileNameSlugify', existing_type=String) + + except Exception as e: + print(f"Failed to execute the migration script due to: {e}") diff --git a/backend/migrations/versions/509997f0a51e_sharing_state_machines_v1_4_0.py b/backend/migrations/versions/509997f0a51e_sharing_state_machines_v1_4_0.py index a5f5e74b0..c862617f6 100644 --- a/backend/migrations/versions/509997f0a51e_sharing_state_machines_v1_4_0.py +++ b/backend/migrations/versions/509997f0a51e_sharing_state_machines_v1_4_0.py @@ -11,10 +11,10 @@ from sqlalchemy.dialects import postgresql from sqlalchemy.ext.declarative import declarative_base -from dataall.db import api, models, permissions, utils -from dataall.db.models.Enums import ShareObjectStatus, ShareItemStatus +from dataall.base.db import utils from datetime import datetime +from dataall.modules.dataset_sharing.db.enums import ShareObjectStatus, ShareItemStatus # revision identifiers, used by Alembic. 
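
After the rename migration above runs, one way to sanity-check the copy is to confirm that MANAGE_SGMSTUDIO_USERS exists as a tenant permission and is attached to tenant policies. A rough check using the same models the migration imports; the session setup here is illustrative only and assumes a reachable local engine:

# Illustrative post-upgrade check for the MANAGE_SGMSTUDIO_USERS tenant permission.
from sqlalchemy import and_
from sqlalchemy.orm import Session

from dataall.base.db import get_engine
from dataall.core.permissions.db.permission_models import (
    Permission, PermissionType, TenantPolicyPermission
)

engine = get_engine(envname='local')
session = Session(bind=engine.engine)  # engine.engine is the underlying SQLAlchemy engine
try:
    new_perm = session.query(Permission).filter(
        and_(Permission.name == 'MANAGE_SGMSTUDIO_USERS',
             Permission.type == PermissionType.TENANT.name)
    ).first()
    if new_perm:
        attached = session.query(TenantPolicyPermission).filter(
            TenantPolicyPermission.permissionUri == new_perm.permissionUri
        ).count()
        print(f'MANAGE_SGMSTUDIO_USERS attached to {attached} tenant policies')
finally:
    session.close()
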
revision = '509997f0a51e' diff --git a/backend/migrations/versions/5e5c84138af7_backfill_confidentiality.py b/backend/migrations/versions/5e5c84138af7_backfill_confidentiality.py index 123151d99..b76201a1a 100644 --- a/backend/migrations/versions/5e5c84138af7_backfill_confidentiality.py +++ b/backend/migrations/versions/5e5c84138af7_backfill_confidentiality.py @@ -12,7 +12,7 @@ from sqlalchemy.dialects import postgresql from sqlalchemy.ext.declarative import declarative_base -from dataall.db import utils, Resource +from dataall.base.db import utils, Resource revision = '5e5c84138af7' down_revision = '94697ee46c0c' diff --git a/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py new file mode 100644 index 000000000..08f655772 --- /dev/null +++ b/backend/migrations/versions/5fc49baecea4_add_enviromental_parameters.py @@ -0,0 +1,252 @@ +"""add_enviromental_parameters + +Revision ID: 5fc49baecea4 +Revises: e1cd4927482b +Create Date: 2023-02-20 14:28:13.331670 + +""" +import sqlalchemy as sa + +from typing import List + +from alembic import op +from sqlalchemy import Boolean, Column, String, orm +from sqlalchemy.ext.declarative import declarative_base + +from dataall.core.environment.db.environment_models import EnvironmentGroup +from dataall.core.permissions.db.permission_repositories import Permission +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.base.db import Resource +from dataall.core.permissions.db.permission_models import PermissionType, ResourcePolicyPermission, \ + TenantPolicyPermission +from dataall.modules.datasets.services.dataset_permissions import LIST_ENVIRONMENT_DATASETS, CREATE_DATASET + +# revision identifiers, used by Alembic. 
+revision = "5fc49baecea4" +down_revision = "e1cd4927482b" +branch_labels = None +depends_on = None + +Base = declarative_base() + +UNUSED_RESOURCE_PERMISSIONS = [ + 'LIST_DATASETS', 'LIST_DATASET_TABLES', 'LIST_DATASET_SHARES', 'SUMMARY_DATASET', + 'UPLOAD_DATASET', 'URL_DATASET', 'STACK_DATASET', 'SUBSCRIPTIONS_DATASET', + 'CREATE_DATASET_TABLE', 'LIST_PIPELINES', 'DASHBOARD_URL', 'GET_REDSHIFT_CLUSTER', + 'SHARE_REDSHIFT_CLUSTER', 'DELETE_REDSHIFT_CLUSTER', 'REBOOT_REDSHIFT_CLUSTER', 'RESUME_REDSHIFT_CLUSTER', + 'PAUSE_REDSHIFT_CLUSTER', 'ADD_DATASET_TO_REDSHIFT_CLUSTER', 'LIST_REDSHIFT_CLUSTER_DATASETS', + 'REMOVE_DATASET_FROM_REDSHIFT_CLUSTER', 'ENABLE_REDSHIFT_TABLE_COPY', 'DISABLE_REDSHIFT_TABLE_COPY', + 'GET_REDSHIFT_CLUSTER_CREDENTIALS', 'CREATE_REDSHIFT_CLUSTER', 'LIST_ENVIRONMENT_REDSHIFT_CLUSTERS' +] + +UNUSED_TENANT_PERMISSIONS = [ + 'MANAGE_REDSHIFT_CLUSTERS' +] + + +class Environment(Resource, Base): + __tablename__ = "environment" + environmentUri = Column(String, primary_key=True) + notebooksEnabled = Column(Boolean) + mlStudiosEnabled = Column(Boolean) + pipelinesEnabled = Column(Boolean) + dashboardsEnabled = Column(Boolean) + warehousesEnabled = Column(Boolean) + + +class EnvironmentParameter(Base): + __tablename__ = "environment_parameters" + environmentUri = Column(String, primary_key=True) + paramKey = Column(String, primary_key=True), + paramValue = Column(String, nullable=True) + + +class SagemakerNotebook(Resource, Base): + __tablename__ = "sagemaker_notebook" + environmentUri = Column(String, nullable=False) + notebookUri = Column(String, primary_key=True) + + +def upgrade(): + """ + The script does the following migration: + 1) creation of the environment_parameters and environment_resources tables + 2) Migration xxxEnabled to the environment_parameters table + 3) Dropping the xxxEnabled columns from the environment_parameters + """ + try: + bind = op.get_bind() + session = orm.Session(bind=bind) + + print("Creating environment_parameters table...") + op.create_table( + "environment_parameters", + Column("environmentUri", String, primary_key=True), + Column("paramKey", String, primary_key=True), + Column("paramValue", String, nullable=False), + ) + print("Creation of environment_parameters table is done") + + print("Migrating the environmental parameters from environment table to environment_parameters table...") + envs: List[Environment] = session.query(Environment).all() + params: List[EnvironmentParameter] = [] + for env in envs: + _add_param_if_exists( + params, env, "notebooksEnabled", str(env.notebooksEnabled).lower() # for frontend + ) + _add_param_if_exists( + params, env, "mlStudiosEnabled", str(env.mlStudiosEnabled).lower() # for frontend + ) + _add_param_if_exists( + params, env, "pipelinesEnabled", str(env.pipelinesEnabled).lower() # for frontend + ) + _add_param_if_exists( + params, env, "dashboardsEnabled", str(env.dashboardsEnabled).lower() # for frontend + ) + + session.add_all(params) + print("Migration of the environmental parameters has been complete") + + op.drop_column("environment", "notebooksEnabled") + op.drop_column("environment", "mlStudiosEnabled") + op.drop_column("environment", "pipelinesEnabled") + op.drop_column("environment", "dashboardsEnabled") + op.drop_column("environment", "warehousesEnabled") + print("Dropped the columns from the environment table ") + + create_foreign_key_to_env(op, 'sagemaker_notebook') + create_foreign_key_to_env(op, 'dataset') + create_foreign_key_to_env(op, 'sagemaker_studio_user_profile') + 
create_foreign_key_to_env(op, 'redshiftcluster') + create_foreign_key_to_env(op, 'datapipeline') + create_foreign_key_to_env(op, 'dashboard') + + session.commit() + + migrate_groups_permissions(session) + delete_unused_permissions(session) + + op.drop_table("tenant_administrator") + + except Exception as ex: + print(f"Failed to execute the migration script due to: {ex}") + + +def downgrade(): + try: + bind = op.get_bind() + session = orm.Session(bind=bind) + + print("dropping foreign keys and adding columns to environment table...") + + op.drop_constraint("fk_sagemaker_notebook_env_uri", "sagemaker_notebook") + op.drop_constraint("fk_dataset_env_uri", "dataset") + op.drop_constraint("fk_sagemaker_studio_user_profile_env_uri", "sagemaker_studio_user_profile") + op.drop_constraint("fk_redshiftcluster_env_uri", "redshiftcluster") + op.drop_constraint("fk_datapipeline_env_uri", "datapipeline") + op.drop_constraint("fk_dashboard_env_uri", "dashboard") + op.add_column("environment", Column("notebooksEnabled", Boolean, default=True)) + op.add_column("environment", Column("mlStudiosEnabled", Boolean, default=True)) + op.add_column("environment", Column("pipelinesEnabled", Boolean, default=True)) + op.add_column("environment", Column("dashboardsEnabled", Boolean, default=True)) + op.add_column("environment", Column("warehousesEnabled", Boolean, default=True)) + + print("Filling environment table with parameters rows...") + params = session.query(EnvironmentParameter).all() + envs = [] + for param in params: + print(param) + envs.append(Environment( + environmentUri=param.environmentUri, + notebooksEnabled=params["notebooksEnabled"] == "true", + mlStudiosEnabled=params["mlStudiosEnabled"] == "true", + pipelinesEnabled=params["pipelinesEnabled"] == "true", + dashboardsEnabled=params["dashboardsEnabled"] == "true", + warehousesEnabled=params["warehousesEnabled"] == "true", + )) + + save_deleted_permissions(session) + + session.add_all(envs) + print("Dropping environment_parameter table...") + op.drop_table("environment_parameters") + + except Exception as ex: + print(f"Failed to execute the rollback script due to: {ex}") + + +def _add_param_if_exists(params: List[EnvironmentParameter], env: Environment, key, val) -> None: + if val is not None: + params.append(EnvironmentParameter( + environmentUri=env.environmentUri, + paramKey=key, + paramValue=str(val).lower() + )) + + +def create_foreign_key_to_env(op, table: str): + op.create_foreign_key( + f"fk_{table}_env_uri", + table, "environment", + ["environmentUri"], ["environmentUri"], + ) + + +def find_all_groups(session): + return session.query(EnvironmentGroup).all() + + +def migrate_groups_permissions(session): + """ + Adds new permission if the old exist. 
needed to get rid of old hacks in the code + """ + permissions = [CREATE_DATASET, LIST_ENVIRONMENT_DATASETS] + + groups = find_all_groups(session) + for group in groups: + new_perms = [] + for existed, to_add in permissions: + if not ResourcePolicy.has_group_resource_permission( + session, + group_uri=group, + permission_name=existed, + resource_uri=group.environmentUri, + ): + new_perms.append(to_add) + + if new_perms: + ResourcePolicy.attach_resource_policy( + session=session, + group=group.groupUri, + permissions=new_perms, + resource_uri=group.environmentUri, + resource_type=Environment.__name__ + ) + + +def delete_unused_permissions(session): + for name in UNUSED_RESOURCE_PERMISSIONS: + perm = Permission.get_permission_by_name(session, name, PermissionType.RESOURCE.value) + ( + session.query(ResourcePolicyPermission) + .filter(ResourcePolicyPermission.permissionUri == perm.permissionUri) + .delete() + ) + session.delete(perm) + + for name in UNUSED_TENANT_PERMISSIONS: + perm = Permission.get_permission_by_name(session, name, PermissionType.TENANT.value) + ( + session.query(TenantPolicyPermission) + .filter(TenantPolicyPermission.permissionUri == perm.permissionUri) + .delete() + ) + session.delete(perm) + + +def save_deleted_permissions(session): + for name in UNUSED_RESOURCE_PERMISSIONS: + Permission.save_permission(session, name, name, PermissionType.RESOURCE.value) + + for name in UNUSED_TENANT_PERMISSIONS: + Permission.save_permission(session, name, name, PermissionType.TENANT.value) diff --git a/backend/migrations/versions/72b8a90b6ee8__share_request_purpose.py b/backend/migrations/versions/72b8a90b6ee8__share_request_purpose.py index 1b358b11d..3c331754e 100644 --- a/backend/migrations/versions/72b8a90b6ee8__share_request_purpose.py +++ b/backend/migrations/versions/72b8a90b6ee8__share_request_purpose.py @@ -9,7 +9,11 @@ from sqlalchemy import orm, Column, String, and_ from sqlalchemy.ext.declarative import declarative_base -from dataall.db import api, models, permissions +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.modules.dataset_sharing.db.share_object_models import ShareObject +from dataall.modules.dataset_sharing.services.share_permissions import SHARE_OBJECT_APPROVER, SHARE_OBJECT_REQUESTER +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository # revision identifiers, used by Alembic. 
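
The upgrade above replaces the boolean xxxEnabled columns with rows in environment_parameters, so consumers read the values back as strings and coerce them to booleans. A small sketch of that round trip; the environment URI is a placeholder and envname is an assumption:

# Read the migrated key/value parameters for one environment and coerce them to booleans.
from sqlalchemy import text

from dataall.base.db import get_engine

engine = get_engine(envname='local').engine   # underlying SQLAlchemy engine
rows = engine.execute(
    text('SELECT "paramKey", "paramValue" FROM environment_parameters WHERE "environmentUri" = :uri'),
    uri='env-uri-placeholder',
)
features = {key: value == 'true' for key, value in rows}
print(features.get('notebooksEnabled', False))
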
revision = '72b8a90b6ee8' @@ -31,15 +35,15 @@ def upgrade(): session = orm.Session(bind=bind) print('Getting all Share Objects...') - shares: [models.ShareObject] = session.query(models.ShareObject).all() + shares: [ShareObject] = session.query(ShareObject).all() for share in shares: - dataset = api.Dataset.get_dataset_by_uri(session, share.datasetUri) - environment = api.Environment.get_environment_by_uri(session, share.environmentUri) + dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) + environment = EnvironmentService.get_environment_by_uri(session, share.environmentUri) # Env Admins # Delete Share Object Permissions on Share Env Admin if Not Share Requester Group if share.groupUri != environment.SamlGroupName: - api.ResourcePolicy.delete_resource_policy( + ResourcePolicy.delete_resource_policy( session=session, group=environment.SamlGroupName, resource_uri=share.shareUri, @@ -48,34 +52,34 @@ def upgrade(): # Dataset Admins # Delete and Recreate Dataset Share Object Permissions to be Share Object Approver Permission Set - api.ResourcePolicy.delete_resource_policy( + ResourcePolicy.delete_resource_policy( session=session, group=dataset.SamlAdminGroupName, resource_uri=share.shareUri, ) - api.ResourcePolicy.attach_resource_policy( + ResourcePolicy.attach_resource_policy( session=session, group=dataset.SamlAdminGroupName, - permissions=permissions.SHARE_OBJECT_APPROVER, + permissions=SHARE_OBJECT_APPROVER, resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, + resource_type=ShareObject.__name__, ) print(f"Recreated SHARE_OBJECT_APPROVER Permissions for Dataset Owner {dataset.SamlAdminGroupName} on Share {share.shareUri}") # Dataset Stewards # Delete and Recreate Dataset Share Object Permissions to be Share Object Approver Permission Set if dataset.SamlAdminGroupName != dataset.stewards: - api.ResourcePolicy.delete_resource_policy( + ResourcePolicy.delete_resource_policy( session=session, group=dataset.stewards, resource_uri=share.shareUri, ) - api.ResourcePolicy.attach_resource_policy( + ResourcePolicy.attach_resource_policy( session=session, group=dataset.stewards, - permissions=permissions.SHARE_OBJECT_APPROVER, + permissions=SHARE_OBJECT_APPROVER, resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, + resource_type=ShareObject.__name__, ) print(f"Recreated SHARE_OBJECT_APPROVER Permissions for Dataset Steward {dataset.stewards} on Share {share.shareUri}") @@ -93,19 +97,19 @@ def downgrade(): session = orm.Session(bind=bind) print('Getting all Share Objects...') - shares: [models.ShareObject] = session.query(models.ShareObject).all() + shares: [ShareObject] = session.query(ShareObject).all() for share in shares: - dataset = api.Dataset.get_dataset_by_uri(session, share.datasetUri) - environment = api.Environment.get_environment_by_uri(session, share.environmentUri) + dataset = DatasetRepository.get_dataset_by_uri(session, share.datasetUri) + environment = EnvironmentService.get_environment_by_uri(session, share.environmentUri) # Env Admins # Add SHARE_OBJECT_REQUESTER to Env Admin Group - api.ResourcePolicy.attach_resource_policy( + ResourcePolicy.attach_resource_policy( session=session, group=environment.SamlGroupName, - permissions=permissions.SHARE_OBJECT_REQUESTER, + permissions=SHARE_OBJECT_REQUESTER, resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, + resource_type=ShareObject.__name__, ) print(f"Adding SHARE_OBJECT_REQUESTER Permissions for Share Env Admin {environment.SamlGroupName} on 
Share {share.shareUri}") @@ -113,17 +117,17 @@ def downgrade(): # Remove SHARE_OBJECT_APPROVER Permissions if Exists Separate from Stewards(i.e. if steward != owner) # Add SHARE_OBJECT_REQUESTER Permissions to Dataset Admin Group if dataset.SamlAdminGroupName != dataset.stewards: - api.ResourcePolicy.delete_resource_policy( + ResourcePolicy.delete_resource_policy( session=session, group=dataset.SamlAdminGroupName, resource_uri=share.shareUri, ) - api.ResourcePolicy.attach_resource_policy( + ResourcePolicy.attach_resource_policy( session=session, group=dataset.SamlAdminGroupName, - permissions=permissions.SHARE_OBJECT_REQUESTER, + permissions=SHARE_OBJECT_REQUESTER, resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, + resource_type=ShareObject.__name__, ) print(f"Adding SHARE_OBJECT_REQUESTER Permissions for Dataset Owner {dataset.SamlAdminGroupName} on Share {share.shareUri}") except Exception as e: diff --git a/backend/migrations/versions/92bdf9efb1aa_remove_unused_tables.py b/backend/migrations/versions/92bdf9efb1aa_remove_unused_tables.py new file mode 100644 index 000000000..82f126b50 --- /dev/null +++ b/backend/migrations/versions/92bdf9efb1aa_remove_unused_tables.py @@ -0,0 +1,50 @@ +"""remove_unused_tables + +Revision ID: 92bdf9efb1aa +Revises: 5fc49baecea4 +Create Date: 2023-05-22 10:00:07.432462 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy import orm + + +# revision identifiers, used by Alembic. +revision = '92bdf9efb1aa' +down_revision = '5fc49baecea4' +branch_labels = None +depends_on = None + + +def upgrade(): + try: + bind = op.get_bind() + session = orm.Session(bind=bind) + print("Dropping worksheet_share table...") + op.drop_table('worksheet_share') + session.commit() + except Exception as e: + print(f"Failed to execute the migration script due to: {e}") + + +def downgrade(): + try: + bind = op.get_bind() + session = orm.Session(bind=bind) + print("Creating worksheet_share table...") + op.create_table( + 'worksheet_share', + sa.Column('worksheetShareUri', sa.String(), nullable=False), + sa.Column('worksheetUri', sa.String(), nullable=False), + sa.Column('principalId', sa.String(), nullable=False), + sa.Column('principalType', sa.String(), nullable=False), + sa.Column('canEdit', sa.Boolean(), nullable=True), + sa.Column('owner', sa.String(), nullable=False), + sa.Column('created', sa.DateTime(), nullable=True), + sa.Column('updated', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('worksheetShareUri'), + ) + session.commit() + except Exception as e: + print(f"Failed to execute the migration script due to: {e}") diff --git a/backend/migrations/versions/97050ec09354_release_3_7_8.py b/backend/migrations/versions/97050ec09354_release_3_7_8.py index fd0fdce87..469fbb3e0 100644 --- a/backend/migrations/versions/97050ec09354_release_3_7_8.py +++ b/backend/migrations/versions/97050ec09354_release_3_7_8.py @@ -5,20 +5,15 @@ Create Date: 2021-12-08 12:54:33.828838 """ -import datetime from alembic import op -from sqlalchemy import Boolean, Column, String, DateTime, orm +from sqlalchemy import Boolean, Column, String, orm from sqlalchemy.dialects import postgresql from sqlalchemy.ext.declarative import declarative_base -from dataall.db import utils, Resource +from dataall.base.db import utils, Resource # revision identifiers, used by Alembic. -from dataall.utils.naming_convention import ( - NamingConventionService, - NamingConventionPattern, -) # revision identifiers, used by Alembic. 
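
The 92bdf9efb1aa upgrade above drops worksheet_share unconditionally and relies on the surrounding try/except if the table is missing; a more defensive variant would test for the table first. This is a sketch of that alternative, not how the shipped migration is written:

# Defensive variant: only drop the table when it is actually present.
from alembic import op


def upgrade():
    bind = op.get_bind()
    if bind.dialect.has_table(bind, 'worksheet_share'):
        op.drop_table('worksheet_share')
    else:
        print('worksheet_share table not found, nothing to drop')
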
revision = '97050ec09354' diff --git a/backend/migrations/versions/b6e0ac8f6d3f_add_env_feature_flags.py b/backend/migrations/versions/b6e0ac8f6d3f_add_env_feature_flags.py index df90288ca..fcbc478ee 100644 --- a/backend/migrations/versions/b6e0ac8f6d3f_add_env_feature_flags.py +++ b/backend/migrations/versions/b6e0ac8f6d3f_add_env_feature_flags.py @@ -12,7 +12,7 @@ from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import query_expression -from dataall.db import Resource, utils +from dataall.base.db import Resource, utils revision = 'b6e0ac8f6d3f' down_revision = '967fa9c0a147' diff --git a/backend/migrations/versions/bc6ff74a16bc_clean_up.py b/backend/migrations/versions/bc6ff74a16bc_clean_up.py index 9636d313b..3f084172b 100644 --- a/backend/migrations/versions/bc6ff74a16bc_clean_up.py +++ b/backend/migrations/versions/bc6ff74a16bc_clean_up.py @@ -8,7 +8,7 @@ from alembic import op import os import sqlalchemy as sa -from dataall.db import get_engine, has_table, create_schema_if_not_exists +from dataall.base.db import get_engine, has_table, create_schema_if_not_exists # revision identifiers, used by Alembic. diff --git a/backend/migrations/versions/bd271a2780b2_init_database.py b/backend/migrations/versions/bd271a2780b2_init_database.py index e84ce2062..0013c0579 100644 --- a/backend/migrations/versions/bd271a2780b2_init_database.py +++ b/backend/migrations/versions/bd271a2780b2_init_database.py @@ -11,7 +11,7 @@ from alembic import op from sqlalchemy.dialects import postgresql -from dataall.db import get_engine, has_table, create_schema_if_not_exists +from dataall.base.db import get_engine, has_table, create_schema_if_not_exists revision = 'bd271a2780b2' down_revision = None diff --git a/backend/migrations/versions/c5c6bbbc5de7_release_3_5_0.py b/backend/migrations/versions/c5c6bbbc5de7_release_3_5_0.py index 34aabdf2c..88850b8e9 100644 --- a/backend/migrations/versions/c5c6bbbc5de7_release_3_5_0.py +++ b/backend/migrations/versions/c5c6bbbc5de7_release_3_5_0.py @@ -11,7 +11,7 @@ from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import query_expression -from dataall.db import Resource, utils +from dataall.base.db import Resource, utils # revision identifiers, used by Alembic. 
revision = 'c5c6bbbc5de7' diff --git a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py index d75e7d6cc..67a8cfb50 100644 --- a/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py +++ b/backend/migrations/versions/d05f9a5b215e_backfill_dataset_table_permissions.py @@ -6,15 +6,20 @@ """ from alembic import op -import sqlalchemy as sa from sqlalchemy import orm, Column, String, Text, DateTime, and_ from sqlalchemy.orm import query_expression from sqlalchemy.dialects import postgresql from sqlalchemy.ext.declarative import declarative_base -from dataall.db import api, models, permissions, utils, Resource -from datetime import datetime -from dataall.db.models.Enums import ShareObjectStatus, ShareableType +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.permissions.db.permission_repositories import Permission +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.base.db import utils, Resource +from datetime import datetime +from dataall.modules.dataset_sharing.db.enums import ShareObjectStatus, ShareableType, ShareItemStatus +from dataall.modules.dataset_sharing.db.share_object_repositories import ShareObjectRepository +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ # revision identifiers, used by Alembic. revision = 'd05f9a5b215e' @@ -74,7 +79,7 @@ def upgrade(): bind = op.get_bind() session = orm.Session(bind=bind) print('Re-Initializing permissions...') - api.Permission.init_permissions(session) + Permission.init_permissions(session) print('Permissions re-initialized successfully') except Exception as e: print(f'Failed to init permissions due to: {e}') @@ -85,17 +90,17 @@ def upgrade(): print('Back-filling dataset table permissions for owners/stewards...') dataset_tables: [DatasetTable] = session.query(DatasetTable).filter(DatasetTable.deleted.is_(None)).all() for table in dataset_tables: - dataset = api.Dataset.get_dataset_by_uri(session, table.datasetUri) - env = api.Environment.get_environment_by_uri(session, dataset.environmentUri) + dataset = DatasetRepository.get_dataset_by_uri(session, table.datasetUri) + env = EnvironmentService.get_environment_by_uri(session, dataset.environmentUri) groups = set([dataset.SamlAdminGroupName, env.SamlGroupName, dataset.stewards if dataset.stewards is not None else dataset.SamlAdminGroupName]) for group in groups: - api.ResourcePolicy.attach_resource_policy( + ResourcePolicy.attach_resource_policy( session=session, resource_uri=table.tableUri, group=group, - permissions=permissions.DATASET_TABLE_READ, - resource_type=models.DatasetTable.__name__, + permissions=DATASET_TABLE_READ, + resource_type=DatasetTable.__name__, ) print('dataset table permissions updated successfully for owners/stewards') except Exception as e: @@ -108,19 +113,19 @@ def upgrade(): share_table_items: [ShareObjectItem] = session.query(ShareObjectItem).filter( ( and_( - ShareObjectItem.status == ShareObjectStatus.Share_Succeeded.value, + ShareObjectItem.status == ShareItemStatus.Share_Succeeded.value, ShareObjectItem.itemType == ShareableType.Table.value ) ) ).all() for shared_table in share_table_items: - share = api.ShareObject.get_share_by_uri(session, shared_table.shareUri) - api.ResourcePolicy.attach_resource_policy( + share 
= ShareObjectRepository.get_share_by_uri(session, shared_table.shareUri) + ResourcePolicy.attach_resource_policy( session=session, group=share.principalId, - permissions=permissions.DATASET_TABLE_READ, + permissions=DATASET_TABLE_READ, resource_uri=shared_table.itemUri, - resource_type=models.DatasetTable.__name__, + resource_type=DatasetTable.__name__, ) print('dataset table permissions updated for all shared tables') except Exception as e: diff --git a/backend/migrations/versions/e177eb044b31_init_tenant.py b/backend/migrations/versions/e177eb044b31_init_tenant.py index 8cff870d2..abd29e630 100644 --- a/backend/migrations/versions/e177eb044b31_init_tenant.py +++ b/backend/migrations/versions/e177eb044b31_init_tenant.py @@ -10,8 +10,9 @@ # revision identifiers, used by Alembic. from sqlalchemy import orm -from dataall import db -from dataall.db import api +from dataall.core.permissions.db.tenant_repositories import Tenant +from dataall.core.permissions.db.tenant_policy_repositories import TenantPolicy +from dataall.core.permissions.permissions import TENANT_ALL revision = 'e177eb044b31' down_revision = '033c3d6c1849' @@ -24,13 +25,13 @@ def upgrade(): bind = op.get_bind() session = orm.Session(bind=bind) print('Initializing permissions...') - db.api.Tenant.save_tenant(session, name='dataall', description='Tenant dataall') + Tenant.save_tenant(session, name='dataall', description='Tenant dataall') print('Tenant initialized successfully') print('Attaching superusers group DHAdmins...') - api.TenantPolicy.attach_group_tenant_policy( + TenantPolicy.attach_group_tenant_policy( session=session, group='DHAdmins', - permissions=db.permissions.TENANT_ALL, + permissions=TENANT_ALL, tenant_name='dataall', ) print('Attaching superusers groups DHAdmins') diff --git a/backend/migrations/versions/e1cd4927482b_rename_imported_dataset_aws_resources.py b/backend/migrations/versions/e1cd4927482b_rename_imported_dataset_aws_resources.py index 9f77df9cc..a03ea9778 100644 --- a/backend/migrations/versions/e1cd4927482b_rename_imported_dataset_aws_resources.py +++ b/backend/migrations/versions/e1cd4927482b_rename_imported_dataset_aws_resources.py @@ -12,8 +12,8 @@ from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import query_expression -from dataall.db import utils, Resource -from dataall.utils.naming_convention import ( +from dataall.base.db import utils, Resource +from dataall.base.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, ) diff --git a/backend/requirements.txt b/backend/requirements.txt index 16f1de1bc..eaa4b6435 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -14,4 +14,5 @@ PyYAML==6.0 requests==2.31.0 requests_aws4auth==1.1.1 sqlalchemy==1.3.24 -starlette==0.27.0 \ No newline at end of file +starlette==0.27.0 +deprecated==1.2.13 \ No newline at end of file diff --git a/backend/search_handler.py b/backend/search_handler.py index 6684625bc..5a3a1318c 100644 --- a/backend/search_handler.py +++ b/backend/search_handler.py @@ -1,7 +1,7 @@ import json import os -from dataall.searchproxy import connect, run_query +from dataall.base.searchproxy import connect, run_query ENVNAME = os.getenv('envname', 'local') es = connect(envname=ENVNAME) diff --git a/cdk.json b/cdk.json index eda207219..0da6190c4 100644 --- a/cdk.json +++ b/cdk.json @@ -2,7 +2,7 @@ "app": "python ./deploy/app.py", "context": { "@aws-cdk/aws-apigateway:usagePlanKeyOrderInsensitiveId": false, - "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": 
false, + "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": true, "@aws-cdk/aws-rds:lowercaseDbIdentifier": false, "@aws-cdk/core:stackRelativeExports": false } diff --git a/config.json b/config.json new file mode 100644 index 000000000..9a1d84fd0 --- /dev/null +++ b/config.json @@ -0,0 +1,32 @@ +{ + "modules": { + "mlstudio": { + "active": true + }, + "notebooks": { + "active": true + }, + "datapipelines": { + "active": true + }, + "datasets": { + "active": true, + "features": { + "file_uploads": true, + "file_actions": true, + "aws_actions": true + } + }, + "worksheets": { + "active": true + }, + "dashboards": { + "active": true + } + }, + "core": { + "features": { + "env_aws_actions": true + } + } +} \ No newline at end of file diff --git a/deploy/pivot_role/pivotRole.yaml b/deploy/pivot_role/pivotRole.yaml index 3a4ce3243..6a2154adb 100644 --- a/deploy/pivot_role/pivotRole.yaml +++ b/deploy/pivot_role/pivotRole.yaml @@ -386,7 +386,7 @@ Resources: - "quicksight:DescribeDashboard" - "quicksight:DescribeUser" - "quicksight:SearchDashboards" - - "quicksight:GetDashboardEmbedUrl" + - "quicksight:GenerateEmbedUrlForRegisteredUser" - "quicksight:GenerateEmbedUrlForAnonymousUser" - "quicksight:UpdateUser" - "quicksight:ListUserGroups" diff --git a/deploy/stacks/container.py b/deploy/stacks/container.py index be002fd30..277cefc02 100644 --- a/deploy/stacks/container.py +++ b/deploy/stacks/container.py @@ -1,3 +1,4 @@ +from typing import Dict from aws_cdk import ( aws_ec2 as ec2, aws_ecs as ecs, @@ -11,6 +12,7 @@ from aws_cdk.aws_applicationautoscaling import Schedule from .pyNestedStack import pyNestedClass +from .run_if import run_if class ContainerStack(pyNestedClass): @@ -32,16 +34,21 @@ def __init__( **kwargs, ): super().__init__(scope, id, **kwargs) + self._envname = envname + self._resource_prefix = resource_prefix if self.node.try_get_context('image_tag'): image_tag = self.node.try_get_context('image_tag') - cdkproxy_image_tag = f'cdkproxy-{image_tag}' - + self._cdkproxy_image_tag = f'cdkproxy-{image_tag}' + self._ecr_repository = ecr_repository + self._vpc = vpc + self._prod_sizing = prod_sizing + (self.scheduled_tasks_sg, self.share_manager_sg) = self.create_ecs_security_groups( - envname, - resource_prefix, - vpc, + envname, + resource_prefix, + vpc, vpce_connection, s3_prefix_list, lambdas @@ -65,7 +72,7 @@ def __init__( ) cdkproxy_image = ecs.ContainerImage.from_ecr_repository( repository=ecr_repository, - tag=cdkproxy_image_tag + tag=self._cdkproxy_image_tag ) cdkproxy_task_definition = ecs.CfnTaskDefinition( @@ -74,7 +81,7 @@ def __init__( container_definitions=[ecs.CfnTaskDefinition.ContainerDefinitionProperty( image=cdkproxy_image.image_name, name=cdkproxy_container_name, - command=['python3.8', '-m', 'dataall.tasks.cdkproxy'], + command=['python3.8', '-m', 'dataall.core.stacks.tasks.cdkproxy'], environment=[ ecs.CfnTaskDefinition.KeyValuePairProperty( name="AWS_REGION", @@ -88,6 +95,10 @@ def __init__( name="LOGLEVEL", value="DEBUG" ), + ecs.CfnTaskDefinition.KeyValuePairProperty( + name="config_location", + value="/config.json" + ), ], essential=True, log_configuration=ecs.CfnTaskDefinition.LogConfigurationProperty( @@ -143,40 +154,13 @@ def __init__( string_value=cdkproxy_container_name, ) - sync_tables_task, sync_tables_task_def = self.set_scheduled_task( - cluster=cluster, - command=['python3.8', '-m', 'dataall.tasks.tables_syncer'], - container_id=f'container', - ecr_repository=ecr_repository, - environment={ - 'AWS_REGION': self.region, - 'envname': envname, - 
'LOGLEVEL': 'INFO', - }, - image_tag=cdkproxy_image_tag, - log_group=self.create_log_group( - envname, resource_prefix, log_group_name='tables-syncer' - ), - schedule_expression=Schedule.expression('rate(15 minutes)'), - scheduled_task_id=f'{resource_prefix}-{envname}-tables-syncer-schedule', - task_id=f'{resource_prefix}-{envname}-tables-syncer', - task_role=self.task_role, - vpc=vpc, - security_group=self.scheduled_tasks_sg, - prod_sizing=prod_sizing, - ) - catalog_indexer_task, catalog_indexer_task_def = self.set_scheduled_task( cluster=cluster, - command=['python3.8', '-m', 'dataall.tasks.catalog_indexer'], + command=['python3.8', '-m', 'dataall.modules.catalog.tasks.catalog_indexer_task'], container_id=f'container', ecr_repository=ecr_repository, - environment={ - 'AWS_REGION': self.region, - 'envname': envname, - 'LOGLEVEL': 'INFO', - }, - image_tag=cdkproxy_image_tag, + environment=self._create_env('INFO'), + image_tag=self._cdkproxy_image_tag, log_group=self.create_log_group( envname, resource_prefix, log_group_name='catalog-indexer' ), @@ -191,15 +175,11 @@ def __init__( stacks_updater, stacks_updater_task_def = self.set_scheduled_task( cluster=cluster, - command=['python3.8', '-m', 'dataall.tasks.stacks_updater'], + command=['python3.8', '-m', 'dataall.core.environment.tasks.env_stacks_updater'], container_id=f'container', ecr_repository=ecr_repository, - environment={ - 'AWS_REGION': self.region, - 'envname': envname, - 'LOGLEVEL': 'INFO', - }, - image_tag=cdkproxy_image_tag, + environment=self._create_env('INFO'), + image_tag=self._cdkproxy_image_tag, log_group=self.create_log_group( envname, resource_prefix, log_group_name='stacks-updater' ), @@ -219,82 +199,59 @@ def __init__( string_value=stacks_updater_task_def.task_definition_arn, ) - update_bucket_policies_task, update_bucket_task_def = self.set_scheduled_task( - cluster=cluster, - command=['python3.8', '-m', 'dataall.tasks.bucket_policy_updater'], - container_id=f'container', - ecr_repository=ecr_repository, - environment={ - 'AWS_REGION': self.region, - 'envname': envname, - 'LOGLEVEL': 'INFO', - }, - image_tag=cdkproxy_image_tag, - log_group=self.create_log_group( - envname, resource_prefix, log_group_name='policies-updater' - ), - schedule_expression=Schedule.expression('rate(15 minutes)'), - scheduled_task_id=f'{resource_prefix}-{envname}-policies-updater-schedule', - task_id=f'{resource_prefix}-{envname}-policies-updater', - task_role=self.task_role, - vpc=vpc, - security_group=self.scheduled_tasks_sg, - prod_sizing=prod_sizing, + ssm.StringParameter( + self, + f'ECSClusterNameParam{envname}', + parameter_name=f'/dataall/{envname}/ecs/cluster/name', + string_value=cluster.cluster_name, ) - subscriptions_task, subscription_task_def = self.set_scheduled_task( - cluster=cluster, - command=[ - 'python3.8', - '-m', - 'dataall.tasks.subscriptions.subscription_service', - ], - container_id=f'container', - ecr_repository=ecr_repository, - environment={ - 'AWS_REGION': self.region, - 'envname': envname, - 'LOGLEVEL': 'INFO', - }, - image_tag=cdkproxy_image_tag, - log_group=self.create_log_group( - envname, resource_prefix, log_group_name='subscriptions' + ssm.StringParameter( + self, + f'VPCPrivateSubnetsParam{envname}', + parameter_name=f'/dataall/{envname}/ecs/private_subnets', + string_value=','.join( + vpc.select_subnets( + subnet_type=ec2.SubnetType.PRIVATE_WITH_NAT + ).subnet_ids ), - schedule_expression=Schedule.expression('rate(15 minutes)'), - scheduled_task_id=f'{resource_prefix}-{envname}-subscriptions-schedule', 
- task_id=f'{resource_prefix}-{envname}-subscriptions', - task_role=self.task_role, - vpc=vpc, - security_group=self.scheduled_tasks_sg, - prod_sizing=prod_sizing, ) + self.ecs_cluster = cluster + self.ecs_task_definitions_families = [ + cdkproxy_task_definition.family, + catalog_indexer_task.task_definition.family, + ] + + self.add_sync_dataset_table_task() + self.add_bucket_policy_updater_task() + self.add_subscription_task() + self.add_share_management_task() + + @run_if("modules.datasets.active") + def add_share_management_task(self): share_management_task_definition = ecs.FargateTaskDefinition( self, - f'{resource_prefix}-{envname}-share-manager', + f'{self._resource_prefix}-{self._envname}-share-manager', cpu=1024, memory_limit_mib=2048, task_role=self.task_role, execution_role=self.task_role, - family=f'{resource_prefix}-{envname}-share-manager', + family=f'{self._resource_prefix}-{self._envname}-share-manager', ) share_management_container = share_management_task_definition.add_container( - f'ShareManagementTaskContainer{envname}', + f'ShareManagementTaskContainer{self._envname}', container_name=f'container', image=ecs.ContainerImage.from_ecr_repository( - repository=ecr_repository, tag=cdkproxy_image_tag + repository=self._ecr_repository, tag=self._cdkproxy_image_tag ), - environment={ - 'AWS_REGION': self.region, - 'envname': envname, - 'LOGLEVEL': 'DEBUG', - }, - command=['python3.8', '-m', 'dataall.tasks.share_manager'], + environment=self._create_env('DEBUG'), + command=['python3.8', '-m', 'dataall.modules.dataset_sharing.tasks.share_manager_task'], logging=ecs.LogDriver.aws_logs( stream_prefix='task', log_group=self.create_log_group( - envname, resource_prefix, log_group_name='share-manager' + self._envname, self._resource_prefix, log_group_name='share-manager' ), ), readonly_root_filesystem=True, @@ -302,45 +259,88 @@ def __init__( ssm.StringParameter( self, - f'ShareManagementTaskDef{envname}', - parameter_name=f'/dataall/{envname}/ecs/task_def_arn/share_management', + f'ShareManagementTaskDef{self._envname}', + parameter_name=f'/dataall/{self._envname}/ecs/task_def_arn/share_management', string_value=share_management_task_definition.task_definition_arn, ) ssm.StringParameter( self, - f'ShareManagementContainerParam{envname}', - parameter_name=f'/dataall/{envname}/ecs/container/share_management', + f'ShareManagementContainerParam{self._envname}', + parameter_name=f'/dataall/{self._envname}/ecs/container/share_management', string_value=share_management_container.container_name, ) + self.ecs_task_definitions_families.append(share_management_task_definition.family) - ssm.StringParameter( - self, - f'ECSClusterNameParam{envname}', - parameter_name=f'/dataall/{envname}/ecs/cluster/name', - string_value=cluster.cluster_name, + @run_if("modules.datasets.active") + def add_subscription_task(self): + subscriptions_task, subscription_task_def = self.set_scheduled_task( + cluster=self.ecs_cluster, + command=[ + 'python3.8', + '-m', + 'dataall.modules.datasets.tasks.dataset_subscription_task', + ], + container_id=f'container', + ecr_repository=self._ecr_repository, + environment=self._create_env('INFO'), + image_tag=self._cdkproxy_image_tag, + log_group=self.create_log_group( + self._envname, self._resource_prefix, log_group_name='subscriptions' + ), + schedule_expression=Schedule.expression('rate(15 minutes)'), + scheduled_task_id=f'{self._resource_prefix}-{self._envname}-subscriptions-schedule', + task_id=f'{self._resource_prefix}-{self._envname}-subscriptions', + 
task_role=self.task_role, + vpc=self._vpc, + security_group=self.scheduled_tasks_sg, + prod_sizing=self._prod_sizing, ) + self.ecs_task_definitions_families.append(subscriptions_task.task_definition.family) - ssm.StringParameter( - self, - f'VPCPrivateSubnetsParam{envname}', - parameter_name=f'/dataall/{envname}/ecs/private_subnets', - string_value=','.join( - vpc.select_subnets( - subnet_type=ec2.SubnetType.PRIVATE_WITH_NAT - ).subnet_ids + @run_if("modules.datasets.active") + def add_bucket_policy_updater_task(self): + update_bucket_policies_task, update_bucket_task_def = self.set_scheduled_task( + cluster=self.ecs_cluster, + command=['python3.8', '-m', 'dataall.modules.datasets.tasks.bucket_policy_updater'], + container_id=f'container', + ecr_repository=self._ecr_repository, + environment=self._create_env('DEBUG'), + image_tag=self._cdkproxy_image_tag, + log_group=self.create_log_group( + self._envname, self._resource_prefix, log_group_name='policies-updater' ), + schedule_expression=Schedule.expression('rate(15 minutes)'), + scheduled_task_id=f'{self._resource_prefix}-{self._envname}-policies-updater-schedule', + task_id=f'{self._resource_prefix}-{self._envname}-policies-updater', + task_role=self.task_role, + vpc=self._vpc, + security_group=self.scheduled_tasks_sg, + prod_sizing=self._prod_sizing, ) + self.ecs_task_definitions_families.append(update_bucket_policies_task.task_definition.family) - self.ecs_cluster = cluster - self.ecs_task_definitions_families = [ - cdkproxy_task_definition.family, - sync_tables_task.task_definition.family, - update_bucket_policies_task.task_definition.family, - catalog_indexer_task.task_definition.family, - share_management_task_definition.family, - subscriptions_task.task_definition.family, - ] + @run_if("modules.datasets.active") + def add_sync_dataset_table_task(self): + sync_tables_task, sync_tables_task_def = self.set_scheduled_task( + cluster=self.ecs_cluster, + command=['python3.8', '-m', 'dataall.modules.datasets.tasks.tables_syncer'], + container_id=f'container', + ecr_repository=self._ecr_repository, + environment=self._create_env('INFO'), + image_tag=self._cdkproxy_image_tag, + log_group=self.create_log_group( + self._envname, self._resource_prefix, log_group_name='tables-syncer' + ), + schedule_expression=Schedule.expression('rate(15 minutes)'), + scheduled_task_id=f'{self._resource_prefix}-{self._envname}-tables-syncer-schedule', + task_id=f'{self._resource_prefix}-{self._envname}-tables-syncer', + task_role=self.task_role, + vpc=self._vpc, + security_group=self.scheduled_tasks_sg, + prod_sizing=self._prod_sizing, + ) + self.ecs_task_definitions_families.append(sync_tables_task.task_definition.family) def create_ecs_security_groups(self, envname, resource_prefix, vpc, vpce_connection, s3_prefix_list, lambdas): scheduled_tasks_sg = ec2.SecurityGroup( @@ -352,7 +352,7 @@ def create_ecs_security_groups(self, envname, resource_prefix, vpc, vpce_connect disable_inline_rules=True, ) - # Requires RAM Access via NAT + # Requires RAM Access via NAT share_manager_sg = ec2.SecurityGroup( self, f'ShareManagerSG{envname}', @@ -361,8 +361,8 @@ def create_ecs_security_groups(self, envname, resource_prefix, vpc, vpce_connect allow_all_outbound=False, disable_inline_rules=True, ) - - for sg in [scheduled_tasks_sg,share_manager_sg]: + + for sg in [scheduled_tasks_sg, share_manager_sg]: sg_connection = ec2.Connections(security_groups=[sg]) # Add ECS to VPC Endpoint Connection if vpce_connection: @@ -607,7 +607,7 @@ def set_scheduled_task( vpc, 
security_group, prod_sizing, - ) -> ecs_patterns.ScheduledFargateTask: + ) -> (ecs.FargateTaskDefinition, ecs_patterns.ScheduledFargateTask): task = ecs.FargateTaskDefinition( self, task_id, @@ -650,3 +650,11 @@ def set_scheduled_task( @property def ecs_task_role(self) -> iam.Role: return self.task_role + + def _create_env(self, log_lvl) -> Dict: + return { + 'AWS_REGION': self.region, + 'envname': self._envname, + 'LOGLEVEL': log_lvl, + 'config_location': '/config.json' + } diff --git a/deploy/stacks/deploy_config.py b/deploy/stacks/deploy_config.py new file mode 100644 index 000000000..4d87d754a --- /dev/null +++ b/deploy/stacks/deploy_config.py @@ -0,0 +1,71 @@ +"""Reads and encapsulates the configuration provided in config.json""" +import json +import copy +from typing import Any, Dict +import os +from pathlib import Path + + +class _DeployConfig: + """A container of properties in the configuration file + and any other that can be specified/overwritten later in the application""" + + def __init__(self): + self._config = self._read_config_file() + + def get_property(self, key: str, default=None) -> Any: + """ + Retrieves a copy of the property + Config uses dot as a separator to navigate easy to the needed property e.g. + some.needed.parameter is equivalent of config["some"]["needed"]["parameter"] + It enables fast navigation for any nested parameter + """ + res = self._config + + props = key.split(".") + + # going through the hierarchy of json + for prop in props: + if prop not in res: + if default is not None: + return default + + raise KeyError(f"Couldn't find a property {key} in the config") + + res = res[prop] + return copy.deepcopy(res) + + def set_property(self, key: str, value: Any) -> None: + """ + Sets a property into the config + If the property has dot it will be split to nested levels + """ + conf = self._config + props = key.split(".") + + for i, prop in enumerate(props): + if i == len(props) - 1: + conf[prop] = value + else: + conf[prop] = conf[prop] if prop in conf is not None else {} + conf = conf[prop] + + @classmethod + def _read_config_file(cls) -> Dict[str, Any]: + with open(cls._path_to_file()) as config_file: + return json.load(config_file) + + @staticmethod + def _path_to_file() -> str: + """Tries to get a property. 
If not defined it tries to resolve the config from the current file's directory""" + path = os.getenv("config_location") + if path: + return path + return os.path.join(Path(__file__).parents[2], "config.json") + + def __repr__(self): + return str(self._config) + + +deploy_config = _DeployConfig() + diff --git a/deploy/stacks/pipeline.py b/deploy/stacks/pipeline.py index e6159055c..3226fd91d 100644 --- a/deploy/stacks/pipeline.py +++ b/deploy/stacks/pipeline.py @@ -449,7 +449,8 @@ def set_quality_gate_stage(self): 'cd frontend', f'aws codeartifact login --tool npm --repository {self.codeartifact.codeartifact_npm_repo_name} --domain {self.codeartifact.codeartifact_domain_name} --domain-owner {self.codeartifact.domain.attr_owner}', 'npm install', - 'npm run lint', + 'npm run copy-config', + 'npm run lint -- --quiet', ], role=self.baseline_codebuild_role, vpc=self.vpc, @@ -498,6 +499,7 @@ def set_quality_gate_stage(self): commands=[ 'mkdir -p source_build', 'mv backend ./source_build/', + 'mv config.json ./source_build/', 'cd source_build/ && zip -r ../source_build/source_build.zip *', f'aws s3api put-object --bucket {self.pipeline_bucket.bucket_name} --key source_build.zip --body source_build.zip', ], @@ -517,6 +519,7 @@ def set_quality_gate_stage(self): commands=[ 'mkdir -p source_build', 'mv backend ./source_build/', + 'mv config.json ./source_build/', 'cd source_build/ && zip -r ../source_build/source_build.zip *', f'aws s3api put-object --bucket {self.pipeline_bucket.bucket_name} --key source_build.zip --body source_build.zip', ], @@ -839,6 +842,7 @@ def set_albfront_stage(self, target_env, repository_name): image_tag=self.image_tag, custom_domain=target_env['custom_domain'], ip_ranges=target_env.get('ip_ranges'), + resource_prefix=self.resource_prefix, ), pre=[ pipelines.CodeBuildStep( @@ -873,7 +877,6 @@ def set_albfront_stage(self, target_env, repository_name): 'pip install beautifulsoup4', 'python deploy/configs/frontend_config.py', 'unset AWS_PROFILE', - 'cd frontend', f'docker build -f docker/prod/Dockerfile --build-arg REACT_APP_STAGE={target_env["envname"]} --build-arg DOMAIN={target_env.get("custom_domain", {}).get("name")} -t $IMAGE_TAG:$IMAGE_TAG .', f'aws ecr get-login-password --region {self.region} | docker login --username AWS --password-stdin {self.account}.dkr.ecr.{self.region}.amazonaws.com', 'docker tag $IMAGE_TAG:$IMAGE_TAG $REPOSITORY_URI:$IMAGE_TAG', diff --git a/deploy/stacks/run_if.py b/deploy/stacks/run_if.py new file mode 100644 index 000000000..909bd6e33 --- /dev/null +++ b/deploy/stacks/run_if.py @@ -0,0 +1,37 @@ +from .deploy_config import deploy_config + + +def _process_func(func): + """Helper function that helps decorate methods/functions""" + def no_decorated(f): + return f + + static_func = False + try: + fn = func.__func__ + static_func = True + except AttributeError: + fn = func + + # returns a function to call and static decorator if applied + return fn, staticmethod if static_func else no_decorated + + +def run_if(active_property: str): + """ + Decorator that check whether a method should be active or not. 
+ The active_property must be a boolean value in the config file + """ + def decorator(f): + fn, fn_decorator = _process_func(f) + + def decorated(*args, **kwargs): + is_active = deploy_config.get_property(active_property, False) + if not is_active: + return None + + return fn(*args, **kwargs) + + return fn_decorator(decorated) + + return decorator diff --git a/docker-compose.yaml b/docker-compose.yaml index bd41b1925..2e4751a56 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -16,11 +16,13 @@ services: - db environment: envname: 'dkrcompose' + config_location: "/config.json" AWS_REGION: "${AWS_REGION:-eu-west-1}" AWS_DEFAULT_REGION: "${AWS_DEFAULT_REGION:-eu-west-1}" volumes: - ./backend:/code - $HOME/.aws/credentials:/root/.aws/credentials:ro + - ./config.json:/config.json restart: on-failure:60 platform: @@ -30,18 +32,20 @@ services: build: context: ./backend dockerfile: docker/dev/Dockerfile - entrypoint: /bin/bash -c "../build/wait-for-it.sh elasticsearch:9200 -t 30 && python3.8 local.graphql.server.py" + entrypoint: /bin/bash -c "../build/wait-for-it.sh elasticsearch:9200 -t 30 && python3.8 local_graphql_server.py" expose: - 5000 ports: - 5000:5000 environment: envname: 'dkrcompose' + config_location: "/config.json" AWS_REGION: "${AWS_REGION:-eu-west-1}" AWS_DEFAULT_REGION: "${AWS_DEFAULT_REGION:-eu-west-1}" volumes: - ./backend:/code - $HOME/.aws/credentials:/root/.aws/credentials:ro + - ./config.json:/config.json depends_on: - db - elasticsearch @@ -89,12 +93,12 @@ services: frontend: build: - context: ./frontend - dockerfile: docker/dev/Dockerfile + context: . + dockerfile: frontend/docker/dev/Dockerfile deploy: resources: limits: - memory: 1024M + memory: 4096M reservations: memory: 128M ports: diff --git a/documentation/userguide/docs/environments.md b/documentation/userguide/docs/environments.md index 864ddb57c..70f2f55ba 100644 --- a/documentation/userguide/docs/environments.md +++ b/documentation/userguide/docs/environments.md @@ -102,15 +102,6 @@ Enterprise option as show below: ![quicksight](pictures/environments/boot_qs_2.png#zoom#shadow) -After you've successfully subscribed to QuickSight, we need to trust *data.all* domain on QuickSight -to enable Dashboard Embedding on *data.all* UI. To do that go to: - -1. Manage QuickSight -2. Domains and Embedding -3. Put *data.all* domain and check include subdomains -4. Save - -![quicksight_domain](pictures/environments/boot_qs_3.png#zoom#shadow) ### 5. (For ML Studio) Delete or adapt the default VPC If ML Studio is enabled, data.all checks if there is an existing SageMaker Studio domain. If there is an existing domain @@ -202,7 +193,6 @@ the environment organization. There are several tabs just below the environment - Teams: list of all teams onboarded to this environment. - Datasets: list of all datasets owned and shared with for this environment - Networks: VPCs created and owned by the environment -- Warehouses: Redshift clusters imported or created in this environment - Subscriptions: SNS topic subscriptions enabled or disabled in the environment - Tags: editable key-value tags - Stack: CloudFormation stack details and logs @@ -282,9 +272,9 @@ disabled as appears in the following picture. ![](pictures/environments/env_teams_2.png#zoom#shadow) When the invitation is saved, the environment CloudFormation stack gets automatically updated and creates a -new IAM role for the new team. 
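
The run_if decorator and deploy_config reader above gate ContainerStack methods on flags in config.json, and the same pattern can gate any other stack method. A hypothetical usage sketch, assuming the module sits alongside the other stack modules in deploy/stacks; the class and method names are invented, but both property keys appear in the config.json added in this change:

# Hypothetical stack method gated on config.json flags.
from .deploy_config import deploy_config
from .run_if import run_if


class ExampleNestedStack:
    @run_if('modules.datasets.active')
    def add_dataset_upload_task(self):
        # Only executes when config.json sets modules.datasets.active to true;
        # otherwise run_if short-circuits and the method returns None.
        uploads_enabled = deploy_config.get_property('modules.datasets.features.file_uploads', False)
        return uploads_enabled
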
The IAM role policies mapped to the permissions granted to the invited team -(e.g., a team invited without "Create Redshift clusters" permission will not have -redshift permissions on the associated IAM role).To remove a group, in the *Actions* column select the minus icon. +new IAM role for the new team. The IAM role policies are mapped to the permissions granted to the invited team +(e.g., a team invited without "Create ML Studio" permission will not have +SageMaker permissions on the associated IAM role). To remove a group, in the *Actions* column select the minus icon. !!! warning "Automated permission assignment" diff --git a/documentation/userguide/docs/redshift.md b/documentation/userguide/docs/redshift.md deleted file mode 100644 index a1b03a96e..000000000 --- a/documentation/userguide/docs/redshift.md +++ /dev/null @@ -1,71 +0,0 @@ -# **Integrations** - -## **Data Warehouse** -Datahub natively supports Amazon Redshift, -which allows you to integrate seamlessly your Redshift cluster with your Datahub environment. - -### **Create Redshift cluster** - -To create an Amazon Redshift cluster: - -1. On left pane under **Play!** choose **Warehouses** then **Create** -2. The **creation form** opens. -3. Choose the environment where the cluster will - be created. -4. Fill in the form with the cluster properties (The AWS VPC must have private subnets) -5. Save the form -![create_cluster](pictures/integrations/create_cluster.png#zoom#shadow) - -!!! success - **You created a new Amazon Redshift cluster!** - -### **Import Redshift cluster** - -If you already have data stored on Amazon S3 buckets, Datahub got you covered with the import feature. - -To import a dataset: - -1. On left pane choose **Contribute** then **Import** -2. The **dataset form** opens. -3. Choose the environment where the dataset will - be created. -4. In **Dataset label**, enter a name for your dataset. -5. Grab your Amazon S3 bucket name and put it on bucket name field. - -![import_dataset](pictures/integrations/import_cluster.png#zoom#shadow) -!!! success - **You imported an existing Redshift cluster to Datahub!** - -### 📥 **Load datasets to your cluster with Spectrum** - -Datahub offers natively an integration with Redshift Spectrum -to load your data from Amazon S3 to your cluster. -To load a dataset: - -1. Select your Redshift cluster -2. Go to **Datasets** tab. -3. Click on **Load Datasets** and choose the dataset you want to load. - ![load_dataset](pictures/integrations/load_dataset.png#zoom#shadow) -4. Use the connection details on the connection tab to access your cluster database - ![connection](pictures/integrations/connection.png#zoom#shadow) - ![connect_redshift](pictures/integrations/connect_redshift.png#zoom#shadow) -5. Query you dataset on Redshift. - ![query_loaded_dataset](pictures/integrations/query_loaded_dataset.png#zoom#shadow) - -### 🖨️ **Copy dataset table to your cluster with COPY command** -As data subscriber, Datahub can automate copying data from S3 to your Redshift cluster, -when data producers publish an update. - -🧙 Load the dataset first, then manage its tables copy subscriptions. - -To manage data copy: -1. Select your Redshift cluster -2. Go to **Tables** tab. - ![enable_copy](pictures/integrations/enable_copy.png#zoom#shadow) -3. Click on **Subscribe** and choose the table you want to copy on the cluster and the target schema where - the table will be created. 
-!!!abstract "COPY confirmed" - Now your table will have the latest snapshot of data from the producers **at each update.** - -The latest table data snapshot is created on the assigned schema -![copy_table.png](pictures/integrations/copy_table.png#zoom#shadow) diff --git a/documentation/userguide/mkdocs.yml b/documentation/userguide/mkdocs.yml index 13373a05a..222764107 100644 --- a/documentation/userguide/mkdocs.yml +++ b/documentation/userguide/mkdocs.yml @@ -16,7 +16,6 @@ nav: - ML Studio: mlstudio.md - Pipelines: pipelines.md - Dashboards: dashboards.md - # - Warehouses: redshift.md - Security: security.md - Monitoring: monitoring.md - Labs: @@ -26,7 +25,6 @@ nav: #- Exploration with Notebooks and Worksheets: lab_template.md #- Creating and sharing dashboards: lab_template.md #- Using ML Studio: lab_template.md - #- Using Redshift clusters: lab_template.md use_directory_urls: false diff --git a/frontend/.dockerignore b/frontend/.dockerignore new file mode 100644 index 000000000..bb64915a7 --- /dev/null +++ b/frontend/.dockerignore @@ -0,0 +1,3 @@ +node_modules +build + diff --git a/frontend/.gitignore b/frontend/.gitignore index dbcc37c82..0e80919f0 100644 --- a/frontend/.gitignore +++ b/frontend/.gitignore @@ -10,6 +10,7 @@ # production /build +/src/generated # misc .DS_Store @@ -24,3 +25,4 @@ npm-debug.log* yarn-debug.log* yarn-error.log* .idea +.yarn diff --git a/frontend/docker/dev/Dockerfile b/frontend/docker/dev/Dockerfile index aa29c376c..c44983ed1 100644 --- a/frontend/docker/dev/Dockerfile +++ b/frontend/docker/dev/Dockerfile @@ -1,38 +1,24 @@ -FROM public.ecr.aws/amazonlinux/amazonlinux:2 +FROM public.ecr.aws/docker/library/node:18-slim -ARG NODE_VERSION=16 -ARG NGINX_VERSION=1.12 -ARG NVM_VERSION=v0.37.0 +WORKDIR /app -RUN yum update -y && \ - yum install -y tar gzip openssl && \ - yum clean all -y -RUN amazon-linux-extras install nginx$NGINX_VERSION +COPY ./frontend/package.json ./ +COPY ./frontend/yarn.lock ./ -RUN touch ~/.bashrc +# Install packages, use --ignore-scripts to not call postinstall, as it causes this step to fail because config.json +# is not copied yet, and copying it here will trigger new install if config changes, which is inconvenient for development. +RUN yarn install --ignore-scripts -RUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/$NVM_VERSION/install.sh | bash -RUN . ~/.nvm/nvm.sh && nvm install node -RUN echo '. ~/.nvm/nvm.sh' >> ~/.bashrc +COPY ./frontend/docker/dev/.env . +COPY ./frontend . -RUN . ~/.nvm/nvm.sh && npm install -g npm yarn +# Copy config.json to docker root, because app scripts read it from ".." +COPY ./config.json / -COPY package.json yarn.lock ./ +# Disable linting before starting the server +ENV DISABLE_ESLINT_PLUGIN=true -RUN . ~/.nvm/nvm.sh && yarn install +# Set the port to serve the application +ENV PORT=80 -ENV PATH="./node_modules/.bin:$PATH" - -COPY ./docker/dev/.env ./ - -COPY ./docker/dev/nginx.config /etc/nginx/nginx.template - -RUN cp /etc/nginx/nginx.template /etc/nginx/nginx.conf - -COPY . ./ - -RUN . ~/.nvm/nvm.sh && yarn build - -RUN cp -a build/. /usr/share/nginx/html/ - -CMD ["nginx", "-g", "daemon off;"] +CMD yarn start diff --git a/frontend/docker/prod/Dockerfile b/frontend/docker/prod/Dockerfile index 10b4d4966..d66c3064d 100644 --- a/frontend/docker/prod/Dockerfile +++ b/frontend/docker/prod/Dockerfile @@ -19,13 +19,15 @@ RUN echo '. ~/.nvm/nvm.sh' >> ~/.bashrc RUN . 
~/.nvm/nvm.sh && npm install -g npm yarn -COPY package.json yarn.lock ./ +WORKDIR /app +COPY ./frontend/package.json ./frontend/yarn.lock ./ +COPY ./config.json / RUN . ~/.nvm/nvm.sh && yarn install ENV PATH="./node_modules/.bin:$PATH" -COPY ./docker/prod/nginx.config /etc/nginx/nginx.template +COPY ./frontend/docker/prod/nginx.config /etc/nginx/nginx.template ENV SERVERNAME=$DOMAIN @@ -39,7 +41,7 @@ RUN cp /etc/nginx/nginx.template /etc/nginx/nginx.conf RUN cat /etc/nginx/nginx.conf -COPY . ./ +COPY ./frontend ./ RUN . ~/.nvm/nvm.sh && yarn build diff --git a/frontend/jsconfig.json b/frontend/jsconfig.json new file mode 100644 index 000000000..93d961db9 --- /dev/null +++ b/frontend/jsconfig.json @@ -0,0 +1,14 @@ +{ + "compilerOptions": { + "baseUrl": "src", + "paths": { + "authentication/*": ["src/authentication/*"], + "design/*": ["src/design/*"], + "globalErrors/*": ["src/globalErrors/*"], + "modules/*": ["src/modules/*"], + "services/*": ["src/services/*"], + "utils/*": ["src/utils/*"], + "Shared/*": ["src/modules/Shared/*"] + } + } +} diff --git a/frontend/package.json b/frontend/package.json index 1974a548c..4b54b4bf5 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -6,10 +6,14 @@ "scripts": { "start": "react-scripts start", "build": "react-scripts build", + "copy-config": "mkdir -p ./src/generated; cp ../config.json ./src/generated/", + "postinstall": "yarn copy-config", + "prestart": "yarn copy-config", + "prebuild": "yarn copy-config", "test": "react-scripts test", "eject": "react-scripts eject", - "lint": "node_modules/.bin/eslint --ext js src", - "lint-fix": "node_modules/.bin/eslint --fix --ext js src" + "lint": "eslint --ext js src", + "lint:fix": "eslint --fix --ext js src" }, "dependencies": { "@apollo/client": "^3.3.19", @@ -18,11 +22,11 @@ "@emotion/styled": "^11.8.1", "@monaco-editor/react": "^4.3.1", "@mui/icons-material": "^5.5.1", - "@mui/x-date-pickers": "^5.0.0", "@mui/lab": "^5.0.0-alpha.74", "@mui/material": "^5.5.2", "@mui/styles": "^5.5.1", "@mui/x-data-grid": "^5.7.0", + "@mui/x-date-pickers": "^5.0.0", "@reduxjs/toolkit": "^1.8.0", "@testing-library/jest-dom": "^5.16.2", "@testing-library/react": "^12.1.4", @@ -60,7 +64,7 @@ "web-vitals": "^2.1.4", "yup": "^0.32.11" }, - "overrides" : { + "overrides": { "@appbaseio/reactivesearch": { "react-redux": "^7.2.6" } @@ -70,13 +74,84 @@ }, "devDependencies": { "env-cmd": "^10.1.0", - "prettier": "^2.6.1" + "eslint-config-prettier": "^8.8.0", + "eslint-plugin-import": "^2.27.5", + "eslint-import-resolver-alias": "^1.1.2", + "eslint-plugin-prettier": "^4.2.1", + "prettier": "2.8.7", + "watch": "^1.0.2" }, "eslintConfig": { + "plugins": [ + "prettier", + "import" + ], "extends": [ "react-app", - "react-app/jest" - ] + "react-app/jest", + "plugin:prettier/recommended", + "plugin:import/recommended" + ], + "settings": { + "import/resolver": { + "alias": { + "map": [ + [ + "authentication", + "./src/authentication" + ], + [ + "design", + "./src/design" + ], + [ + "globalErrors", + "./src/globalErrors" + ], + [ + "modules", + "./src/modules" + ], + [ + "services", + "./src/services" + ], + [ + "utils", + "./src/utils" + ], + [ + "Shared", + "./src/modules/Shared" + ] + ], + "extensions": [ + ".js", + ".jsx", + ".json" + ] + } + } + }, + "rules": { + "no-unused-vars": "error", + "no-const-assign": "error", + "eqeqeq": "error", + "no-console": [ + "error", + { + "allow": [ + "error", + "info" + ] + } + ], + "jsx-quotes": [ + "error", + "prefer-double" + ], + "import/no-default-export": "warn" + } }, 
"browserslist": { "production": [ @@ -89,5 +164,8 @@ "last 1 firefox version", "last 1 safari version" ] + }, + "engines": { + "yarn": "^1.22.19" } } diff --git a/frontend/src/App.js b/frontend/src/App.js index 22275a4da..c8483fef9 100644 --- a/frontend/src/App.js +++ b/frontend/src/App.js @@ -1,15 +1,17 @@ -import { useRoutes } from 'react-router-dom'; -import { SnackbarProvider } from 'notistack'; import { ThemeProvider } from '@mui/material'; -import GlobalStyles from './components/GlobalStyles'; -import SplashScreen from './components/SplashScreen'; -import useAuth from './hooks/useAuth'; -import useScrollReset from './hooks/useScrollReset'; -import useSettings from './hooks/useSettings'; +import { SnackbarProvider } from 'notistack'; +import { useRoutes } from 'react-router-dom'; +import { useAuth } from './authentication'; +import { + GlobalStyles, + SplashScreen, + createMaterialTheme, + useScrollReset, + useSettings +} from './design'; import routes from './routes'; -import { createMaterialTheme } from './theme'; -const App = () => { +export const App = () => { const content = useRoutes(routes); const { settings } = useSettings(); const auth = useAuth(); @@ -31,5 +33,3 @@ const App = () => { ); }; - -export default App; diff --git a/frontend/src/api/Activity/listUserActivity.js b/frontend/src/api/Activity/listUserActivity.js deleted file mode 100644 index f0c3b32e1..000000000 --- a/frontend/src/api/Activity/listUserActivity.js +++ /dev/null @@ -1,28 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listUserActivities = ({ filter }) => ({ - variables: { - filter - }, - query: gql` - query ListUserActivities($filter: ActivityFilter) { - listUserActivities(filter: $filter) { - count - page - pages - hasNext - hasPrevious - nodes { - activityUri - created - summary - targetUri - targetType - action - } - } - } - ` -}); - -export default listUserActivities; diff --git a/frontend/src/api/AirflowCluster/createAirflowProject.js b/frontend/src/api/AirflowCluster/createAirflowProject.js deleted file mode 100644 index fe9d750f2..000000000 --- a/frontend/src/api/AirflowCluster/createAirflowProject.js +++ /dev/null @@ -1,26 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createAirflowProject = ({ clusterUri, input }) => ({ - variables: { - clusterUri, - projectInput: input - }, - mutation: gql` - mutation createAirflowClusterProject( - $clusterUri: String! - $projectInput: NewAirflowProjectInput! - ) { - createAirflowClusterProject( - clusterUri: $clusterUri - projectInput: $projectInput - ) { - projectUri - name - label - created - } - } - ` -}); - -export default createAirflowProject; diff --git a/frontend/src/api/AirflowCluster/createCluster.js b/frontend/src/api/AirflowCluster/createCluster.js deleted file mode 100644 index 013be4f32..000000000 --- a/frontend/src/api/AirflowCluster/createCluster.js +++ /dev/null @@ -1,26 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createAirflowCluster = ({ environmentUri, input }) => ({ - variables: { - environmentUri, - clusterInput: input - }, - mutation: gql` - mutation createAirflowCluster( - $environmentUri: String! - $clusterInput: NewAirflowClusterInput! 
- ) { - createAirflowCluster( - environmentUri: $environmentUri - clusterInput: $clusterInput - ) { - clusterUri - name - label - created - } - } - ` -}); - -export default createAirflowCluster; diff --git a/frontend/src/api/AirflowCluster/deleteAirflowProject.js b/frontend/src/api/AirflowCluster/deleteAirflowProject.js deleted file mode 100644 index dc00a365c..000000000 --- a/frontend/src/api/AirflowCluster/deleteAirflowProject.js +++ /dev/null @@ -1,12 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteAirflowProject = ({ projectUri }) => ({ - variables: { projectUri }, - mutation: gql` - mutation deleteAirflowProject($projectUri: String) { - deleteAirflowProject(projectUri: $projectUri) - } - ` -}); - -export default deleteAirflowProject; diff --git a/frontend/src/api/AirflowCluster/deleteCluster.js b/frontend/src/api/AirflowCluster/deleteCluster.js deleted file mode 100644 index 765e3d22f..000000000 --- a/frontend/src/api/AirflowCluster/deleteCluster.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteAirflowCluster = (clusterUri) => ({ - variables: { - clusterUri - }, - mutation: gql` - mutation deleteAirflowCluster($clusterUri: String!) { - deleteAirflowCluster(clusterUri: $clusterUri) - } - ` -}); - -export default deleteAirflowCluster; diff --git a/frontend/src/api/AirflowCluster/getAirflowUIAccess.js b/frontend/src/api/AirflowCluster/getAirflowUIAccess.js deleted file mode 100644 index d6de355c5..000000000 --- a/frontend/src/api/AirflowCluster/getAirflowUIAccess.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getAirflowClusterWebLoginToken = (clusterUri) => ({ - variables: { - clusterUri - }, - query: gql` - query getAirflowClusterWebLoginToken($clusterUri: String!) { - getAirflowClusterWebLoginToken(clusterUri: $clusterUri) - } - ` -}); - -export default getAirflowClusterWebLoginToken; diff --git a/frontend/src/api/AirflowCluster/getCluster.js b/frontend/src/api/AirflowCluster/getCluster.js deleted file mode 100644 index 123170968..000000000 --- a/frontend/src/api/AirflowCluster/getCluster.js +++ /dev/null @@ -1,64 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getCluster = (clusterUri) => ({ - variables: { - clusterUri - }, - query: gql` - query GetAirflowCluster($clusterUri: String!) { - getAirflowCluster(clusterUri: $clusterUri) { - clusterUri - environmentUri - name - label - description - tags - owner - created - updated - AwsAccountId - region - clusterArn - clusterName - maxWorkers - environmentClass - kmsAlias - status - CFNStackName - CFNStackStatus - CFNStackArn - IAMRoleArn - subnetIds - vpc - securityGroupIds - userRoleForCluster - userRoleInEnvironment - imported - organization { - organizationUri - label - name - } - environment { - environmentUri - label - name - } - stack { - stack - status - stackUri - targetUri - accountid - region - stackid - link - outputs - resources - } - } - } - ` -}); - -export default getCluster; diff --git a/frontend/src/api/AirflowCluster/getClusterConsoleAccess.js b/frontend/src/api/AirflowCluster/getClusterConsoleAccess.js deleted file mode 100644 index b9dd19028..000000000 --- a/frontend/src/api/AirflowCluster/getClusterConsoleAccess.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getClusterConsoleAccess = (clusterUri) => ({ - variables: { - clusterUri - }, - query: gql` - query getAirflowClusterConsoleAccess($clusterUri: String!) 
{ - getAirflowClusterConsoleAccess(clusterUri: $clusterUri) - } - ` -}); - -export default getClusterConsoleAccess; diff --git a/frontend/src/api/AirflowCluster/importCluster.js b/frontend/src/api/AirflowCluster/importCluster.js deleted file mode 100644 index 1e89f0337..000000000 --- a/frontend/src/api/AirflowCluster/importCluster.js +++ /dev/null @@ -1,26 +0,0 @@ -import { gql } from 'apollo-boost'; - -const importAirflowCluster = ({ environmentUri, input }) => ({ - variables: { - environmentUri, - clusterInput: input - }, - mutation: gql` - mutation importAirflowCluster( - $environmentUri: String! - $clusterInput: ImportClusterInput! - ) { - importAirflowCluster( - environmentUri: $environmentUri - clusterInput: $clusterInput - ) { - clusterUri - name - label - created - } - } - ` -}); - -export default importAirflowCluster; diff --git a/frontend/src/api/AirflowCluster/listClusterProjects.js b/frontend/src/api/AirflowCluster/listClusterProjects.js deleted file mode 100644 index 57c8358c0..000000000 --- a/frontend/src/api/AirflowCluster/listClusterProjects.js +++ /dev/null @@ -1,37 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listAirflowProjects = ({ clusterUri, filter }) => ({ - variables: { - clusterUri, - filter - }, - query: gql` - query listAirflowClusterProjects( - $clusterUri: String! - $filter: AirflowProjectFilter - ) { - listAirflowClusterProjects(clusterUri: $clusterUri, filter: $filter) { - count - page - pages - hasNext - hasPrevious - nodes { - projectUri - name - packageName - codeRepositoryName - codeRepositoryLink - codeRepositoryStatus - codePipelineName - codePipelineArn - codePipelineLink - description - created - } - } - } - ` -}); - -export default listAirflowProjects; diff --git a/frontend/src/api/AirflowCluster/listEnvironmentAirflowClusters.js b/frontend/src/api/AirflowCluster/listEnvironmentAirflowClusters.js deleted file mode 100644 index 760d3d05f..000000000 --- a/frontend/src/api/AirflowCluster/listEnvironmentAirflowClusters.js +++ /dev/null @@ -1,70 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listEnvironmentAirflowClusters = (environmentUri, filter) => ({ - variables: { - environmentUri, - filter - }, - query: gql` - query listEnvironmentAirflowClusters( - $environmentUri: String! 
- $filter: AirflowClusterFilter - ) { - listEnvironmentAirflowClusters( - environmentUri: $environmentUri - filter: $filter - ) { - count - page - pages - hasNext - hasPrevious - nodes { - clusterUri - environmentUri - name - label - description - tags - owner - created - updated - AwsAccountId - region - clusterArn - clusterName - created - kmsAlias - status - CFNStackName - CFNStackStatus - CFNStackArn - IAMRoleArn - subnetIds - securityGroupIds - userRoleForCluster - userRoleInEnvironment - imported - dagS3Path - webServerUrl - stack { - status - } - vpc - organization { - organizationUri - label - name - } - environment { - environmentUri - label - name - } - } - } - } - ` -}); - -export default listEnvironmentAirflowClusters; diff --git a/frontend/src/api/AirflowCluster/searchClusters.js b/frontend/src/api/AirflowCluster/searchClusters.js deleted file mode 100644 index 76324a2bf..000000000 --- a/frontend/src/api/AirflowCluster/searchClusters.js +++ /dev/null @@ -1,60 +0,0 @@ -import { gql } from 'apollo-boost'; - -const searchAirflowClusters = (filter) => ({ - variables: { - filter - }, - query: gql` - query searchAirflowClusters($filter: AirflowClusterFilter) { - searchAirflowClusters(filter: $filter) { - count - page - pages - hasNext - hasPrevious - nodes { - clusterUri - environmentUri - name - label - description - tags - owner - created - updated - AwsAccountId - region - clusterArn - clusterName - created - kmsAlias - status - CFNStackName - CFNStackStatus - CFNStackArn - IAMRoleArn - subnetIds - securityGroupIds - userRoleForCluster - userRoleInEnvironment - imported - dagS3Path - webServerUrl - vpc - organization { - organizationUri - label - name - } - environment { - environmentUri - label - name - } - } - } - } - ` -}); - -export default searchAirflowClusters; diff --git a/frontend/src/api/ApiKeys/createApiKey.js b/frontend/src/api/ApiKeys/createApiKey.js deleted file mode 100644 index c2a5917b5..000000000 --- a/frontend/src/api/ApiKeys/createApiKey.js +++ /dev/null @@ -1,15 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createApiKey = () => ({ - mutation: gql` - mutation CreateApiKey { - createApiKey { - ApiKeyId - ApiKeySecret - expires - } - } - ` -}); - -export default createApiKey; diff --git a/frontend/src/api/ApiKeys/deleteApiKey.js b/frontend/src/api/ApiKeys/deleteApiKey.js deleted file mode 100644 index 9444c6900..000000000 --- a/frontend/src/api/ApiKeys/deleteApiKey.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteApiKey = (ApiKeyId) => ({ - variables: { - ApiKeyId - }, - mutation: gql` - mutation DeleteApiKey($ApiKeyId: String!) 
{ - deleteApiKey(ApiKeyId: $ApiKeyId) - } - ` -}); - -export default deleteApiKey; diff --git a/frontend/src/api/Catalog/searchDatasets.js b/frontend/src/api/Catalog/searchDatasets.js deleted file mode 100644 index 240f3d5ff..000000000 --- a/frontend/src/api/Catalog/searchDatasets.js +++ /dev/null @@ -1,55 +0,0 @@ -import { gql } from 'apollo-boost'; - -const searchDatasets = ({ filters, page, term }) => ({ - variables: { - filters, - page: page || 1, - term - }, - query: gql` - query SearchDatasets($filters: FacetFilters, $page: Int, $term: String) { - searchDatasets(filters: $filters, page: $page, term: $term) { - hits { - count - page - pageSize - hasNext - hasPrevious - pages - nodes { - datasetUri - label - owner - userRoleForDataset - created - region - description - tags - organization { - label - organizationUri - } - environment { - label - } - statistics { - tables - locations - } - } - } - facets { - groups { - dimensionName - items { - value - count - } - } - } - } - } - ` -}); - -export default searchDatasets; diff --git a/frontend/src/api/Dashboard/approveDashboardShare.js b/frontend/src/api/Dashboard/approveDashboardShare.js deleted file mode 100644 index 75fbe10ec..000000000 --- a/frontend/src/api/Dashboard/approveDashboardShare.js +++ /dev/null @@ -1,17 +0,0 @@ -import { gql } from 'apollo-boost'; - -const approveDashboardShare = (shareUri) => ({ - variables: { - shareUri - }, - mutation: gql` - mutation approveDashboardShare($shareUri: String!) { - approveDashboardShare(shareUri: $shareUri) { - shareUri - status - } - } - ` -}); - -export default approveDashboardShare; diff --git a/frontend/src/api/Dashboard/createDashboard.js b/frontend/src/api/Dashboard/createDashboard.js deleted file mode 100644 index 2a57752b5..000000000 --- a/frontend/src/api/Dashboard/createDashboard.js +++ /dev/null @@ -1,19 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createDashboard = ({ input }) => ({ - variables: { - input - }, - mutation: gql` - mutation CreateDashboard($input: NewDashboardInput) { - createDashboard(input: $input) { - dashboardUri - name - label - created - } - } - ` -}); - -export default createDashboard; diff --git a/frontend/src/api/Dashboard/deleteDashboard.js b/frontend/src/api/Dashboard/deleteDashboard.js deleted file mode 100644 index 7b9d71eac..000000000 --- a/frontend/src/api/Dashboard/deleteDashboard.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteDashboard = (dashboardUri) => ({ - variables: { - dashboardUri - }, - mutation: gql` - mutation importDashboard($dashboardUri: String!) 
{ - deleteDashboard(dashboardUri: $dashboardUri) - } - ` -}); - -export default deleteDashboard; diff --git a/frontend/src/api/Dashboard/getDashboardReaderSession.js b/frontend/src/api/Dashboard/getDashboardReaderSession.js deleted file mode 100644 index 5d3fe472f..000000000 --- a/frontend/src/api/Dashboard/getDashboardReaderSession.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getReaderSession = (dashboardUri) => ({ - variables: { - dashboardUri - }, - query: gql` - query GetReaderSession($dashboardUri: String) { - getReaderSession(dashboardUri: $dashboardUri) - } - ` -}); - -export default getReaderSession; diff --git a/frontend/src/api/Dashboard/rejectDashboardShare.js b/frontend/src/api/Dashboard/rejectDashboardShare.js deleted file mode 100644 index 052ec530e..000000000 --- a/frontend/src/api/Dashboard/rejectDashboardShare.js +++ /dev/null @@ -1,17 +0,0 @@ -import { gql } from 'apollo-boost'; - -const rejectDashboardShare = (shareUri) => ({ - variables: { - shareUri - }, - mutation: gql` - mutation rejectDashboardShare($shareUri: String!) { - rejectDashboardShare(shareUri: $shareUri) { - shareUri - status - } - } - ` -}); - -export default rejectDashboardShare; diff --git a/frontend/src/api/Dashboard/shareDashboard.js b/frontend/src/api/Dashboard/shareDashboard.js deleted file mode 100644 index 7643bda26..000000000 --- a/frontend/src/api/Dashboard/shareDashboard.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const shareDashboard = (dashboardUri, principalId) => ({ - variables: { - dashboardUri, - principalId - }, - mutation: gql` - mutation shareDashboard($dashboardUri: String!, $principalId: String!) { - shareDashboard(dashboardUri: $dashboardUri, principalId: $principalId) { - shareUri - status - } - } - ` -}); - -export default shareDashboard; diff --git a/frontend/src/api/DataPipeline/browseDataPipelineRepository.js b/frontend/src/api/DataPipeline/browseDataPipelineRepository.js deleted file mode 100644 index f7451886e..000000000 --- a/frontend/src/api/DataPipeline/browseDataPipelineRepository.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const browseDataPipelineRepository = (input) => ({ - variables: { - input - }, - query: gql` - query BrowseDataPipelineRepository($input: DataPipelineBrowseInput!) 
{ - browseDataPipelineRepository(input: $input) - } - ` -}); - -export default browseDataPipelineRepository; diff --git a/frontend/src/api/DataPipeline/createDataPipelineEnvironment.js b/frontend/src/api/DataPipeline/createDataPipelineEnvironment.js deleted file mode 100644 index 103cbe098..000000000 --- a/frontend/src/api/DataPipeline/createDataPipelineEnvironment.js +++ /dev/null @@ -1,24 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createDataPipelineEnvironment = ({ input }) => ({ - variables: { - input - }, - mutation: gql` - mutation createDataPipelineEnvironment($input: NewDataPipelineEnvironmentInput) { - createDataPipelineEnvironment(input: $input) { - envPipelineUri - environmentUri - environmentLabel - pipelineUri - pipelineLabel - stage - region - AwsAccountId - samlGroupName - } - } - ` -}); - -export default createDataPipelineEnvironment; diff --git a/frontend/src/api/DataPipeline/deleteDataPipelineEnvironment.js b/frontend/src/api/DataPipeline/deleteDataPipelineEnvironment.js deleted file mode 100644 index 732d718c9..000000000 --- a/frontend/src/api/DataPipeline/deleteDataPipelineEnvironment.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteDataPipelineEnvironment = ({ envPipelineUri }) => ({ - variables: { - envPipelineUri - }, - mutation: gql` - mutation deleteDataPipelineEnvironment( - $envPipelineUri: String! - ) { - deleteDataPipelineEnvironment( - envPipelineUri: $envPipelineUri - ) - } - ` -}); - -export default deleteDataPipelineEnvironment; diff --git a/frontend/src/api/DataPipeline/getDataPipelineCredsLinux.js b/frontend/src/api/DataPipeline/getDataPipelineCredsLinux.js deleted file mode 100644 index 0d3780f03..000000000 --- a/frontend/src/api/DataPipeline/getDataPipelineCredsLinux.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getDataPipelineCredsLinux = (DataPipelineUri) => ({ - variables: { - DataPipelineUri - }, - query: gql` - query GetDataPipelineCredsLinux($DataPipelineUri: String!) { - getDataPipelineCredsLinux(DataPipelineUri: $DataPipelineUri) - } - ` -}); - -export default getDataPipelineCredsLinux; diff --git a/frontend/src/api/DataPipeline/getDataPipelineDag.js b/frontend/src/api/DataPipeline/getDataPipelineDag.js deleted file mode 100644 index 609bd4636..000000000 --- a/frontend/src/api/DataPipeline/getDataPipelineDag.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getDataPipelineDag = (DataPipelineUri) => ({ - variables: { - DataPipelineUri - }, - query: gql` - query GetDataPipelineDag($DataPipelineUri: String!) { - getDataPipelineDag(DataPipelineUri: $DataPipelineUri) - } - ` -}); - -export default getDataPipelineDag; diff --git a/frontend/src/api/DataPipeline/getDataPipelineEnvironment.js b/frontend/src/api/DataPipeline/getDataPipelineEnvironment.js deleted file mode 100644 index 2289fb15b..000000000 --- a/frontend/src/api/DataPipeline/getDataPipelineEnvironment.js +++ /dev/null @@ -1,24 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getDataPipelineEnvironment = (envPipelineUri) => ({ - variables: { - envPipelineUri - }, - query: gql` - query getDataPipelineEnvironment($envPipelineUri: String!) 
{ - getDataPipelineEnvironment(envPipelineUri: $envPipelineUri) { - envPipelineUri - environmentUri - environmentLabel - pipelineUri - pipelineLabel - stage - region - AwsAccountId - SamlGroupName - } - } - ` -}); - -export default getDataPipelineEnvironment; diff --git a/frontend/src/api/DataPipeline/getDataPipelineFileContent.js b/frontend/src/api/DataPipeline/getDataPipelineFileContent.js deleted file mode 100644 index 2a7f0e0ed..000000000 --- a/frontend/src/api/DataPipeline/getDataPipelineFileContent.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getDataPipelineFileContent = (input) => ({ - variables: { - input - }, - query: gql` - query getDataPipelineFileContent($input: DataPipelineFileContentInput!) { - getDataPipelineFileContent(input: $input) - } - ` -}); - -export default getDataPipelineFileContent; diff --git a/frontend/src/api/DataPipeline/listDataPipelineBranches.js b/frontend/src/api/DataPipeline/listDataPipelineBranches.js deleted file mode 100644 index 7c6270929..000000000 --- a/frontend/src/api/DataPipeline/listDataPipelineBranches.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listDataPipelineBranches = (DataPipelineUri) => ({ - variables: { - DataPipelineUri - }, - query: gql` - query ListDataPipelineBranches($DataPipelineUri: String!) { - listDataPipelineBranches(DataPipelineUri: $DataPipelineUri) - } - ` -}); - -export default listDataPipelineBranches; diff --git a/frontend/src/api/DataPipeline/updateDataPipelineEnvironment.js b/frontend/src/api/DataPipeline/updateDataPipelineEnvironment.js deleted file mode 100644 index 7ea5ae26a..000000000 --- a/frontend/src/api/DataPipeline/updateDataPipelineEnvironment.js +++ /dev/null @@ -1,24 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateDataPipelineEnvironment = ({ input }) => ({ - variables: { - input - }, - mutation: gql` - mutation updateDataPipelineEnvironment($input: NewDataPipelineEnvironmentInput) { - updateDataPipelineEnvironment(input: $input) { - envPipelineUri - environmentUri - environmentLabel - pipelineUri - pipelineLabel - stage - region - AwsAccountId - samlGroupName - } - } - ` -}); - -export default updateDataPipelineEnvironment; diff --git a/frontend/src/api/DataPipeline/updatePipelineStack.js b/frontend/src/api/DataPipeline/updatePipelineStack.js deleted file mode 100644 index 17a071fe3..000000000 --- a/frontend/src/api/DataPipeline/updatePipelineStack.js +++ /dev/null @@ -1,12 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updatePipelineStack = (DataPipelineUri) => ({ - variables: { DataPipelineUri }, - mutation: gql` - mutation updatePipelineStack($DataPipelineUri: String!) 
{ - updatePipelineStack(DataPipelineUri: $DataPipelineUri) - } - ` -}); - -export default updatePipelineStack; diff --git a/frontend/src/api/Dataset/addDatasetContributor.js b/frontend/src/api/Dataset/addDatasetContributor.js deleted file mode 100644 index 5e104b3cc..000000000 --- a/frontend/src/api/Dataset/addDatasetContributor.js +++ /dev/null @@ -1,24 +0,0 @@ -import { gql } from 'apollo-boost'; - -const addDatasetContributor = ({ userName, datasetUri, role }) => ({ - variables: { userName, datasetUri, role }, - mutation: gql` - mutation AddDatasetContributor( - $datasetUri: String - $userName: String - $role: DatasetRole - ) { - addDatasetContributor( - datasetUri: $datasetUri - userName: $userName - role: $role - ) { - datasetUri - label - userRoleForDataset - } - } - ` -}); - -export default addDatasetContributor; diff --git a/frontend/src/api/Dataset/addDatasetLoader.js b/frontend/src/api/Dataset/addDatasetLoader.js deleted file mode 100644 index 22241074d..000000000 --- a/frontend/src/api/Dataset/addDatasetLoader.js +++ /dev/null @@ -1,15 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createDatasetLoader = ({ datasetUri, input }) => ({ - variables: { input, datasetUri }, - mutation: gql` - mutation createDatasetLoader( - $datasetUri: String - $input: NewDatasetLoaderInput - ) { - createDatasetLoader(datasetUri: $datasetUri, input: $input) - } - ` -}); - -export default createDatasetLoader; diff --git a/frontend/src/api/Dataset/addDatasetTable.js b/frontend/src/api/Dataset/addDatasetTable.js deleted file mode 100644 index faf6541d9..000000000 --- a/frontend/src/api/Dataset/addDatasetTable.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createDatasetTable = ({ datasetUri, input }) => ({ - variables: { datasetUri, input }, - mutation: gql` - mutation CreateDatasetTable( - $datasetUri: String - $input: NewDatasetTableInput - ) { - createDatasetTable(datasetUri: $datasetUri, input: $input) { - tableUri - name - } - } - ` -}); - -export default createDatasetTable; diff --git a/frontend/src/api/Dataset/addTablePermission.js b/frontend/src/api/Dataset/addTablePermission.js deleted file mode 100644 index 857b2fc57..000000000 --- a/frontend/src/api/Dataset/addTablePermission.js +++ /dev/null @@ -1,26 +0,0 @@ -import { gql } from 'apollo-boost'; - -const addTablePermissions = ({ tableUri, role, userName }) => ({ - variables: { - tableUri, - role, - userName - }, - mutation: gql` - mutation AddTablePermission( - $tableUri: String! - $userName: String! - $role: DatasetRole! - ) { - addTablePermission( - tableUri: $tableUri - userName: $userName - role: $role - ) { - tableUri - } - } - ` -}); - -export default addTablePermissions; diff --git a/frontend/src/api/Dataset/archiveDataset.js b/frontend/src/api/Dataset/archiveDataset.js deleted file mode 100644 index 686d13238..000000000 --- a/frontend/src/api/Dataset/archiveDataset.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const archiveDataset = (datasetUri) => ({ - variables: { - datasetUri - }, - mutation: gql` - mutation archiveDataset($datasetUri: String!) 
{ - archiveDataset(datasetUri: $datasetUri) - } - ` -}); - -export default archiveDataset; diff --git a/frontend/src/api/Dataset/generateDatasetAccessToken.js b/frontend/src/api/Dataset/generateDatasetAccessToken.js deleted file mode 100644 index 41a15554a..000000000 --- a/frontend/src/api/Dataset/generateDatasetAccessToken.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const generateDatasetAccessToken = (datasetUri) => ({ - variables: { - datasetUri - }, - mutation: gql` - mutation GenerateDatasetAccessToken($datasetUri: String!) { - generateDatasetAccessToken(datasetUri: $datasetUri) - } - ` -}); - -export default generateDatasetAccessToken; diff --git a/frontend/src/api/Dataset/getCrawlerStatus.js b/frontend/src/api/Dataset/getCrawlerStatus.js deleted file mode 100644 index 47d1c2258..000000000 --- a/frontend/src/api/Dataset/getCrawlerStatus.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getCrawlerStatus = ({ datasetUri, name }) => ({ - variables: { - datasetUri, - input: name - }, - query: gql`query GetCrawlerStatus($datasetUri:String, name:String){ - getCrawlerStatus(datasetUri:$datasetUri,name:$name){ - Name - AwsAccountId - region - status - } - }` -}); - -export default getCrawlerStatus; diff --git a/frontend/src/api/Dataset/getDatasetAdminConsoleUrl.js b/frontend/src/api/Dataset/getDatasetAdminConsoleUrl.js deleted file mode 100644 index 57bdfc984..000000000 --- a/frontend/src/api/Dataset/getDatasetAdminConsoleUrl.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getDatasetAssumeRoleUrl = (datasetUri) => ({ - variables: { - datasetUri - }, - query: gql` - query GetDatasetAssumeRoleUrl($datasetUri: String!) { - getDatasetAssumeRoleUrl(datasetUri: $datasetUri) - } - ` -}); - -export default getDatasetAssumeRoleUrl; diff --git a/frontend/src/api/Dataset/getDatasetETLCredentials.js b/frontend/src/api/Dataset/getDatasetETLCredentials.js deleted file mode 100644 index 616e579dd..000000000 --- a/frontend/src/api/Dataset/getDatasetETLCredentials.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getDatasetETLCredentials = (datasetUri) => ({ - variables: { - datasetUri - }, - query: gql` - query GetDatasetETLCredentials($datasetUri: String!) { - getDatasetETLCredentials(datasetUri: $datasetUri) - } - ` -}); - -export default getDatasetETLCredentials; diff --git a/frontend/src/api/Dataset/getDatasetPresignedUrl.js b/frontend/src/api/Dataset/getDatasetPresignedUrl.js deleted file mode 100644 index a1d44ec21..000000000 --- a/frontend/src/api/Dataset/getDatasetPresignedUrl.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getDatasetPresignedUrl = ({ datasetUri, input }) => ({ - variables: { - datasetUri, - input - }, - query: gql` - query GetDatasetPresignedUrl( - $datasetUri: String! 
- $input: DatasetPresignedUrlInput - ) { - getDatasetPresignedUrl(datasetUri: $datasetUri, input: $input) - } - ` -}); - -export default getDatasetPresignedUrl; diff --git a/frontend/src/api/Dataset/getDatasetSchema.js b/frontend/src/api/Dataset/getDatasetSchema.js deleted file mode 100644 index cb7204f9d..000000000 --- a/frontend/src/api/Dataset/getDatasetSchema.js +++ /dev/null @@ -1,46 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getDatasetSchema = ({ datasetUri, filter }) => ({ - variables: { - datasetUri, - filter - }, - query: gql` - query GetDataset($datasetUri: String!, $filter: DatasetTableFilter) { - getDataset(datasetUri: $datasetUri) { - tables(filter: $filter) { - count - page - pages - hasNext - hasPrevious - nodes { - tableUri - created - GlueTableName - GlueDatabaseName - description - stage - userRoleForTable - columns { - count - page - pages - hasNext - hasPrevious - nodes { - name - columnUri - label - typeName - columnType - } - } - } - } - } - } - ` -}); - -export default getDatasetSchema; diff --git a/frontend/src/api/Dataset/getDatasetSummary.js b/frontend/src/api/Dataset/getDatasetSummary.js deleted file mode 100644 index 0e6ae38d1..000000000 --- a/frontend/src/api/Dataset/getDatasetSummary.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getDatasetSummary = (datasetUri) => ({ - variables: { - datasetUri - }, - query: gql` - query GetDatasetSummary($datasetUri: String!) { - getDatasetSummary(datasetUri: $datasetUri) - } - ` -}); - -export default getDatasetSummary; diff --git a/frontend/src/api/Dataset/listDatasetContributors.js b/frontend/src/api/Dataset/listDatasetContributors.js deleted file mode 100644 index 12498db8f..000000000 --- a/frontend/src/api/Dataset/listDatasetContributors.js +++ /dev/null @@ -1,31 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listDatasetContributors = ({ datasetUri, filter }) => ({ - variables: { - datasetUri, - filter - }, - query: gql` - query GetDataset($filter: DatasetContributorFilter, $datasetUri: String!) { - getDataset(datasetUri: $datasetUri) { - datasetUri - contributors(filter: $filter) { - count - page - pageSize - hasNext - hasPrevious - pages - nodes { - userName - userRoleForDataset - userRoleInEnvironment - created - } - } - } - } - ` -}); - -export default listDatasetContributors; diff --git a/frontend/src/api/Dataset/listDatasetLoaders.js b/frontend/src/api/Dataset/listDatasetLoaders.js deleted file mode 100644 index 963e19a2d..000000000 --- a/frontend/src/api/Dataset/listDatasetLoaders.js +++ /dev/null @@ -1,34 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listDatasetLoaders = ({ datasetUri, filter }) => ({ - variables: { - datasetUri, - filter - }, - query: gql` - query GetDataset($filter: DatasetLoaderFilter, $datasetUri: String!) 
{ - getDataset(datasetUri: $datasetUri) { - datasetUri - loaders(filter: $filter) { - count - page - pageSize - hasNext - hasPrevious - pages - nodes { - loaderUri - description - label - IAMPrincipalArn - description - label - tags - } - } - } - } - ` -}); - -export default listDatasetLoaders; diff --git a/frontend/src/api/Dataset/listDatasetObjects.js b/frontend/src/api/Dataset/listDatasetObjects.js deleted file mode 100644 index 2cda3f56d..000000000 --- a/frontend/src/api/Dataset/listDatasetObjects.js +++ /dev/null @@ -1,46 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listDatasetObjects = ({ datasetUri, filter }) => ({ - variables: { - datasetUri, - filter - }, - query: gql` - query GetDataset($datasetUri:String!,$filter:DatasetTableFilter){ - getDataset(datasetUri:$datasetUri){ - datasetUri - locations(filter:$filer){ - count - page - pages - hasNext - hasPrevious - nodes{ - locationUri - created - label - } - } - - } - tables(filter:$filter){ - count - page - pages - hasNext - hasPrevious - nodes{ - datasetUri - tableUri - created - GlueTableName - label - } - } - - } - } - ` -}); - -export default listDatasetObjects; diff --git a/frontend/src/api/Dataset/listDeltaLakeCrawlerRuns.js b/frontend/src/api/Dataset/listDeltaLakeCrawlerRuns.js deleted file mode 100644 index d610e297a..000000000 --- a/frontend/src/api/Dataset/listDeltaLakeCrawlerRuns.js +++ /dev/null @@ -1,23 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listDeltaLakeCrawlerRuns = ({ datasetUri }) => ({ - variables: { - datasetUri - }, - query: gql` - query listDeltaLakeCrawlerRuns($datasetUri: String!) { - listDeltaLakeCrawlerRuns(datasetUri: $datasetUri) { - datasetUri - GlueJobName - GlueJobRunId - AwsAccountId - GlueTriggerName - created - status - owner - } - } - ` -}); - -export default listDeltaLakeCrawlerRuns; diff --git a/frontend/src/api/Dataset/listTablePermissions.js b/frontend/src/api/Dataset/listTablePermissions.js deleted file mode 100644 index 446a63804..000000000 --- a/frontend/src/api/Dataset/listTablePermissions.js +++ /dev/null @@ -1,25 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listTablePermissions = ({ tableUri }) => ({ - variables: { - tableUri - }, - query: gql` - query GetDatasetTable($tableUri: String!) { - getDatasetTable(tableUri: $tableUri) { - tableUri - userRoleForTable - permissions { - count - nodes { - userName - userRoleForTable - created - } - } - } - } - ` -}); - -export default listTablePermissions; diff --git a/frontend/src/api/Dataset/publishDatasetLocationUpdate.js b/frontend/src/api/Dataset/publishDatasetLocationUpdate.js deleted file mode 100644 index 25bfbdc39..000000000 --- a/frontend/src/api/Dataset/publishDatasetLocationUpdate.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const publishDatasetStorageLocationUpdate = ({ locationUri }) => ({ - variables: { - locationUri - }, - mutation: gql` - mutation publishDatasetStorageLocationUpdate($locationUri: String!) 
{ - publishDatasetStorageLocationUpdate(locationUri: $locationUri) - } - ` -}); - -export default publishDatasetStorageLocationUpdate; diff --git a/frontend/src/api/Dataset/publishDatasetUpdate.js b/frontend/src/api/Dataset/publishDatasetUpdate.js deleted file mode 100644 index 2d542c9b2..000000000 --- a/frontend/src/api/Dataset/publishDatasetUpdate.js +++ /dev/null @@ -1,15 +0,0 @@ -import { gql } from 'apollo-boost'; - -const publishDatasetUpdate = ({ datasetUri, s3Prefix }) => ({ - variables: { - datasetUri, - s3Prefix - }, - mutation: gql` - mutation publishDatasetUpdate($datasetUri: String!, $s3Prefix: String!) { - publishDatasetUpdate(datasetUri: $datasetUri, s3Prefix: $s3Prefix) - } - ` -}); - -export default publishDatasetUpdate; diff --git a/frontend/src/api/Dataset/removeDatasetContributor.js b/frontend/src/api/Dataset/removeDatasetContributor.js deleted file mode 100644 index b44ce9f0f..000000000 --- a/frontend/src/api/Dataset/removeDatasetContributor.js +++ /dev/null @@ -1,16 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeDatasetContributor = ({ userName, datasetUri }) => ({ - variables: { userName, datasetUri }, - mutation: gql` - mutation RemoveDatasetContributor($datasetUri: String, $userName: String) { - removeDatasetContributor(datasetUri: $datasetUri, userName: $userName) { - datasetUri - label - userRoleForDataset - } - } - ` -}); - -export default removeDatasetContributor; diff --git a/frontend/src/api/Dataset/removeDatasetLoader.js b/frontend/src/api/Dataset/removeDatasetLoader.js deleted file mode 100644 index d99352f75..000000000 --- a/frontend/src/api/Dataset/removeDatasetLoader.js +++ /dev/null @@ -1,12 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeDatasetLoader = ({ loaderUri }) => ({ - variables: { loaderUri }, - mutation: gql` - mutation RemoveDatasetLoader($loaderUri: String) { - removeDatasetLoader(loaderUri: $loaderUri) - } - ` -}); - -export default removeDatasetLoader; diff --git a/frontend/src/api/Dataset/removeDatasetStorageLocation.js b/frontend/src/api/Dataset/removeDatasetStorageLocation.js deleted file mode 100644 index 1a3ea1435..000000000 --- a/frontend/src/api/Dataset/removeDatasetStorageLocation.js +++ /dev/null @@ -1,12 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteDatasetStorageLocation = ({ locationUri }) => ({ - variables: { locationUri }, - mutation: gql` - mutation DeleteDatasetStorageLocation($locationUri: String) { - deleteDatasetStorageLocation(locationUri: $locationUri) - } - ` -}); - -export default deleteDatasetStorageLocation; diff --git a/frontend/src/api/Dataset/removeTablePermission.js b/frontend/src/api/Dataset/removeTablePermission.js deleted file mode 100644 index 45df8204f..000000000 --- a/frontend/src/api/Dataset/removeTablePermission.js +++ /dev/null @@ -1,16 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeTablePermissions = ({ tableUri, role, userName }) => ({ - variables: { - tableUri, - role, - userName - }, - mutation: gql` - mutation RemoveTablePermission($tableUri: String!, $userName: String!) 
{ - removeTablePermission(tableUri: $tableUri, userName: $userName) - } - ` -}); - -export default removeTablePermissions; diff --git a/frontend/src/api/Dataset/saveDatasetSummary.js b/frontend/src/api/Dataset/saveDatasetSummary.js deleted file mode 100644 index 46f508919..000000000 --- a/frontend/src/api/Dataset/saveDatasetSummary.js +++ /dev/null @@ -1,15 +0,0 @@ -import { gql } from 'apollo-boost'; - -const saveDatasetSummary = ({ datasetUri, content }) => ({ - variables: { - datasetUri, - content - }, - mutation: gql` - mutation SaveDatasetSummary($datasetUri: String!, $content: String) { - saveDatasetSummary(datasetUri: $datasetUri, content: $content) - } - ` -}); - -export default saveDatasetSummary; diff --git a/frontend/src/api/Dataset/updateDatasetContributor.js b/frontend/src/api/Dataset/updateDatasetContributor.js deleted file mode 100644 index 83a0ee6d7..000000000 --- a/frontend/src/api/Dataset/updateDatasetContributor.js +++ /dev/null @@ -1,24 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateDatasetContributor = ({ userName, datasetUri, role }) => ({ - variables: { userName, datasetUri, role }, - mutation: gql` - mutation UpdateDatasetContributor( - $datasetUri: String - $userName: String - $role: DatasetRole - ) { - updateDatasetContributor( - datasetUri: $datasetUri - userName: $userName - role: $role - ) { - datasetUri - label - userRoleForDataset - } - } - ` -}); - -export default updateDatasetContributor; diff --git a/frontend/src/api/Dataset/updateDatasetStack.js b/frontend/src/api/Dataset/updateDatasetStack.js deleted file mode 100644 index 66778b572..000000000 --- a/frontend/src/api/Dataset/updateDatasetStack.js +++ /dev/null @@ -1,12 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateDatasetStack = (datasetUri) => ({ - variables: { datasetUri }, - mutation: gql` - mutation updateDatasetStack($datasetUri: String!) { - updateDatasetStack(datasetUri: $datasetUri) - } - ` -}); - -export default updateDatasetStack; diff --git a/frontend/src/api/DatasetQualityRule/createDatasetQualityRule.js b/frontend/src/api/DatasetQualityRule/createDatasetQualityRule.js deleted file mode 100644 index 5143c173d..000000000 --- a/frontend/src/api/DatasetQualityRule/createDatasetQualityRule.js +++ /dev/null @@ -1,25 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createDatasetQualityRule = ({ datasetUri, input }) => ({ - variables: { - datasetUri, - input - }, - mutation: gql` - mutation CreateDatasetQualityRule( - $datasetUri: String! - $input: NewDatasetQualityRuleInput - ) { - createDatasetQualityRule(datasetUri: $datasetUri, input: $input) { - ruleUri - name - label - description - created - query - } - } - ` -}); - -export default createDatasetQualityRule; diff --git a/frontend/src/api/DatasetQualityRule/deleteDatasetqualityRule.js b/frontend/src/api/DatasetQualityRule/deleteDatasetqualityRule.js deleted file mode 100644 index 2464047a8..000000000 --- a/frontend/src/api/DatasetQualityRule/deleteDatasetqualityRule.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteDatasetQualityRule = (ruleUri) => ({ - variables: { - ruleUri - }, - mutation: gql` - mutation DeleteDatasetQualityRule($ruleUri: String!) 
{ - deleteDatasetQualityRule(ruleUri: $ruleUri) - } - ` -}); - -export default deleteDatasetQualityRule; diff --git a/frontend/src/api/DatasetQualityRule/getDatasetQualityRule.js b/frontend/src/api/DatasetQualityRule/getDatasetQualityRule.js deleted file mode 100644 index 038d229c5..000000000 --- a/frontend/src/api/DatasetQualityRule/getDatasetQualityRule.js +++ /dev/null @@ -1,21 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getDatasetQualityRule = (ruleUri) => ({ - variables: { - ruleUri - }, - query: gql` - query GetDatasetQualityRule($ruleUri: String!) { - getDatasetQualityRule(ruleUri: $ruleUri) { - ruleUri - name - label - description - created - query - } - } - ` -}); - -export default getDatasetQualityRule; diff --git a/frontend/src/api/DatasetQualityRule/listDatasetQualityRules.js b/frontend/src/api/DatasetQualityRule/listDatasetQualityRules.js deleted file mode 100644 index 7e7aecd91..000000000 --- a/frontend/src/api/DatasetQualityRule/listDatasetQualityRules.js +++ /dev/null @@ -1,32 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listDatasetQualityRules = ({ datasetUri, filter }) => ({ - variables: { - datasetUri, - filter - }, - query: gql` - query ListDatasetQualityRules( - $datasetUri: String! - $filter: DatasetQualityRuleFilter - ) { - listDatasetQualityRules(datasetUri: $datasetUri, filter: $filter) { - count - page - pages - hasNext - hasPrevious - nodes { - ruleUri - name - label - description - created - query - } - } - } - ` -}); - -export default listDatasetQualityRules; diff --git a/frontend/src/api/DatasetQualityRule/updateDatasetQualityRule.js b/frontend/src/api/DatasetQualityRule/updateDatasetQualityRule.js deleted file mode 100644 index e03fd63ed..000000000 --- a/frontend/src/api/DatasetQualityRule/updateDatasetQualityRule.js +++ /dev/null @@ -1,25 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateDatasetQualityRule = ({ ruleUri, input }) => ({ - variables: { - ruleUri, - input - }, - mutation: gql` - mutation UpdateDatasetQualityRule( - $ruleUri: String! - $input: ModifyDatasetQualityRuleInput - ) { - updateDatasetQualityRule(ruleUri: $ruleUri, input: $input) { - ruleUri - name - label - description - created - query - } - } - ` -}); - -export default updateDatasetQualityRule; diff --git a/frontend/src/api/DatasetTable/deleteDatasetTable.js b/frontend/src/api/DatasetTable/deleteDatasetTable.js deleted file mode 100644 index 0981e1c0d..000000000 --- a/frontend/src/api/DatasetTable/deleteDatasetTable.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteDatasetTable = ({ tableUri }) => ({ - variables: { - tableUri - }, - mutation: gql` - mutation deleteDatasetTable($tableUri: String!) { - deleteDatasetTable(tableUri: $tableUri) - } - ` -}); - -export default deleteDatasetTable; diff --git a/frontend/src/api/DatasetTable/getDatasetProfilingReport.js b/frontend/src/api/DatasetTable/getDatasetProfilingReport.js deleted file mode 100644 index eb2ca82b7..000000000 --- a/frontend/src/api/DatasetTable/getDatasetProfilingReport.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getDatasetTableProfilingReport = (jobUri) => ({ - variables: { - jobUri - }, - query: gql` - query getDatasetTableProfilingReport($jobUri: String!) 
{ - getDatasetTableProfilingReport(jobUri: $jobUri) - } - ` -}); - -export default getDatasetTableProfilingReport; diff --git a/frontend/src/api/DatasetTable/getSharedDatasetTables.js b/frontend/src/api/DatasetTable/getSharedDatasetTables.js deleted file mode 100644 index 62a5a032b..000000000 --- a/frontend/src/api/DatasetTable/getSharedDatasetTables.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getSharedDatasetTables = ({ datasetUri, envUri }) => ({ - variables: { - datasetUri, - envUri - }, - query: gql` - query GetSharedDatasetTables($datasetUri: String!, $envUri: String!) { - getSharedDatasetTables(datasetUri: $datasetUri, envUri: $envUri) { - tableUri - GlueTableName - } - } - ` -}); - -export default getSharedDatasetTables; diff --git a/frontend/src/api/DatasetTable/listDatasetTableProfilingJobs.js b/frontend/src/api/DatasetTable/listDatasetTableProfilingJobs.js deleted file mode 100644 index 3fcd4fbad..000000000 --- a/frontend/src/api/DatasetTable/listDatasetTableProfilingJobs.js +++ /dev/null @@ -1,33 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listDatasetTableProfilingJobs = (tableUri) => ({ - variables: { - tableUri - }, - query: gql` - query GetDatasetTable($tableUri: String!) { - getDatasetTable(tableUri: $tableUri) { - datasetUri - owner - created - tableUri - AwsAccountId - GlueTableName - profilingJobs { - count - page - pages - hasNext - hasPrevious - nodes { - jobUri - created - status - } - } - } - } - ` -}); - -export default listDatasetTableProfilingJobs; diff --git a/frontend/src/api/DatasetTable/previewTable.js b/frontend/src/api/DatasetTable/previewTable.js deleted file mode 100644 index e9f442249..000000000 --- a/frontend/src/api/DatasetTable/previewTable.js +++ /dev/null @@ -1,22 +0,0 @@ -import { gql } from 'apollo-boost'; - -const previewTable = ({ tableUri, queryExecutionId }) => ({ - variables: { - tableUri, - queryExecutionId - }, - query: gql` - query PreviewTable($tableUri: String!, $queryExecutionId: String) { - previewTable(tableUri: $tableUri, queryExecutionId: $queryExecutionId) { - count - status - queryExecutionId - nodes { - data - } - } - } - ` -}); - -export default previewTable; diff --git a/frontend/src/api/DatasetTable/previewTable2.js b/frontend/src/api/DatasetTable/previewTable2.js deleted file mode 100644 index ffeef03a3..000000000 --- a/frontend/src/api/DatasetTable/previewTable2.js +++ /dev/null @@ -1,17 +0,0 @@ -import { gql } from 'apollo-boost'; - -const previewTable2 = (tableUri) => ({ - variables: { - tableUri - }, - query: gql` - query PreviewTable2($tableUri: String!) { - previewTable2(tableUri: $tableUri) { - rows - fields - } - } - ` -}); - -export default previewTable2; diff --git a/frontend/src/api/DatasetTable/publishDatasetTableUpdate.js b/frontend/src/api/DatasetTable/publishDatasetTableUpdate.js deleted file mode 100644 index b8a44ff39..000000000 --- a/frontend/src/api/DatasetTable/publishDatasetTableUpdate.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const publishDatasetTableUpdate = ({ tableUri }) => ({ - variables: { - tableUri - }, - mutation: gql` - mutation publishDatasetTableUpdate($tableUri: String!) 
{ - publishDatasetTableUpdate(tableUri: $tableUri) - } - ` -}); - -export default publishDatasetTableUpdate; diff --git a/frontend/src/api/DatasetTable/startProfilingJob.js b/frontend/src/api/DatasetTable/startProfilingJob.js deleted file mode 100644 index 8e61cfa4c..000000000 --- a/frontend/src/api/DatasetTable/startProfilingJob.js +++ /dev/null @@ -1,16 +0,0 @@ -import { gql } from 'apollo-boost'; - -const startProfilingJob = (tableUri) => ({ - variables: { - tableUri - }, - mutation: gql` - mutation StartProfilingJob($tableUri: String!) { - startProfilingJob(tableUri: $tableUri) { - jobUri - } - } - ` -}); - -export default startProfilingJob; diff --git a/frontend/src/api/DatasetTable/startProfilingRun.js b/frontend/src/api/DatasetTable/startProfilingRun.js deleted file mode 100644 index 9da074e5b..000000000 --- a/frontend/src/api/DatasetTable/startProfilingRun.js +++ /dev/null @@ -1,16 +0,0 @@ -import { gql } from 'apollo-boost'; - -const startDatasetProfilingRun = ({ input }) => ({ - variables: { - input - }, - mutation: gql` - mutation startDatasetProfilingRun($input: StartDatasetProfilingRunInput!) { - startDatasetProfilingRun(input: $input) { - profilingRunUri - } - } - ` -}); - -export default startDatasetProfilingRun; diff --git a/frontend/src/api/Environment/addConsumptionRoleToEnvironment.js b/frontend/src/api/Environment/addConsumptionRoleToEnvironment.js deleted file mode 100644 index fb41386bc..000000000 --- a/frontend/src/api/Environment/addConsumptionRoleToEnvironment.js +++ /dev/null @@ -1,20 +0,0 @@ -import { gql } from 'apollo-boost'; - -const addConsumptionRoleToEnvironment = (input) => ({ - variables: { - input - }, - mutation: gql` - mutation addConsumptionRoleToEnvironment($input: AddConsumptionRoleToEnvironmentInput!) { - addConsumptionRoleToEnvironment(input: $input) { - consumptionRoleUri - consumptionRoleName - environmentUri - groupUri - IAMRoleArn - } - } - ` -}); - -export default addConsumptionRoleToEnvironment; diff --git a/frontend/src/api/Environment/createEnvironment.js b/frontend/src/api/Environment/createEnvironment.js deleted file mode 100644 index 1314f0bf3..000000000 --- a/frontend/src/api/Environment/createEnvironment.js +++ /dev/null @@ -1,26 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createEnvironment = (input) => ({ - variables: { - input - }, - mutation: gql` - mutation CreateEnvironment($input: NewEnvironmentInput) { - createEnvironment(input: $input) { - environmentUri - label - userRoleInEnvironment - SamlGroupName - AwsAccountId - created - dashboardsEnabled - notebooksEnabled - mlStudiosEnabled - pipelinesEnabled - warehousesEnabled - } - } - ` -}); - -export default createEnvironment; diff --git a/frontend/src/api/Environment/disableDataSubscriptions.js b/frontend/src/api/Environment/disableDataSubscriptions.js deleted file mode 100644 index ef04c66d1..000000000 --- a/frontend/src/api/Environment/disableDataSubscriptions.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const DisableDataSubscriptions = ({ environmentUri }) => ({ - variables: { - environmentUri - }, - mutation: gql` - mutation DisableDataSubscriptions($environmentUri: String!) 
{ - DisableDataSubscriptions(environmentUri: $environmentUri) - } - ` -}); - -export default DisableDataSubscriptions; diff --git a/frontend/src/api/Environment/enableDataSubscriptions.js b/frontend/src/api/Environment/enableDataSubscriptions.js deleted file mode 100644 index 6275ca832..000000000 --- a/frontend/src/api/Environment/enableDataSubscriptions.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const enableDataSubscriptions = ({ environmentUri, input }) => ({ - variables: { - environmentUri, - input - }, - mutation: gql` - mutation enableDataSubscriptions( - $environmentUri: String! - $input: EnableDataSubscriptionsInput - ) { - enableDataSubscriptions(environmentUri: $environmentUri, input: $input) - } - ` -}); - -export default enableDataSubscriptions; diff --git a/frontend/src/api/Environment/generateEnvironmentAccessToken.js b/frontend/src/api/Environment/generateEnvironmentAccessToken.js deleted file mode 100644 index 06b8f6912..000000000 --- a/frontend/src/api/Environment/generateEnvironmentAccessToken.js +++ /dev/null @@ -1,21 +0,0 @@ -import { gql } from 'apollo-boost'; - -const generateEnvironmentAccessToken = ({ environmentUri, groupUri }) => ({ - variables: { - environmentUri, - groupUri - }, - query: gql` - query GenerateEnvironmentAccessToken( - $environmentUri: String! - $groupUri: String - ) { - generateEnvironmentAccessToken( - environmentUri: $environmentUri - groupUri: $groupUri - ) - } - ` -}); - -export default generateEnvironmentAccessToken; diff --git a/frontend/src/api/Environment/getCDKExecPolicyPresignedUrl.js b/frontend/src/api/Environment/getCDKExecPolicyPresignedUrl.js deleted file mode 100644 index 026f6668a..000000000 --- a/frontend/src/api/Environment/getCDKExecPolicyPresignedUrl.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getCDKExecPolicyPresignedUrl = (organizationUri) => ({ - variables: { - organizationUri - }, - query: gql` - query getCDKExecPolicyPresignedUrl($organizationUri: String!) { - getCDKExecPolicyPresignedUrl(organizationUri: $organizationUri) - } - ` -}); - -export default getCDKExecPolicyPresignedUrl; diff --git a/frontend/src/api/Environment/getEnvironmentAssumeRoleUrl.js b/frontend/src/api/Environment/getEnvironmentAssumeRoleUrl.js deleted file mode 100644 index b70fa6720..000000000 --- a/frontend/src/api/Environment/getEnvironmentAssumeRoleUrl.js +++ /dev/null @@ -1,21 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getEnvironmentAssumeRoleUrl = ({ environmentUri, groupUri }) => ({ - variables: { - environmentUri, - groupUri - }, - query: gql` - query getEnvironmentAssumeRoleUrl( - $environmentUri: String! - $groupUri: String - ) { - getEnvironmentAssumeRoleUrl( - environmentUri: $environmentUri - groupUri: $groupUri - ) - } - ` -}); - -export default getEnvironmentAssumeRoleUrl; diff --git a/frontend/src/api/Environment/getPivotRoleExternalId.js b/frontend/src/api/Environment/getPivotRoleExternalId.js deleted file mode 100644 index d39d3db42..000000000 --- a/frontend/src/api/Environment/getPivotRoleExternalId.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getPivotRoleExternalId = (organizationUri) => ({ - variables: { - organizationUri - }, - query: gql` - query getPivotRoleExternalId($organizationUri: String!) 
{ - getPivotRoleExternalId(organizationUri: $organizationUri) - } - ` -}); - -export default getPivotRoleExternalId; diff --git a/frontend/src/api/Environment/getPivotRoleName.js b/frontend/src/api/Environment/getPivotRoleName.js deleted file mode 100644 index 92219a37d..000000000 --- a/frontend/src/api/Environment/getPivotRoleName.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getPivotRoleName = (organizationUri) => ({ - variables: { - organizationUri - }, - query: gql` - query getPivotRoleName($organizationUri: String!) { - getPivotRoleName(organizationUri: $organizationUri) - } - ` -}); - -export default getPivotRoleName; diff --git a/frontend/src/api/Environment/getPivotRolePresignedUrl.js b/frontend/src/api/Environment/getPivotRolePresignedUrl.js deleted file mode 100644 index d83866504..000000000 --- a/frontend/src/api/Environment/getPivotRolePresignedUrl.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getPivotRolePresignedUrl = (organizationUri) => ({ - variables: { - organizationUri - }, - query: gql` - query getPivotRolePresignedUrl($organizationUri: String!) { - getPivotRolePresignedUrl(organizationUri: $organizationUri) - } - ` -}); - -export default getPivotRolePresignedUrl; diff --git a/frontend/src/api/Environment/getTrustAccount.js b/frontend/src/api/Environment/getTrustAccount.js deleted file mode 100644 index 501fba02d..000000000 --- a/frontend/src/api/Environment/getTrustAccount.js +++ /dev/null @@ -1,11 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getTrustAccount = () => ({ - query: gql` - query GetTrustAccount { - getTrustAccount - } - ` -}); - -export default getTrustAccount; diff --git a/frontend/src/api/Environment/inviteGroup.js b/frontend/src/api/Environment/inviteGroup.js deleted file mode 100644 index c596b6742..000000000 --- a/frontend/src/api/Environment/inviteGroup.js +++ /dev/null @@ -1,16 +0,0 @@ -import { gql } from 'apollo-boost'; - -const inviteGroupOnEnvironment = (input) => ({ - variables: { - input - }, - mutation: gql` - mutation inviteGroupOnEnvironment($input: InviteGroupOnEnvironmentInput!) 
{ - inviteGroupOnEnvironment(input: $input) { - environmentUri - } - } - ` -}); - -export default inviteGroupOnEnvironment; diff --git a/frontend/src/api/Environment/listDataItemsSharedWithEnvGroup.js b/frontend/src/api/Environment/listDataItemsSharedWithEnvGroup.js deleted file mode 100644 index 2297bab75..000000000 --- a/frontend/src/api/Environment/listDataItemsSharedWithEnvGroup.js +++ /dev/null @@ -1,46 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listDataItemsSharedWithEnvGroup = ({ filter, environmentUri, groupUri }) => ({ - variables: { - environmentUri, - groupUri, - filter - }, - query: gql` - query listDataItemsSharedWithEnvGroup( - $filter: EnvironmentDataItemFilter - $environmentUri: String - $groupUri: String - ) { - listDataItemsSharedWithEnvGroup( - environmentUri: $environmentUri - groupUri: $groupUri - filter: $filter - ) { - count - page - pages - hasNext - hasPrevious - nodes { - shareUri - environmentName - environmentUri - organizationName - organizationUri - datasetUri - datasetName - itemType - itemAccess - GlueDatabaseName - GlueTableName - S3AccessPointName - created - principalId - } - } - } - ` -}); - -export default listDataItemsSharedWithEnvGroup; diff --git a/frontend/src/api/Environment/listDatasetsPublishedInEnvironment.js b/frontend/src/api/Environment/listDatasetsPublishedInEnvironment.js deleted file mode 100644 index 8663ad82f..000000000 --- a/frontend/src/api/Environment/listDatasetsPublishedInEnvironment.js +++ /dev/null @@ -1,43 +0,0 @@ -import { gql } from 'apollo-boost'; - -const searchEnvironmentDataItems = ({ filter, environmentUri }) => ({ - variables: { - environmentUri, - filter - }, - query: gql` - query SearchEnvironmentDataItems( - $filter: EnvironmentDataItemFilter - $environmentUri: String - ) { - searchEnvironmentDataItems( - environmentUri: $environmentUri - filter: $filter - ) { - count - page - pages - hasNext - hasPrevious - nodes { - shareUri - environmentName - environmentUri - organizationName - organizationUri - datasetUri - datasetName - itemType - itemAccess - GlueDatabaseName - GlueTableName - S3AccessPointName - created - principalId - } - } - } - ` -}); - -export default searchEnvironmentDataItems; diff --git a/frontend/src/api/Environment/listEnvironmentMembers.js b/frontend/src/api/Environment/listEnvironmentMembers.js deleted file mode 100644 index 009897ec0..000000000 --- a/frontend/src/api/Environment/listEnvironmentMembers.js +++ /dev/null @@ -1,29 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listEnvironmentMembers = ({ term, environmentUri }) => ({ - variables: { - environmentUri, - filter: { term: term || '' } - }, - query: gql` - query getEnvironment( - $filter: OrganizationUserFilter - $environmentUri: String - ) { - getEnvironment(environmentUri: $environmentUri) { - environmentUri - userRoleInEnvironment - users(filter: $filter) { - count - nodes { - userName - userRoleInEnvironment - created - } - } - } - } - ` -}); - -export default listEnvironmentMembers; diff --git a/frontend/src/api/Environment/listEnvironmentNotMembers.js b/frontend/src/api/Environment/listEnvironmentNotMembers.js deleted file mode 100644 index 3d8839e03..000000000 --- a/frontend/src/api/Environment/listEnvironmentNotMembers.js +++ /dev/null @@ -1,29 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listEnvironmentNotMembers = ({ term, environmentUri }) => ({ - variables: { - environmentUri, - filter: { term: term || '' } - }, - query: gql` - query getEnvironment( - $filter: OrganizationUserFilter - $environmentUri: 
String - ) { - getEnvironment(environmentUri: $environmentUri) { - environmentUri - userRoleInEnvironment - notMembers(filter: $filter) { - count - nodes { - userName - userRoleInEnvironment - created - } - } - } - } - ` -}); - -export default listEnvironmentNotMembers; diff --git a/frontend/src/api/Environment/listEnvironmentPermissions.js b/frontend/src/api/Environment/listEnvironmentPermissions.js deleted file mode 100644 index d2ce6b208..000000000 --- a/frontend/src/api/Environment/listEnvironmentPermissions.js +++ /dev/null @@ -1,20 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listEnvironmentGroupInvitationPermissions = ({ environmentUri }) => ({ - variables: { - environmentUri - }, - query: gql` - query listEnvironmentGroupInvitationPermissions($environmentUri: String) { - listEnvironmentGroupInvitationPermissions( - environmentUri: $environmentUri - ) { - permissionUri - name - description - } - } - ` -}); - -export default listEnvironmentGroupInvitationPermissions; diff --git a/frontend/src/api/Environment/listInvitedGroups.js b/frontend/src/api/Environment/listInvitedGroups.js deleted file mode 100644 index 766036083..000000000 --- a/frontend/src/api/Environment/listInvitedGroups.js +++ /dev/null @@ -1,27 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listEnvironmentGroups = ({ filter, environmentUri }) => ({ - variables: { - environmentUri, - filter - }, - query: gql` - query listEnvironmentGroups($filter: GroupFilter, $environmentUri: String) { - listEnvironmentGroups(environmentUri: $environmentUri, filter: $filter) { - count - page - pages - hasNext - hasPrevious - nodes { - groupUri - invitedBy - created - description - } - } - } - ` -}); - -export default listEnvironmentGroups; diff --git a/frontend/src/api/Environment/removeConsumptionRole.js b/frontend/src/api/Environment/removeConsumptionRole.js deleted file mode 100644 index 3a26a64f7..000000000 --- a/frontend/src/api/Environment/removeConsumptionRole.js +++ /dev/null @@ -1,21 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeConsumptionRoleFromEnvironment = ({ environmentUri, consumptionRoleUri }) => ({ - variables: { - environmentUri, - consumptionRoleUri - }, - mutation: gql` - mutation removeConsumptionRoleFromEnvironment( - $environmentUri: String! - $consumptionRoleUri: String! - ) { - removeConsumptionRoleFromEnvironment( - environmentUri: $environmentUri - consumptionRoleUri: $consumptionRoleUri - ) - } - ` -}); - -export default removeConsumptionRoleFromEnvironment; diff --git a/frontend/src/api/Environment/removeGroup.js b/frontend/src/api/Environment/removeGroup.js deleted file mode 100644 index f3245d11f..000000000 --- a/frontend/src/api/Environment/removeGroup.js +++ /dev/null @@ -1,23 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeGroupFromEnvironment = ({ environmentUri, groupUri }) => ({ - variables: { - environmentUri, - groupUri - }, - mutation: gql` - mutation removeGroupFromEnvironment( - $environmentUri: String! - $groupUri: String! 
- ) { - removeGroupFromEnvironment( - environmentUri: $environmentUri - groupUri: $groupUri - ) { - environmentUri - } - } - ` -}); - -export default removeGroupFromEnvironment; diff --git a/frontend/src/api/Environment/removeMember.js b/frontend/src/api/Environment/removeMember.js deleted file mode 100644 index 22a48cf7e..000000000 --- a/frontend/src/api/Environment/removeMember.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeUserFromEnvironment = ({ environmentUri, userName }) => ({ - variables: { environmentUri, userName }, - mutation: gql` - mutation RemoveUserFromEnvironment( - $environmentUri: String! - $userName: String! - ) { - removeUserFromEnvironment( - environmentUri: $environmentUri - userName: $userName - ) - } - ` -}); - -export default removeUserFromEnvironment; diff --git a/frontend/src/api/Environment/updateEnvironment.js b/frontend/src/api/Environment/updateEnvironment.js deleted file mode 100644 index 9a715b91f..000000000 --- a/frontend/src/api/Environment/updateEnvironment.js +++ /dev/null @@ -1,30 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateEnvironment = ({ environmentUri, input }) => ({ - variables: { - environmentUri, - input - }, - mutation: gql` - mutation UpdateEnvironment( - $environmentUri: String! - $input: ModifyEnvironmentInput - ) { - updateEnvironment(environmentUri: $environmentUri, input: $input) { - environmentUri - label - userRoleInEnvironment - SamlGroupName - AwsAccountId - dashboardsEnabled - notebooksEnabled - mlStudiosEnabled - pipelinesEnabled - warehousesEnabled - created - } - } - ` -}); - -export default updateEnvironment; diff --git a/frontend/src/api/Environment/updateEnvironmentMemberRole.js b/frontend/src/api/Environment/updateEnvironmentMemberRole.js deleted file mode 100644 index 1bca1348c..000000000 --- a/frontend/src/api/Environment/updateEnvironmentMemberRole.js +++ /dev/null @@ -1,22 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateMemberRole = ({ environmentUri, userName, role }) => ({ - variables: { environmentUri, userName, role: role || 'Member' }, - mutation: gql` - mutation UpdateGroupMember( - $environmentUri: String! - $userName: String! - $role: EnvironmentPermission - ) { - updateUserEnvironmentRole( - environmentUri: $environmentUri - userName: $userName - role: $role - ) { - environmentUri - } - } - ` -}); - -export default updateMemberRole; diff --git a/frontend/src/api/Environment/updateEnvironmentStack.js b/frontend/src/api/Environment/updateEnvironmentStack.js deleted file mode 100644 index dfe8c65fd..000000000 --- a/frontend/src/api/Environment/updateEnvironmentStack.js +++ /dev/null @@ -1,12 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateEnvironmentStack = ({ environmentUri }) => ({ - variables: { environmentUri }, - mutation: gql` - mutation updateEnvironmentStack($environmentUri: String!) { - updateEnvironmentStack(environmentUri: $environmentUri) - } - ` -}); - -export default updateEnvironmentStack; diff --git a/frontend/src/api/Environment/updateGroupEnvironmentPermissions.js b/frontend/src/api/Environment/updateGroupEnvironmentPermissions.js deleted file mode 100644 index f6923829e..000000000 --- a/frontend/src/api/Environment/updateGroupEnvironmentPermissions.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateGroupEnvironmentPermissions = (input) => ({ - variables: { - input - }, - mutation: gql` - mutation updateGroupEnvironmentPermissions( - $input: InviteGroupOnEnvironmentInput! 
- ) { - updateGroupEnvironmentPermissions(input: $input) { - environmentUri - } - } - ` -}); - -export default updateGroupEnvironmentPermissions; diff --git a/frontend/src/api/Environment/verifyEnvironment.js b/frontend/src/api/Environment/verifyEnvironment.js deleted file mode 100644 index 5fdfd4eb7..000000000 --- a/frontend/src/api/Environment/verifyEnvironment.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const checkEnvironment = (input) => ({ - variables: { - input - }, - query: gql` - query CheckEnvironment($input: AwsEnvironmentInput!) { - checkEnvironment(input: $input) - } - ` -}); - -export default checkEnvironment; diff --git a/frontend/src/api/Feed/index.js b/frontend/src/api/Feed/index.js deleted file mode 100644 index 45d5ad414..000000000 --- a/frontend/src/api/Feed/index.js +++ /dev/null @@ -1,4 +0,0 @@ -import listFeedMessages from './listFeedMessages'; -import postFeedMessage from './postMessage'; - -export { listFeedMessages, postFeedMessage }; diff --git a/frontend/src/api/Glossary/addCategory.js b/frontend/src/api/Glossary/addCategory.js deleted file mode 100644 index 8eadd2dc8..000000000 --- a/frontend/src/api/Glossary/addCategory.js +++ /dev/null @@ -1,22 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createCategory = ({ input, parentUri }) => ({ - variables: { - input, - parentUri - }, - mutation: gql` - mutation CreateCategory($parentUri: String!, $input: CreateCategoryInput) { - createCategory(parentUri: $parentUri, input: $input) { - nodeUri - label - path - readme - created - owner - } - } - ` -}); - -export default createCategory; diff --git a/frontend/src/api/Glossary/addTerm.js b/frontend/src/api/Glossary/addTerm.js deleted file mode 100644 index 604bd70bc..000000000 --- a/frontend/src/api/Glossary/addTerm.js +++ /dev/null @@ -1,22 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createTerm = ({ input, parentUri }) => ({ - variables: { - input, - parentUri - }, - mutation: gql` - mutation CreateTerm($parentUri: String!, $input: CreateTermInput) { - createTerm(parentUri: $parentUri, input: $input) { - nodeUri - label - path - readme - created - owner - } - } - ` -}); - -export default createTerm; diff --git a/frontend/src/api/Glossary/approveTermAssociation.js b/frontend/src/api/Glossary/approveTermAssociation.js deleted file mode 100644 index 79d7c6a6d..000000000 --- a/frontend/src/api/Glossary/approveTermAssociation.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const approveTermAssociation = (linkUri) => ({ - variables: { - linkUri - }, - mutation: gql` - mutation ApproveTermAssociation($linkUri: String!) { - approveTermAssociation(linkUri: $linkUri) - } - ` -}); - -export default approveTermAssociation; diff --git a/frontend/src/api/Glossary/deleteCategory.js b/frontend/src/api/Glossary/deleteCategory.js deleted file mode 100644 index 2dfc51b66..000000000 --- a/frontend/src/api/Glossary/deleteCategory.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteCategory = (nodeUri) => ({ - variables: { - nodeUri - }, - mutation: gql` - mutation deleteCategory($nodeUri: String!) 
{ - deleteCategory(nodeUri: $nodeUri) - } - ` -}); - -export default deleteCategory; diff --git a/frontend/src/api/Glossary/deleteGlossary.js b/frontend/src/api/Glossary/deleteGlossary.js deleted file mode 100644 index 883ca0796..000000000 --- a/frontend/src/api/Glossary/deleteGlossary.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteGlossary = (nodeUri) => ({ - variables: { - nodeUri - }, - mutation: gql` - mutation deleteGlossary($nodeUri: String!) { - deleteGlossary(nodeUri: $nodeUri) - } - ` -}); - -export default deleteGlossary; diff --git a/frontend/src/api/Glossary/deleteTerm.js b/frontend/src/api/Glossary/deleteTerm.js deleted file mode 100644 index 77eb4e713..000000000 --- a/frontend/src/api/Glossary/deleteTerm.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteTerm = (nodeUri) => ({ - variables: { - nodeUri - }, - mutation: gql` - mutation deleteTerm($nodeUri: String!) { - deleteTerm(nodeUri: $nodeUri) - } - ` -}); - -export default deleteTerm; diff --git a/frontend/src/api/Glossary/dismissTermAssociation.js b/frontend/src/api/Glossary/dismissTermAssociation.js deleted file mode 100644 index d5be521fc..000000000 --- a/frontend/src/api/Glossary/dismissTermAssociation.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const dismissTermAssociation = (linkUri) => ({ - variables: { - linkUri - }, - mutation: gql` - mutation DismissTermAssociation($linkUri: String!) { - dismissTermAssociation(linkUri: $linkUri) - } - ` -}); - -export default dismissTermAssociation; diff --git a/frontend/src/api/Glossary/getTerm.js b/frontend/src/api/Glossary/getTerm.js deleted file mode 100644 index 23822bc3f..000000000 --- a/frontend/src/api/Glossary/getTerm.js +++ /dev/null @@ -1,45 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getTerm = ({ nodeUri }) => ({ - variables: { - nodeUri - }, - query: gql` - query GetTerm($nodeUri: String!) { - getTerm(nodeUri: $nodeUri) { - nodeUri - label - readme - created - owner - status - path - stats { - categories - terms - associations - } - associations { - count - pages - hasNext - hasPrevious - nodes { - linkUri - targetUri - approvedByOwner - approvedBySteward - target { - __typename - ... on Dataset { - label - } - } - } - } - } - } - ` -}); - -export default getTerm; diff --git a/frontend/src/api/Glossary/linkTerm.js b/frontend/src/api/Glossary/linkTerm.js deleted file mode 100644 index b3a29030a..000000000 --- a/frontend/src/api/Glossary/linkTerm.js +++ /dev/null @@ -1,27 +0,0 @@ -import { gql } from 'apollo-boost'; - -const linkTerm = ({ nodeUri, targetUri, targetType }) => ({ - variables: { - nodeUri, - targetType, - targetUri - }, - mutation: gql` - mutation LinkTerm( - $nodeUri: String! - $targetUri: String! - $targetType: String! - ) { - linkTerm( - nodeUri: $nodeUri - targetUri: $targetUri - targetType: $targetType - ) { - linkUri - created - } - } - ` -}); - -export default linkTerm; diff --git a/frontend/src/api/Glossary/listAssetLinkedTerms.js b/frontend/src/api/Glossary/listAssetLinkedTerms.js deleted file mode 100644 index cb51922ea..000000000 --- a/frontend/src/api/Glossary/listAssetLinkedTerms.js +++ /dev/null @@ -1,43 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listAssetLinkedTerms = ({ uri, filter }) => ({ - variables: { - filter, - uri - }, - query: gql` - query ListAssetLinkedTerms( - $uri: String! 
- $filter: GlossaryTermTargetFilter - ) { - listAssetLinkedTerms(uri: $uri, filter: $filter) { - count - page - pages - hasNext - hasPrevious - nodes { - linkUri - nodeUri - owner - created - approvedByOwner - approvedBySteward - term { - label - readme - created - owner - glossary { - label - nodeUri - } - path - } - } - } - } - ` -}); - -export default listAssetLinkedTerms; diff --git a/frontend/src/api/Glossary/listGlossaryTree.js b/frontend/src/api/Glossary/listGlossaryTree.js deleted file mode 100644 index ff9ceec92..000000000 --- a/frontend/src/api/Glossary/listGlossaryTree.js +++ /dev/null @@ -1,83 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getGlossaryTree = ({ nodeUri, filter }) => ({ - variables: { - nodeUri, - filter - }, - query: gql` - query GetGlossaryTree( - $nodeUri: String! - $filter: GlossaryNodeSearchFilter - ) { - getGlossary(nodeUri: $nodeUri) { - nodeUri - label - readme - created - owner - status - path - admin - deleted - categories { - count - page - pages - hasNext - hasPrevious - nodes { - nodeUri - parentUri - label - readme - stats { - categories - terms - } - status - created - } - } - tree(filter: $filter) { - count - hasNext - hasPrevious - page - pages - nodes { - __typename - ... on Glossary { - nodeUri - label - readme - created - owner - path - } - ... on Category { - nodeUri - label - parentUri - readme - created - owner - path - } - ... on Term { - nodeUri - parentUri - label - readme - created - owner - path - } - } - } - } - } - ` -}); - -export default getGlossaryTree; diff --git a/frontend/src/api/Glossary/requestLink.js b/frontend/src/api/Glossary/requestLink.js deleted file mode 100644 index bbf1091d2..000000000 --- a/frontend/src/api/Glossary/requestLink.js +++ /dev/null @@ -1,27 +0,0 @@ -import { gql } from 'apollo-boost'; - -const requestLink = ({ nodeUri, targetUri, targetType }) => ({ - variables: { - nodeUri, - targetType, - targetUri - }, - mutation: gql` - mutation RequestLink( - $nodeUri: String! - $targetUri: String! - $targetType: String! - ) { - requestLink( - nodeUri: $nodeUri - targetUri: $targetUri - targetType: $targetType - ) { - linkUri - created - } - } - ` -}); - -export default requestLink; diff --git a/frontend/src/api/Glossary/searchGlossaryHierarchy.js b/frontend/src/api/Glossary/searchGlossaryHierarchy.js deleted file mode 100644 index a89fede90..000000000 --- a/frontend/src/api/Glossary/searchGlossaryHierarchy.js +++ /dev/null @@ -1,61 +0,0 @@ -import { gql } from 'apollo-boost'; - -const searchGlossaryHierarchy = ({ filter, targetUri }) => ({ - variables: { - filter, - targetUri - }, - query: gql` - query SearchGlossaryHierarchy($filter: TermFilter, $targetUri: String) { - searchGlossaryHierarchy(filter: $filter) { - count - page - pages - hasNext - hasPrevious - nodes { - __typename - ... on Glossary { - nodeUri - parentUri - label - readme - created - owner - path - isMatch - } - ... on Category { - nodeUri - parentUri - label - readme - created - owner - path - isMatch - } - ... 
on Term { - nodeUri - parentUri - label - readme - created - owner - path - isMatch - assetLink(targetUri: $targetUri) { - nodeUri - targetUri - created - approvedByOwner - approvedBySteward - } - } - } - } - } - ` -}); - -export default searchGlossaryHierarchy; diff --git a/frontend/src/api/Groups/listCognitoGroups.js b/frontend/src/api/Groups/listCognitoGroups.js deleted file mode 100644 index 49d473a50..000000000 --- a/frontend/src/api/Groups/listCognitoGroups.js +++ /dev/null @@ -1,20 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listCognitoGroups = ({ filter }) => ({ - variables: { - filter - }, - query: gql` - query listCognitoGroups ( - $filter: CognitoGroupFilter - ) { - listCognitoGroups ( - filter: $filter - ){ - groupName - } - } - ` -}); - -export default listCognitoGroups; diff --git a/frontend/src/api/Metric/index.js b/frontend/src/api/Metric/index.js deleted file mode 100644 index 757e50c46..000000000 --- a/frontend/src/api/Metric/index.js +++ /dev/null @@ -1,3 +0,0 @@ -import getMetrics from './getMetrics'; - -export { getMetrics }; diff --git a/frontend/src/api/Notification/archiveNotification.js b/frontend/src/api/Notification/archiveNotification.js deleted file mode 100644 index 59c37b3e0..000000000 --- a/frontend/src/api/Notification/archiveNotification.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const archiveNotification = ({ notificationUri }) => ({ - variables: { - notificationUri - }, - mutation: gql` - mutation deleteNotification($notificationUri: String!) { - deleteNotification(notificationUri: $notificationUri) - } - ` -}); - -export default archiveNotification; diff --git a/frontend/src/api/Notification/countDeletedNotifications.js b/frontend/src/api/Notification/countDeletedNotifications.js deleted file mode 100644 index 1e8bf2816..000000000 --- a/frontend/src/api/Notification/countDeletedNotifications.js +++ /dev/null @@ -1,12 +0,0 @@ -import { gql } from 'apollo-boost'; - -const countDeletedNotifications = () => ({ - variables: {}, - query: gql` - query countDeletedNotifications { - countDeletedNotifications - } - ` -}); - -export default countDeletedNotifications; diff --git a/frontend/src/api/Notification/countReadNotifications.js b/frontend/src/api/Notification/countReadNotifications.js deleted file mode 100644 index 205a55ae6..000000000 --- a/frontend/src/api/Notification/countReadNotifications.js +++ /dev/null @@ -1,12 +0,0 @@ -import { gql } from 'apollo-boost'; - -const countReadNotifications = () => ({ - variables: {}, - query: gql` - query countReadNotifications { - countReadNotifications - } - ` -}); - -export default countReadNotifications; diff --git a/frontend/src/api/Notification/countUnreadNotifications.js b/frontend/src/api/Notification/countUnreadNotifications.js deleted file mode 100644 index 196b63764..000000000 --- a/frontend/src/api/Notification/countUnreadNotifications.js +++ /dev/null @@ -1,12 +0,0 @@ -import { gql } from 'apollo-boost'; - -const countUnreadNotifications = () => ({ - variables: {}, - query: gql` - query countUnreadNotifications { - countUnreadNotifications - } - ` -}); - -export default countUnreadNotifications; diff --git a/frontend/src/api/Notification/markAsRead.js b/frontend/src/api/Notification/markAsRead.js deleted file mode 100644 index fd8e68a49..000000000 --- a/frontend/src/api/Notification/markAsRead.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const markNotificationAsRead = (notificationUri) => ({ - variables: { - notificationUri - }, - 
mutation: gql` - mutation markNotificationAsRead($notificationUri: String!) { - markNotificationAsRead(notificationUri: $notificationUri) - } - ` -}); - -export default markNotificationAsRead; diff --git a/frontend/src/api/Organization/addOrUpdateOrganizationTopic.js b/frontend/src/api/Organization/addOrUpdateOrganizationTopic.js deleted file mode 100644 index 493362364..000000000 --- a/frontend/src/api/Organization/addOrUpdateOrganizationTopic.js +++ /dev/null @@ -1,24 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createTopic = ({ input, organizationUri }) => ({ - variables: { - organizationUri, - input - }, - mutation: gql` - mutation createTopic( - $organizationUri: String - $input: OrganizationTopicInput - ) { - createTopic(organizationUri: $organizationUri, input: $input) { - topicUri - label - description - created - owner - } - } - ` -}); - -export default createTopic; diff --git a/frontend/src/api/Organization/archiveOrganization.js b/frontend/src/api/Organization/archiveOrganization.js deleted file mode 100644 index 9004cd465..000000000 --- a/frontend/src/api/Organization/archiveOrganization.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const archiveOrganization = (organizationUri) => ({ - variables: { - organizationUri - }, - mutation: gql` - mutation ArciveOrg($organizationUri: String!) { - archiveOrganization(organizationUri: $organizationUri) - } - ` -}); - -export default archiveOrganization; diff --git a/frontend/src/api/Organization/createGroup.js b/frontend/src/api/Organization/createGroup.js deleted file mode 100644 index 91c2e0aa7..000000000 --- a/frontend/src/api/Organization/createGroup.js +++ /dev/null @@ -1,20 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createGroup = ({ organizationUri, description, label, role }) => ({ - variables: { - input: { organizationUri, description, label, role: role || 'Member' } - }, - mutation: gql` - mutation CreateGroup($input: NewGroupInput) { - createGroup(input: $input) { - groupUri - label - groupRoleInOrganization - created - userRoleInGroup - } - } - ` -}); - -export default createGroup; diff --git a/frontend/src/api/Organization/inviteGroup.js b/frontend/src/api/Organization/inviteGroup.js deleted file mode 100644 index b09d432d7..000000000 --- a/frontend/src/api/Organization/inviteGroup.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const inviteGroupToOrganization = (input) => ({ - variables: { - input - }, - mutation: gql` - mutation inviteGroupToOrganization( - $input: InviteGroupToOrganizationInput! 
- ) { - inviteGroupToOrganization(input: $input) { - organizationUri - } - } - ` -}); - -export default inviteGroupToOrganization; diff --git a/frontend/src/api/Organization/inviteUser.js b/frontend/src/api/Organization/inviteUser.js deleted file mode 100644 index 8ba893c3f..000000000 --- a/frontend/src/api/Organization/inviteUser.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const inviteUser = ({ organizationUri, userName, role }) => ({ - variables: { - input: { organizationUri, userName, role: role || 'Member' } - }, - mutation: gql` - mutation InviteUser($input: NewOrganizationUserInput) { - inviteUser(input: $input) { - userName - userRoleInOrganization - created - } - } - ` -}); - -export default inviteUser; diff --git a/frontend/src/api/Organization/listInvitedGroups.js b/frontend/src/api/Organization/listInvitedGroups.js deleted file mode 100644 index 7370537bc..000000000 --- a/frontend/src/api/Organization/listInvitedGroups.js +++ /dev/null @@ -1,31 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listOrganizationGroups = ({ filter, organizationUri }) => ({ - variables: { - organizationUri, - filter - }, - query: gql` - query listOrganizationGroups( - $filter: GroupFilter - $organizationUri: String! - ) { - listOrganizationGroups( - organizationUri: $organizationUri - filter: $filter - ) { - count - page - pages - hasNext - hasPrevious - nodes { - groupUri - invitedBy - created - } - } - } - ` -}); -export default listOrganizationGroups; diff --git a/frontend/src/api/Organization/listOrganizationEnvironment.js b/frontend/src/api/Organization/listOrganizationEnvironment.js deleted file mode 100644 index 8098d4429..000000000 --- a/frontend/src/api/Organization/listOrganizationEnvironment.js +++ /dev/null @@ -1,26 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listOrganizationGroups = ({ term, organizationUri }) => ({ - variables: { - organizationUri, - filter: { term: term || '' } - }, - query: gql` - query getOrg($organizationUri: String, $filter: GroupFilter) { - getOrganization(organizationUri: $organizationUri) { - groups(filter: $filter) { - count - nodes { - groupUri - label - created - groupRoleInOrganization - userRoleInGroup - } - } - } - } - ` -}); - -export default listOrganizationGroups; diff --git a/frontend/src/api/Organization/listOrganizationTopics.js b/frontend/src/api/Organization/listOrganizationTopics.js deleted file mode 100644 index c5d044ede..000000000 --- a/frontend/src/api/Organization/listOrganizationTopics.js +++ /dev/null @@ -1,32 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listOrganizationTopics = ({ filter, organizationUri }) => ({ - variables: { - organizationUri, - filter - }, - query: gql` - query ListOrganizationTopics( - $organizationUri: String - $filter: OrganizationTopicFilter - ) { - listOrganizationTopics( - organizationUri: $organizationUri - filter: $filter - ) { - count - page - pages - hasNext - hasPrevious - nodes { - label - topicUri - description - } - } - } - ` -}); - -export default listOrganizationTopics; diff --git a/frontend/src/api/Organization/listOrganizationUsers.js b/frontend/src/api/Organization/listOrganizationUsers.js deleted file mode 100644 index b1982ea88..000000000 --- a/frontend/src/api/Organization/listOrganizationUsers.js +++ /dev/null @@ -1,35 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listOrganizationUsers = ({ filter, organizationUri }) => { - console.log('listOrganizationUsers'); - return { - variables: { - organizationUri, - filter - }, - query: gql` - 
query getOrg($organizationUri: String, $filter: OrganizationUserFilter) { - getOrganization(organizationUri: $organizationUri) { - organizationUri - label - userRoleInOrganization - users(filter: $filter) { - count - page - pageSize - pages - hasNext - hasPrevious - nodes { - userName - created - userRoleInOrganization - } - } - } - } - ` - }; -}; - -export default listOrganizationUsers; diff --git a/frontend/src/api/Organization/removeGroup.js b/frontend/src/api/Organization/removeGroup.js deleted file mode 100644 index 393cc98d3..000000000 --- a/frontend/src/api/Organization/removeGroup.js +++ /dev/null @@ -1,23 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeGroupFromOrganization = ({ organizationUri, groupUri }) => ({ - variables: { - organizationUri, - groupUri - }, - mutation: gql` - mutation removeGroupFromOrganization( - $organizationUri: String! - $groupUri: String! - ) { - removeGroupFromOrganization( - organizationUri: $organizationUri - groupUri: $groupUri - ) { - organizationUri - } - } - ` -}); - -export default removeGroupFromOrganization; diff --git a/frontend/src/api/Organization/removeUser.js b/frontend/src/api/Organization/removeUser.js deleted file mode 100644 index 96c592679..000000000 --- a/frontend/src/api/Organization/removeUser.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeUser = ({ input }) => ({ - variables: { - input - }, - mutation: gql` - mutation RemoveUser($input: RemoveOrganizationUserInput) { - removeUser(input: $input) - } - ` -}); - -export default removeUser; diff --git a/frontend/src/api/Organization/updateGroupRoleInOrganization.js b/frontend/src/api/Organization/updateGroupRoleInOrganization.js deleted file mode 100644 index fb9823652..000000000 --- a/frontend/src/api/Organization/updateGroupRoleInOrganization.js +++ /dev/null @@ -1,21 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateGroupRoleInOrganization = ({ groupUri, role }) => ({ - variables: { - input: { role: role || 'Member' }, - groupUri - }, - mutation: gql` - mutation UpdateGroup($groupUri: String, $input: ModifyGroupInput) { - updateGroup(groupUri: $groupUri, input: $input) { - groupUri - groupRoleInOrganization - userRoleInGroup - created - updated - } - } - ` -}); - -export default updateGroupRoleInOrganization; diff --git a/frontend/src/api/Organization/updateTopic.js b/frontend/src/api/Organization/updateTopic.js deleted file mode 100644 index 9224d90ed..000000000 --- a/frontend/src/api/Organization/updateTopic.js +++ /dev/null @@ -1,21 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateTopic = ({ input, topicUri }) => ({ - variables: { - topicUri, - input - }, - mutation: gql` - mutation UpdateTopic($topicUri: String, $input: OrganizationTopicInput) { - updateTopic(organizationUri: $organizationUri, input: $input) { - topicUri - label - description - created - owner - } - } - ` -}); - -export default updateTopic; diff --git a/frontend/src/api/Organization/updateUserRole.js b/frontend/src/api/Organization/updateUserRole.js deleted file mode 100644 index cc2923782..000000000 --- a/frontend/src/api/Organization/updateUserRole.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateUserRole = ({ organizationUri, userName, role }) => ({ - variables: { - input: { organizationUri, userName, role: role || 'Member' } - }, - mutation: gql` - mutation UpdateUser($input: ModifyOrganizationUserInput) { - updateUser(input: $input) { - userName - userRoleInOrganization - created - } - } - ` -}); - 
-export default updateUserRole; diff --git a/frontend/src/api/RedshiftCluster/addDatasetToCluster.js b/frontend/src/api/RedshiftCluster/addDatasetToCluster.js deleted file mode 100644 index d863c773f..000000000 --- a/frontend/src/api/RedshiftCluster/addDatasetToCluster.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const addDatasetToCluster = ({ clusterUri, datasetUri }) => ({ - variables: { clusterUri, datasetUri }, - mutation: gql` - mutation addDatasetToRedshiftCluster( - $clusterUri: String - $datasetUri: String - ) { - addDatasetToRedshiftCluster( - clusterUri: $clusterUri - datasetUri: $datasetUri - ) - } - ` -}); - -export default addDatasetToCluster; diff --git a/frontend/src/api/RedshiftCluster/copyTableToCluster.js b/frontend/src/api/RedshiftCluster/copyTableToCluster.js deleted file mode 100644 index f22c9a01e..000000000 --- a/frontend/src/api/RedshiftCluster/copyTableToCluster.js +++ /dev/null @@ -1,30 +0,0 @@ -import { gql } from 'apollo-boost'; - -const copyTableToCluster = ({ - clusterUri, - datasetUri, - tableUri, - schema, - dataLocation -}) => ({ - variables: { clusterUri, datasetUri, tableUri, schema, dataLocation }, - mutation: gql` - mutation enableRedshiftClusterDatasetTableCopy( - $clusterUri: String! - $datasetUri: String! - $tableUri: String! - $schema: String! - $dataLocation: String - ) { - enableRedshiftClusterDatasetTableCopy( - clusterUri: $clusterUri - datasetUri: $datasetUri - tableUri: $tableUri - schema: $schema - dataLocation: $dataLocation - ) - } - ` -}); - -export default copyTableToCluster; diff --git a/frontend/src/api/RedshiftCluster/createCluster.js b/frontend/src/api/RedshiftCluster/createCluster.js deleted file mode 100644 index 12c0b80be..000000000 --- a/frontend/src/api/RedshiftCluster/createCluster.js +++ /dev/null @@ -1,26 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createRedshiftCluster = ({ environmentUri, input }) => ({ - variables: { - environmentUri, - clusterInput: input - }, - mutation: gql` - mutation createRedshiftCluster( - $environmentUri: String! - $clusterInput: NewClusterInput! - ) { - createRedshiftCluster( - environmentUri: $environmentUri - clusterInput: $clusterInput - ) { - clusterUri - name - label - created - } - } - ` -}); - -export default createRedshiftCluster; diff --git a/frontend/src/api/RedshiftCluster/deleteCluster.js b/frontend/src/api/RedshiftCluster/deleteCluster.js deleted file mode 100644 index 6de330ca0..000000000 --- a/frontend/src/api/RedshiftCluster/deleteCluster.js +++ /dev/null @@ -1,21 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteRedshiftCluster = (clusterUri, deleteFromAWS) => ({ - variables: { - clusterUri, - deleteFromAWS - }, - mutation: gql` - mutation deleteRedshiftCluster( - $clusterUri: String! 
- $deleteFromAWS: Boolean - ) { - deleteRedshiftCluster( - clusterUri: $clusterUri - deleteFromAWS: $deleteFromAWS - ) - } - ` -}); - -export default deleteRedshiftCluster; diff --git a/frontend/src/api/RedshiftCluster/disableClusterDatasetCopy.js b/frontend/src/api/RedshiftCluster/disableClusterDatasetCopy.js deleted file mode 100644 index bc60b24c0..000000000 --- a/frontend/src/api/RedshiftCluster/disableClusterDatasetCopy.js +++ /dev/null @@ -1,24 +0,0 @@ -import { gql } from 'apollo-boost'; - -const disableRedshiftClusterDatasetCopy = ({ - clusterUri, - datasetUri, - tableUri -}) => ({ - variables: { clusterUri, datasetUri, tableUri }, - mutation: gql` - mutation disableRedshiftClusterDatasetTableCopy( - $clusterUri: String - $datasetUri: String - $tableUri: String - ) { - disableRedshiftClusterDatasetTableCopy( - clusterUri: $clusterUri - datasetUri: $datasetUri - tableUri: $tableUri - ) - } - ` -}); - -export default disableRedshiftClusterDatasetCopy; diff --git a/frontend/src/api/RedshiftCluster/enableClusterDatasetCopy.js b/frontend/src/api/RedshiftCluster/enableClusterDatasetCopy.js deleted file mode 100644 index b91bd11e6..000000000 --- a/frontend/src/api/RedshiftCluster/enableClusterDatasetCopy.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const enableRedshiftClusterDatasetCopy = ({ clusterUri, datasetUri }) => ({ - variables: { clusterUri, datasetUri }, - mutation: gql` - mutation enableRedshiftClusterDatasetCopy( - $clusterUri: String - $datasetUri: String - ) { - enableRedshiftClusterDatasetCopy( - clusterUri: $clusterUri - datasetUri: $datasetUri - ) - } - ` -}); - -export default enableRedshiftClusterDatasetCopy; diff --git a/frontend/src/api/RedshiftCluster/getCluster.js b/frontend/src/api/RedshiftCluster/getCluster.js deleted file mode 100644 index 6fcd2f7a7..000000000 --- a/frontend/src/api/RedshiftCluster/getCluster.js +++ /dev/null @@ -1,75 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getCluster = (clusterUri) => ({ - variables: { - clusterUri - }, - query: gql` - query GetRedshiftCluster($clusterUri: String!) { - getRedshiftCluster(clusterUri: $clusterUri) { - clusterUri - environmentUri - name - label - description - tags - owner - created - updated - AwsAccountId - region - clusterArn - clusterName - created - databaseName - databaseUser - datahubSecret - masterUsername - masterSecret - masterDatabaseName - nodeType - numberOfNodes - kmsAlias - status - subnetGroupName - CFNStackName - CFNStackStatus - CFNStackArn - port - endpoint - IAMRoles - subnetIds - vpc - securityGroupIds - userRoleForCluster - userRoleInEnvironment - imported - SamlGroupName - organization { - organizationUri - label - name - } - environment { - environmentUri - label - name - } - stack { - stack - status - stackUri - targetUri - accountid - region - stackid - link - outputs - resources - } - } - } - ` -}); - -export default getCluster; diff --git a/frontend/src/api/RedshiftCluster/getClusterConsoleAccess.js b/frontend/src/api/RedshiftCluster/getClusterConsoleAccess.js deleted file mode 100644 index ff0e275d6..000000000 --- a/frontend/src/api/RedshiftCluster/getClusterConsoleAccess.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getClusterConsoleAccess = (clusterUri) => ({ - variables: { - clusterUri - }, - query: gql` - query getRedshiftClusterConsoleAccess($clusterUri: String!) 
{ - getRedshiftClusterConsoleAccess(clusterUri: $clusterUri) - } - ` -}); - -export default getClusterConsoleAccess; diff --git a/frontend/src/api/RedshiftCluster/getClusterDatabaseCredentials.js b/frontend/src/api/RedshiftCluster/getClusterDatabaseCredentials.js deleted file mode 100644 index 9c19429f7..000000000 --- a/frontend/src/api/RedshiftCluster/getClusterDatabaseCredentials.js +++ /dev/null @@ -1,21 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getRedshiftClusterDatabaseCredentials = (clusterUri) => ({ - variables: { - clusterUri - }, - query: gql` - query getRedshiftClusterDatabaseCredentials($clusterUri: String!) { - getRedshiftClusterDatabaseCredentials(clusterUri: $clusterUri) { - clusterUri - user - database - port - endpoint - password - } - } - ` -}); - -export default getRedshiftClusterDatabaseCredentials; diff --git a/frontend/src/api/RedshiftCluster/importCluster.js b/frontend/src/api/RedshiftCluster/importCluster.js deleted file mode 100644 index 445f83572..000000000 --- a/frontend/src/api/RedshiftCluster/importCluster.js +++ /dev/null @@ -1,26 +0,0 @@ -import { gql } from 'apollo-boost'; - -const importRedshiftCluster = ({ environmentUri, input }) => ({ - variables: { - environmentUri, - clusterInput: input - }, - mutation: gql` - mutation importRedshiftCluster( - $environmentUri: String! - $clusterInput: ImportClusterInput! - ) { - importRedshiftCluster( - environmentUri: $environmentUri - clusterInput: $clusterInput - ) { - clusterUri - name - label - created - } - } - ` -}); - -export default importRedshiftCluster; diff --git a/frontend/src/api/RedshiftCluster/listAvailableDatasetTables.js b/frontend/src/api/RedshiftCluster/listAvailableDatasetTables.js deleted file mode 100644 index daea12cf2..000000000 --- a/frontend/src/api/RedshiftCluster/listAvailableDatasetTables.js +++ /dev/null @@ -1,40 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listAvailableDatasetTables = ({ clusterUri, filter }) => ({ - variables: { - clusterUri, - filter - }, - query: gql` - query listRedshiftClusterAvailableDatasetTables( - $clusterUri: String! - $filter: DatasetTableFilter - ) { - listRedshiftClusterAvailableDatasetTables( - clusterUri: $clusterUri - filter: $filter - ) { - count - page - pages - hasNext - hasPrevious - count - nodes { - datasetUri - tableUri - name - label - GlueDatabaseName - GlueTableName - S3Prefix - dataset { - S3BucketName - } - } - } - } - ` -}); - -export default listAvailableDatasetTables; diff --git a/frontend/src/api/RedshiftCluster/listAvailableDatasets.js b/frontend/src/api/RedshiftCluster/listAvailableDatasets.js deleted file mode 100644 index ce817429b..000000000 --- a/frontend/src/api/RedshiftCluster/listAvailableDatasets.js +++ /dev/null @@ -1,59 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listAvailableDatasets = ({ clusterUri, filter }) => ({ - variables: { - clusterUri, - filter - }, - query: gql` - query ListRedshiftClusterAvailableDatasets( - $clusterUri: String! 
- $filter: RedshiftClusterDatasetFilter - ) { - listRedshiftClusterAvailableDatasets( - clusterUri: $clusterUri - filter: $filter - ) { - count - page - pages - hasNext - hasPrevious - nodes { - datasetUri - name - AwsAccountId - region - S3BucketName - GlueDatabaseName - created - owner - label - region - tags - userRoleForDataset - redshiftClusterPermission(clusterUri: $clusterUri) - description - organization { - name - organizationUri - label - } - statistics { - tables - locations - } - environment { - environmentUri - name - AwsAccountId - SamlGroupName - region - } - } - } - } - ` -}); - -export default listAvailableDatasets; diff --git a/frontend/src/api/RedshiftCluster/listClusterDatasetTables.js b/frontend/src/api/RedshiftCluster/listClusterDatasetTables.js deleted file mode 100644 index 8f0d52037..000000000 --- a/frontend/src/api/RedshiftCluster/listClusterDatasetTables.js +++ /dev/null @@ -1,40 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listClusterDatasetTables = ({ clusterUri, filter }) => ({ - variables: { - clusterUri, - filter - }, - query: gql` - query listRedshiftClusterCopyEnabledTables( - $clusterUri: String! - $filter: DatasetTableFilter - ) { - listRedshiftClusterCopyEnabledTables( - clusterUri: $clusterUri - filter: $filter - ) { - count - page - pages - hasNext - hasPrevious - count - nodes { - datasetUri - tableUri - name - label - GlueDatabaseName - GlueTableName - S3Prefix - AwsAccountId - RedshiftSchema(clusterUri: $clusterUri) - RedshiftCopyDataLocation(clusterUri: $clusterUri) - } - } - } - ` -}); - -export default listClusterDatasetTables; diff --git a/frontend/src/api/RedshiftCluster/listClusterDatasets.js b/frontend/src/api/RedshiftCluster/listClusterDatasets.js deleted file mode 100644 index 7218d860c..000000000 --- a/frontend/src/api/RedshiftCluster/listClusterDatasets.js +++ /dev/null @@ -1,57 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listClusterDatasets = ({ clusterUri, filter }) => ({ - variables: { - clusterUri, - filter - }, - query: gql` - query ListRedshiftClusterDatasets( - $clusterUri: String! - $filter: RedshiftClusterDatasetFilter - ) { - listRedshiftClusterDatasets(clusterUri: $clusterUri, filter: $filter) { - count - page - pages - hasNext - hasPrevious - nodes { - datasetUri - name - AwsAccountId - region - S3BucketName - GlueDatabaseName - created - owner - label - region - tags - userRoleForDataset - redshiftClusterPermission(clusterUri: $clusterUri) - redshiftDataCopyEnabled(clusterUri: $clusterUri) - description - organization { - name - organizationUri - label - } - statistics { - tables - locations - } - environment { - environmentUri - name - AwsAccountId - SamlGroupName - region - } - } - } - } - ` -}); - -export default listClusterDatasets; diff --git a/frontend/src/api/RedshiftCluster/listEnvironmentClusters.js b/frontend/src/api/RedshiftCluster/listEnvironmentClusters.js deleted file mode 100644 index 54a1f8b23..000000000 --- a/frontend/src/api/RedshiftCluster/listEnvironmentClusters.js +++ /dev/null @@ -1,77 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listEnvironmentClusters = (environmentUri, filter) => ({ - variables: { - environmentUri, - filter - }, - query: gql` - query listEnvironmentClusters( - $environmentUri: String! 
- $filter: RedshiftClusterFilter - ) { - listEnvironmentClusters( - environmentUri: $environmentUri - filter: $filter - ) { - count - page - pages - hasNext - hasPrevious - nodes { - clusterUri - environmentUri - name - label - description - tags - owner - created - updated - AwsAccountId - region - clusterArn - clusterName - created - databaseName - databaseUser - masterUsername - masterDatabaseName - nodeType - numberOfNodes - kmsAlias - status - subnetGroupName - CFNStackName - CFNStackStatus - CFNStackArn - port - endpoint - IAMRoles - subnetIds - securityGroupIds - userRoleForCluster - userRoleInEnvironment - imported - stack { - status - } - vpc - organization { - organizationUri - label - name - } - environment { - environmentUri - label - name - } - } - } - } - ` -}); - -export default listEnvironmentClusters; diff --git a/frontend/src/api/RedshiftCluster/pauseCluster.js b/frontend/src/api/RedshiftCluster/pauseCluster.js deleted file mode 100644 index e32b1d2fd..000000000 --- a/frontend/src/api/RedshiftCluster/pauseCluster.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const pauseRedshiftCluster = (clusterUri) => ({ - variables: { - clusterUri - }, - mutation: gql` - mutation pauseRedshiftCluster($clusterUri: String!) { - pauseRedshiftCluster(clusterUri: $clusterUri) - } - ` -}); - -export default pauseRedshiftCluster; diff --git a/frontend/src/api/RedshiftCluster/rebootCluster.js b/frontend/src/api/RedshiftCluster/rebootCluster.js deleted file mode 100644 index ca499e6b1..000000000 --- a/frontend/src/api/RedshiftCluster/rebootCluster.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const rebootRedshiftCluster = (clusterUri) => ({ - variables: { - clusterUri - }, - mutation: gql` - mutation rebootRedshiftCluster($clusterUri: String!) { - rebootRedshiftCluster(clusterUri: $clusterUri) - } - ` -}); - -export default rebootRedshiftCluster; diff --git a/frontend/src/api/RedshiftCluster/removeDatasetFromCluster.js b/frontend/src/api/RedshiftCluster/removeDatasetFromCluster.js deleted file mode 100644 index 3da8fa94d..000000000 --- a/frontend/src/api/RedshiftCluster/removeDatasetFromCluster.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeDatasetFromCluster = ({ clusterUri, datasetUri }) => ({ - variables: { clusterUri, datasetUri }, - mutation: gql` - mutation removeDatasetFromRedshiftCluster( - $clusterUri: String - $datasetUri: String - ) { - removeDatasetFromRedshiftCluster( - clusterUri: $clusterUri - datasetUri: $datasetUri - ) - } - ` -}); - -export default removeDatasetFromCluster; diff --git a/frontend/src/api/RedshiftCluster/resumeCluster.js b/frontend/src/api/RedshiftCluster/resumeCluster.js deleted file mode 100644 index fc9fae8a6..000000000 --- a/frontend/src/api/RedshiftCluster/resumeCluster.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const resumeRedshiftCluster = (clusterUri) => ({ - variables: { - clusterUri - }, - mutation: gql` - mutation resumeRedshiftCluster($clusterUri: String!) 
{ - resumeRedshiftCluster(clusterUri: $clusterUri) - } - ` -}); - -export default resumeRedshiftCluster; diff --git a/frontend/src/api/RedshiftCluster/searchClusters.js b/frontend/src/api/RedshiftCluster/searchClusters.js deleted file mode 100644 index ca8df03a7..000000000 --- a/frontend/src/api/RedshiftCluster/searchClusters.js +++ /dev/null @@ -1,70 +0,0 @@ -import { gql } from 'apollo-boost'; - -const searchRedshiftClusters = (filter) => ({ - variables: { - filter - }, - query: gql` - query searchRedshiftClusters($filter: RedshiftClusterFilter) { - searchRedshiftClusters(filter: $filter) { - count - page - pages - hasNext - hasPrevious - nodes { - clusterUri - environmentUri - name - label - description - tags - owner - created - updated - AwsAccountId - region - clusterArn - clusterName - created - databaseName - databaseUser - masterUsername - masterDatabaseName - nodeType - numberOfNodes - kmsAlias - status - subnetGroupName - CFNStackName - CFNStackStatus - CFNStackArn - port - endpoint - IAMRoles - subnetIds - securityGroupIds - userRoleForCluster - userRoleInEnvironment - imported - stack { - status - } - vpc - organization { - organizationUri - label - name - } - environment { - environmentUri - label - name - } - } - } - } - ` -}); - -export default searchRedshiftClusters; diff --git a/frontend/src/api/SagemakerNotebook/getSagemakerNotebookPresignedUrl.js b/frontend/src/api/SagemakerNotebook/getSagemakerNotebookPresignedUrl.js deleted file mode 100644 index cd2ccce35..000000000 --- a/frontend/src/api/SagemakerNotebook/getSagemakerNotebookPresignedUrl.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getSagemakerNotebookPresignedUrl = (notebookUri) => ({ - variables: { - notebookUri - }, - query: gql` - query getSagemakerNotebookPresignedUrl($notebookUri: String!) { - getSagemakerNotebookPresignedUrl(notebookUri: $notebookUri) - } - ` -}); - -export default getSagemakerNotebookPresignedUrl; diff --git a/frontend/src/api/SagemakerNotebook/startNotebookInstance.js b/frontend/src/api/SagemakerNotebook/startNotebookInstance.js deleted file mode 100644 index 83a970aca..000000000 --- a/frontend/src/api/SagemakerNotebook/startNotebookInstance.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const startSagemakerNotebook = (notebookUri) => ({ - variables: { - notebookUri - }, - mutation: gql` - mutation StartSagemakerNotebook($notebookUri: String!) { - startSagemakerNotebook(notebookUri: $notebookUri) - } - ` -}); - -export default startSagemakerNotebook; diff --git a/frontend/src/api/SagemakerNotebook/stopNotebookInstance.js b/frontend/src/api/SagemakerNotebook/stopNotebookInstance.js deleted file mode 100644 index 6dbad845c..000000000 --- a/frontend/src/api/SagemakerNotebook/stopNotebookInstance.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const stopSagemakerNotebook = (notebookUri) => ({ - variables: { - notebookUri - }, - mutation: gql` - mutation StopSagemakerNotebook($notebookUri: String!) 
{ - stopSagemakerNotebook(notebookUri: $notebookUri) - } - ` -}); - -export default stopSagemakerNotebook; diff --git a/frontend/src/api/SagemakerStudio/createSagemakerStudioUserProfile.js b/frontend/src/api/SagemakerStudio/createSagemakerStudioUserProfile.js deleted file mode 100644 index 968312bac..000000000 --- a/frontend/src/api/SagemakerStudio/createSagemakerStudioUserProfile.js +++ /dev/null @@ -1,23 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createSagemakerStudioUserProfile = (input) => ({ - variables: { - input - }, - mutation: gql` - mutation createSagemakerStudioUserProfile( - $input: NewSagemakerStudioUserProfileInput - ) { - createSagemakerStudioUserProfile(input: $input) { - sagemakerStudioUserProfileUri - name - label - created - description - tags - } - } - ` -}); - -export default createSagemakerStudioUserProfile; diff --git a/frontend/src/api/SagemakerStudio/deleteSagemakerStudioUserProfile.js b/frontend/src/api/SagemakerStudio/deleteSagemakerStudioUserProfile.js deleted file mode 100644 index b72d2f949..000000000 --- a/frontend/src/api/SagemakerStudio/deleteSagemakerStudioUserProfile.js +++ /dev/null @@ -1,24 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteSagemakerStudioUserProfile = ( - sagemakerStudioUserProfileUri, - deleteFromAWS -) => ({ - variables: { - sagemakerStudioUserProfileUri, - deleteFromAWS - }, - mutation: gql` - mutation deleteSagemakerStudioUserProfile( - $sagemakerStudioUserProfileUri: String! - $deleteFromAWS: Boolean - ) { - deleteSagemakerStudioUserProfile( - sagemakerStudioUserProfileUri: $sagemakerStudioUserProfileUri - deleteFromAWS: $deleteFromAWS - ) - } - ` -}); - -export default deleteSagemakerStudioUserProfile; diff --git a/frontend/src/api/SagemakerStudio/getSagemakerStudioUserProfile.js b/frontend/src/api/SagemakerStudio/getSagemakerStudioUserProfile.js deleted file mode 100644 index 1adb5acb2..000000000 --- a/frontend/src/api/SagemakerStudio/getSagemakerStudioUserProfile.js +++ /dev/null @@ -1,61 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getSagemakerStudioUserProfile = (sagemakerStudioUserProfileUri) => ({ - variables: { - sagemakerStudioUserProfileUri - }, - query: gql` - query getSagemakerStudioUserProfile( - $sagemakerStudioUserProfileUri: String! 
- ) { - getSagemakerStudioUserProfile( - sagemakerStudioUserProfileUri: $sagemakerStudioUserProfileUri - ) { - sagemakerStudioUserProfileUri - name - owner - description - label - created - tags - userRoleForSagemakerStudioUserProfile - sagemakerStudioUserProfileStatus - SamlAdminGroupName - sagemakerStudioUserProfileApps { - DomainId - UserProfileName - AppType - AppName - Status - } - environment { - label - name - environmentUri - AwsAccountId - region - EnvironmentDefaultIAMRoleArn - } - organization { - label - name - organizationUri - } - stack { - stack - status - stackUri - targetUri - accountid - region - stackid - link - outputs - resources - } - } - } - ` -}); - -export default getSagemakerStudioUserProfile; diff --git a/frontend/src/api/SagemakerStudio/getSagemakerStudioUserProfilePresignedUrl.js b/frontend/src/api/SagemakerStudio/getSagemakerStudioUserProfilePresignedUrl.js deleted file mode 100644 index ea40cb8e5..000000000 --- a/frontend/src/api/SagemakerStudio/getSagemakerStudioUserProfilePresignedUrl.js +++ /dev/null @@ -1,20 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getSagemakerStudioUserProfilePresignedUrl = ( - sagemakerStudioUserProfileUri -) => ({ - variables: { - sagemakerStudioUserProfileUri - }, - query: gql` - query getSagemakerStudioUserProfilePresignedUrl( - $sagemakerStudioUserProfileUri: String! - ) { - getSagemakerStudioUserProfilePresignedUrl( - sagemakerStudioUserProfileUri: $sagemakerStudioUserProfileUri - ) - } - ` -}); - -export default getSagemakerStudioUserProfilePresignedUrl; diff --git a/frontend/src/api/SagemakerStudio/listSagemakerStudioUserProfiles.js b/frontend/src/api/SagemakerStudio/listSagemakerStudioUserProfiles.js deleted file mode 100644 index 9d9f88cff..000000000 --- a/frontend/src/api/SagemakerStudio/listSagemakerStudioUserProfiles.js +++ /dev/null @@ -1,50 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listSagemakerStudioUserProfiles = (filter) => ({ - variables: { - filter - }, - query: gql` - query listSagemakerStudioUserProfiles( - $filter: SagemakerStudioUserProfileFilter - ) { - listSagemakerStudioUserProfiles(filter: $filter) { - count - page - pages - hasNext - hasPrevious - nodes { - sagemakerStudioUserProfileUri - name - owner - description - label - created - tags - sagemakerStudioUserProfileStatus - userRoleForSagemakerStudioUserProfile - environment { - label - name - environmentUri - AwsAccountId - region - SamlGroupName - } - organization { - label - name - organizationUri - } - stack { - stack - status - } - } - } - } - ` -}); - -export default listSagemakerStudioUserProfiles; diff --git a/frontend/src/api/SavedQuery/deployScheduledQuery.js b/frontend/src/api/SavedQuery/deployScheduledQuery.js deleted file mode 100644 index 9028d3d90..000000000 --- a/frontend/src/api/SavedQuery/deployScheduledQuery.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deployScheduledQuery = (scheduledQueryUri) => ({ - variables: { - scheduledQueryUri - }, - mutation: gql` - mutation DeployScheduledQuery($scheduledQueryUri: String!) 
{ - deployScheduledQuery(scheduledQueryUri: $scheduledQueryUri) - } - ` -}); - -export default deployScheduledQuery; diff --git a/frontend/src/api/SavedQuery/listScheduledQueryExecutions.js.bak b/frontend/src/api/SavedQuery/listScheduledQueryExecutions.js.bak deleted file mode 100644 index 82e7812b6..000000000 --- a/frontend/src/api/SavedQuery/listScheduledQueryExecutions.js.bak +++ /dev/null @@ -1,25 +0,0 @@ -import { gql } from "apollo-boost"; - -const listScheduledQueryExecutions = (scheduledQueryUri)=>{ - return { - variables:{ - scheduledQueryUri:scheduledQueryUri - }, - query :gql` - query ListScheduledQueryExecutions( - $scheduledQueryUri:String!){ - listScheduledQueryExecutions( - scheduledQueryUri:$scheduledQueryUri - ){ - executionArn - status - startDate - stopDate - } - } - ` - } -} - - -export default listScheduledQueryExecutions ; diff --git a/frontend/src/api/SavedQuery/removeSavedQuery.js b/frontend/src/api/SavedQuery/removeSavedQuery.js deleted file mode 100644 index 52a130ed6..000000000 --- a/frontend/src/api/SavedQuery/removeSavedQuery.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeSavedQuery = (queryUri) => ({ - variables: { - queryUri - }, - mutation: gql` - mutation RemoveSavedQuery($queryUri: String!) { - removeSavedQuery(savedQueryUri: $queryUri) - } - ` -}); - -export default removeSavedQuery; diff --git a/frontend/src/api/SavedQuery/runScheduledQuery.js b/frontend/src/api/SavedQuery/runScheduledQuery.js deleted file mode 100644 index 871f2efb6..000000000 --- a/frontend/src/api/SavedQuery/runScheduledQuery.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const runScheduledQuery = (scheduledQueryUri) => ({ - variables: { - scheduledQueryUri - }, - mutation: gql` - mutation RunScheduledQuery($scheduledQueryUri: String!) { - runScheduledQuery(scheduledQueryUri: $scheduledQueryUri) - } - ` -}); - -export default runScheduledQuery; diff --git a/frontend/src/api/SavedQuery/runScheduledQuery.js.bak b/frontend/src/api/SavedQuery/runScheduledQuery.js.bak deleted file mode 100644 index 835eb7edd..000000000 --- a/frontend/src/api/SavedQuery/runScheduledQuery.js.bak +++ /dev/null @@ -1,20 +0,0 @@ -import { gql } from "apollo-boost"; - -const runScheduledQuery = (scheduledQueryUri)=>{ - return { - variables:{ - scheduledQueryUri:scheduledQueryUri - }, - mutation :gql` - mutation RunScheduledQuery( - $scheduledQueryUri:String!){ - runScheduledQuery( - scheduledQueryUri:$scheduledQueryUri - ) - } - ` - } -} - - -export default runScheduledQuery ; diff --git a/frontend/src/api/ShareObject/addSharedItem.js b/frontend/src/api/ShareObject/addSharedItem.js deleted file mode 100644 index 8c12845cd..000000000 --- a/frontend/src/api/ShareObject/addSharedItem.js +++ /dev/null @@ -1,20 +0,0 @@ -import { gql } from 'apollo-boost'; - -const addSharedItem = ({ shareUri, input }) => { - console.log('rcv', input); - return { - variables: { - shareUri, - input - }, - mutation: gql` - mutation AddSharedItem($shareUri: String!, $input: AddSharedItemInput!) 
{ - addSharedItem(shareUri: $shareUri, input: $input) { - shareItemUri - } - } - ` - }; -}; - -export default addSharedItem; diff --git a/frontend/src/api/ShareObject/approveShareObject.js b/frontend/src/api/ShareObject/approveShareObject.js deleted file mode 100644 index 73d32768a..000000000 --- a/frontend/src/api/ShareObject/approveShareObject.js +++ /dev/null @@ -1,17 +0,0 @@ -import { gql } from 'apollo-boost'; - -const approveShareObject = ({ shareUri }) => ({ - variables: { - shareUri - }, - mutation: gql` - mutation approveShareObject($shareUri: String!) { - approveShareObject(shareUri: $shareUri) { - shareUri - status - } - } - ` -}); - -export default approveShareObject; diff --git a/frontend/src/api/ShareObject/deleteShareObject.js b/frontend/src/api/ShareObject/deleteShareObject.js deleted file mode 100644 index 2678a3fd8..000000000 --- a/frontend/src/api/ShareObject/deleteShareObject.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteShareObject = ({ shareUri }) => ({ - variables: { - shareUri - }, - mutation: gql` - mutation DeleteShareObject($shareUri: String!) { - deleteShareObject(shareUri: $shareUri) - } - ` -}); - -export default deleteShareObject; diff --git a/frontend/src/api/ShareObject/listDatasetShareObjects.js b/frontend/src/api/ShareObject/listDatasetShareObjects.js deleted file mode 100644 index 7859c090f..000000000 --- a/frontend/src/api/ShareObject/listDatasetShareObjects.js +++ /dev/null @@ -1,51 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listDatasetShareObjects = ({ datasetUri, filter }) => ({ - variables: { - datasetUri, - filter - }, - query: gql` - query ListDatasetShareObjects( - $datasetUri: String! - $filter: ShareObjectFilter - ) { - getDataset(datasetUri: $datasetUri) { - shares(filter: $filter) { - page - pages - pageSize - hasPrevious - hasNext - count - nodes { - shareUri - created - owner - status - userRoleForShareObject - statistics { - tables - locations - } - principal { - principalId - principalType - principalName - principalIAMRoleName - SamlGroupName - environmentUri - environmentName - AwsAccountId - region - organizationUri - organizationName - } - } - } - } - } - ` -}); - -export default listDatasetShareObjects; diff --git a/frontend/src/api/ShareObject/rejectShareObject.js b/frontend/src/api/ShareObject/rejectShareObject.js deleted file mode 100644 index 5fb45dfee..000000000 --- a/frontend/src/api/ShareObject/rejectShareObject.js +++ /dev/null @@ -1,18 +0,0 @@ -import { gql } from 'apollo-boost'; - -const rejectShareObject = ({ shareUri, rejectPurpose }) => ({ - variables: { - shareUri, - rejectPurpose - }, - mutation: gql` - mutation RejectShareObject($shareUri: String!, $rejectPurpose: String!) { - rejectShareObject(shareUri: $shareUri,rejectPurpose: $rejectPurpose) { - shareUri - status - } - } - ` -}); - -export default rejectShareObject; diff --git a/frontend/src/api/ShareObject/removeSharedItem.js b/frontend/src/api/ShareObject/removeSharedItem.js deleted file mode 100644 index 2174017a5..000000000 --- a/frontend/src/api/ShareObject/removeSharedItem.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeSharedItem = ({ shareItemUri }) => ({ - variables: { - shareItemUri - }, - mutation: gql` - mutation RemoveSharedItem($shareItemUri: String!) 
{ - removeSharedItem(shareItemUri: $shareItemUri) - } - ` -}); - -export default removeSharedItem; diff --git a/frontend/src/api/ShareObject/revokeItemsShareObject.js b/frontend/src/api/ShareObject/revokeItemsShareObject.js deleted file mode 100644 index 0cf82c1f3..000000000 --- a/frontend/src/api/ShareObject/revokeItemsShareObject.js +++ /dev/null @@ -1,17 +0,0 @@ -import { gql } from 'apollo-boost'; - -const revokeItemsShareObject = ({ input }) => ({ - variables: { - input - }, - mutation: gql` - mutation revokeItemsShareObject($input: RevokeItemsInput) { - revokeItemsShareObject(input: $input) { - shareUri - status - } - } - ` -}); - -export default revokeItemsShareObject; diff --git a/frontend/src/api/ShareObject/updateShareRejectReason.js b/frontend/src/api/ShareObject/updateShareRejectReason.js deleted file mode 100644 index 32df0c67e..000000000 --- a/frontend/src/api/ShareObject/updateShareRejectReason.js +++ /dev/null @@ -1,15 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateShareRejectReason = ({ shareUri, rejectPurpose }) => ({ - variables: { - shareUri, - rejectPurpose - }, - mutation: gql` - mutation updateShareRejectReason($shareUri: String!,$rejectPurpose: String!) { - updateShareRejectReason(shareUri: $shareUri, rejectPurpose: $rejectPurpose) - } - ` -}); - -export default updateShareRejectReason; diff --git a/frontend/src/api/ShareObject/updateShareRequestReason.js b/frontend/src/api/ShareObject/updateShareRequestReason.js deleted file mode 100644 index 6bf4dd0dc..000000000 --- a/frontend/src/api/ShareObject/updateShareRequestReason.js +++ /dev/null @@ -1,15 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateShareRequestReason = ({ shareUri, requestPurpose }) => ({ - variables: { - shareUri, - requestPurpose - }, - mutation: gql` - mutation updateShareRequestReason($shareUri: String!, $requestPurpose: String!) { - updateShareRequestReason(shareUri: $shareUri, requestPurpose: $requestPurpose) - } - ` -}); - -export default updateShareRequestReason; diff --git a/frontend/src/api/Tenant/createQuicksightDataSourceSet.js b/frontend/src/api/Tenant/createQuicksightDataSourceSet.js deleted file mode 100644 index f7be6e031..000000000 --- a/frontend/src/api/Tenant/createQuicksightDataSourceSet.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const createQuicksightDataSourceSet = ({vpcConnectionId}) => ({ - variables: { - vpcConnectionId - }, - mutation: gql` - mutation createQuicksightDataSourceSet ($vpcConnectionId: String!) 
{ - createQuicksightDataSourceSet(vpcConnectionId: $vpcConnectionId) - } - ` -}); - -export default createQuicksightDataSourceSet; diff --git a/frontend/src/api/Tenant/getMonitoringDashboardId.js b/frontend/src/api/Tenant/getMonitoringDashboardId.js deleted file mode 100644 index d7a074124..000000000 --- a/frontend/src/api/Tenant/getMonitoringDashboardId.js +++ /dev/null @@ -1,11 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getMonitoringDashboardId = () => ({ - query: gql` - query getMonitoringDashboardId { - getMonitoringDashboardId - } - ` -}); - -export default getMonitoringDashboardId; diff --git a/frontend/src/api/Tenant/getMonitoringVPCConnectionId.js b/frontend/src/api/Tenant/getMonitoringVPCConnectionId.js deleted file mode 100644 index 568e5b713..000000000 --- a/frontend/src/api/Tenant/getMonitoringVPCConnectionId.js +++ /dev/null @@ -1,11 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getMonitoringVPCConnectionId = () => ({ - query: gql` - query getMonitoringVPCConnectionId { - getMonitoringVPCConnectionId - } - ` -}); - -export default getMonitoringVPCConnectionId; diff --git a/frontend/src/api/Tenant/getPlatformAuthorSession.js b/frontend/src/api/Tenant/getPlatformAuthorSession.js deleted file mode 100644 index c2b7728ac..000000000 --- a/frontend/src/api/Tenant/getPlatformAuthorSession.js +++ /dev/null @@ -1,16 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getPlatformAuthorSession = (awsAccount) => ({ - variables: { - awsAccount - }, - query: gql` - query getPlatformAuthorSession($awsAccount: String) { - getPlatformAuthorSession( - awsAccount: $awsAccount - ) - } - ` -}); - -export default getPlatformAuthorSession; diff --git a/frontend/src/api/Tenant/getPlatformReaderSession.js b/frontend/src/api/Tenant/getPlatformReaderSession.js deleted file mode 100644 index 206bf2194..000000000 --- a/frontend/src/api/Tenant/getPlatformReaderSession.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getPlatformReaderSession = (dashboardId) => ({ - variables: { - dashboardId - }, - query: gql` - query getPlatformReaderSession($dashboardId: String) { - getPlatformReaderSession(dashboardId: $dashboardId) - } - ` -}); - -export default getPlatformReaderSession; diff --git a/frontend/src/api/Tenant/getUserRoleInTenant.js b/frontend/src/api/Tenant/getUserRoleInTenant.js deleted file mode 100644 index cda9e8fa2..000000000 --- a/frontend/src/api/Tenant/getUserRoleInTenant.js +++ /dev/null @@ -1,11 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getUserRoleInTenant = () => ({ - query: gql` - query GetUserRoleInTenant { - getUserRoleInTenant - } - ` -}); - -export default getUserRoleInTenant; diff --git a/frontend/src/api/Tenant/listTenantPermissions.js b/frontend/src/api/Tenant/listTenantPermissions.js deleted file mode 100644 index ac234f23e..000000000 --- a/frontend/src/api/Tenant/listTenantPermissions.js +++ /dev/null @@ -1,17 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listTenantPermissions = (filter) => ({ - variables: { - filter - }, - query: gql` - query listTenantPermissions { - listTenantPermissions { - name - description - } - } - ` -}); - -export default listTenantPermissions; diff --git a/frontend/src/api/Tenant/updateSSMParameter.js b/frontend/src/api/Tenant/updateSSMParameter.js deleted file mode 100644 index 731c3465f..000000000 --- a/frontend/src/api/Tenant/updateSSMParameter.js +++ /dev/null @@ -1,15 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateSSMParameter = ({name,value}) => ({ - variables: { - 
name, - value - }, - mutation: gql` - mutation updateSSMParameter ($name: String!, $value: String!) { - updateSSMParameter(name: $name, value: $value) - } - ` -}); - -export default updateSSMParameter; diff --git a/frontend/src/api/Tenant/updateTenantGroupPermissions.js b/frontend/src/api/Tenant/updateTenantGroupPermissions.js deleted file mode 100644 index feb27fad5..000000000 --- a/frontend/src/api/Tenant/updateTenantGroupPermissions.js +++ /dev/null @@ -1,16 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeTenantAdministrator = (input) => ({ - variables: { - input - }, - mutation: gql` - mutation updateGroupTenantPermissions( - $input: UpdateGroupTenantPermissionsInput! - ) { - updateGroupTenantPermissions(input: $input) - } - ` -}); - -export default removeTenantAdministrator; diff --git a/frontend/src/api/Test/test.js b/frontend/src/api/Test/test.js deleted file mode 100644 index dceef7f18..000000000 --- a/frontend/src/api/Test/test.js +++ /dev/null @@ -1,11 +0,0 @@ -import { gql } from 'apollo-boost'; - -const test = () => ({ - query: gql` - query Test { - test - } - ` -}); - -export default test; diff --git a/frontend/src/api/UserProfile/getUserProfile.js b/frontend/src/api/UserProfile/getUserProfile.js deleted file mode 100644 index 413a29cda..000000000 --- a/frontend/src/api/UserProfile/getUserProfile.js +++ /dev/null @@ -1,19 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getUserProfile = (username) => ({ - variables: { - username - }, - query: gql` - query GetUserProfile($username: String) { - getUserProfile(username: $username) { - username - bio - b64EncodedAvatar - tags - } - } - ` -}); - -export default getUserProfile; diff --git a/frontend/src/api/UserProfile/updateUserProfile.js b/frontend/src/api/UserProfile/updateUserProfile.js deleted file mode 100644 index 5d9bb3633..000000000 --- a/frontend/src/api/UserProfile/updateUserProfile.js +++ /dev/null @@ -1,19 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateUserProfile = (input) => ({ - variables: { - input - }, - mutation: gql` - mutation UpdateUserProfile($input: UserProfileInput!) { - updateUserProfile(input: $input) { - username - bio - b64EncodedAvatar - tags - } - } - ` -}); - -export default updateUserProfile; diff --git a/frontend/src/api/Vpc/deleteNetwork.js b/frontend/src/api/Vpc/deleteNetwork.js deleted file mode 100644 index 82be29987..000000000 --- a/frontend/src/api/Vpc/deleteNetwork.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteNetwork = ({ vpcUri }) => ({ - variables: { - vpcUri - }, - mutation: gql` - mutation deleteNetwork($vpcUri: String!) { - deleteNetwork(vpcUri: $vpcUri) - } - ` -}); - -export default deleteNetwork; diff --git a/frontend/src/api/Vpc/getNetwork.js b/frontend/src/api/Vpc/getNetwork.js deleted file mode 100644 index d1bab87f2..000000000 --- a/frontend/src/api/Vpc/getNetwork.js +++ /dev/null @@ -1,24 +0,0 @@ -import { gql } from 'apollo-boost'; - -const getNetwork = (vpcUri) => ({ - variables: { - vpcUri - }, - query: gql` - query getNetwork($vpcUri: String!) 
{ - getNetwork(vpcUri: $vpcUri) { - vpcUri - VpcId - label - description - tags - owner - SamlGroupName - privateSubnetIds - privateSubnetIds - } - } - ` -}); - -export default getNetwork; diff --git a/frontend/src/api/Worksheet/deleteWorksheet.js b/frontend/src/api/Worksheet/deleteWorksheet.js deleted file mode 100644 index 2f6e377e3..000000000 --- a/frontend/src/api/Worksheet/deleteWorksheet.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const deleteWorksheet = (worksheetUri) => ({ - variables: { - worksheetUri - }, - mutation: gql` - mutation deleteWorksheet($worksheetUri: String!) { - deleteWorksheet(worksheetUri: $worksheetUri) - } - ` -}); - -export default deleteWorksheet; diff --git a/frontend/src/api/Worksheet/index.js b/frontend/src/api/Worksheet/index.js deleted file mode 100644 index 7d6c7fc01..000000000 --- a/frontend/src/api/Worksheet/index.js +++ /dev/null @@ -1,23 +0,0 @@ -import listWorksheets from './listWorksheets'; -import createWorksheet from './createWorksheet'; -import updateWorksheet from './updateWorksheet'; -import getWorksheet from './getWorksheet'; -import listWorksheetShares from './listWorksheetShares'; -import shareWorksheet from './shareWorksheet'; -import removeWorksheetShare from './removeWorksheetShare'; -import updateWorksheetShare from './updateWorksheetShare'; -import deleteWorksheet from './deleteWorksheet'; -import runAthenaSqlQuery from './runAthenaSqlQuery'; - -export { - listWorksheets, - createWorksheet, - runAthenaSqlQuery, - updateWorksheet, - getWorksheet, - listWorksheetShares, - shareWorksheet, - removeWorksheetShare, - updateWorksheetShare, - deleteWorksheet -}; diff --git a/frontend/src/api/Worksheet/listWorksheetShares.js b/frontend/src/api/Worksheet/listWorksheetShares.js deleted file mode 100644 index e14da5702..000000000 --- a/frontend/src/api/Worksheet/listWorksheetShares.js +++ /dev/null @@ -1,28 +0,0 @@ -import { gql } from 'apollo-boost'; - -const listWorksheetShares = ({ worksheetUri, filter }) => ({ - variables: { - worksheetUri, - filter - }, - query: gql` - query GetWorksheet($worksheetUri: String!, $filter: WorksheetFilter) { - getWorksheet(worksheetUri: $worksheetUri) { - shares(filter: $filter) { - count - page - pages - hasNext - hasPrevious - nodes { - worksheetShareUri - principalId - principalType - } - } - } - } - ` -}); - -export default listWorksheetShares; diff --git a/frontend/src/api/Worksheet/removeWorksheetShare.js b/frontend/src/api/Worksheet/removeWorksheetShare.js deleted file mode 100644 index f4d82f036..000000000 --- a/frontend/src/api/Worksheet/removeWorksheetShare.js +++ /dev/null @@ -1,14 +0,0 @@ -import { gql } from 'apollo-boost'; - -const removeWorksheetShare = (worksheetShareUri) => ({ - variables: { - worksheetShareUri - }, - mutation: gql` - mutation RemoveWorksheetShare($worksheetShareUri: String!) { - removeWorksheetShare(worksheetShareUri: $worksheetShareUri) - } - ` -}); - -export default removeWorksheetShare; diff --git a/frontend/src/api/Worksheet/runAthenaSqlQuery.js b/frontend/src/api/Worksheet/runAthenaSqlQuery.js deleted file mode 100644 index 8c69c3b93..000000000 --- a/frontend/src/api/Worksheet/runAthenaSqlQuery.js +++ /dev/null @@ -1,28 +0,0 @@ -import { gql } from 'apollo-boost'; - -const runAthenaSqlQuery = ({ sqlQuery, environmentUri, worksheetUri }) => ({ - variables: { - sqlQuery, - environmentUri, - worksheetUri - }, - query: gql` - query runAthenaSqlQuery($environmentUri: String!, $worksheetUri: String!, $sqlQuery: String!) 
{ - runAthenaSqlQuery(environmentUri: $environmentUri, worksheetUri: $worksheetUri, sqlQuery: $sqlQuery) { - rows { - cells { - columnName - typeName - value - } - } - columns { - columnName - typeName - } - } - } - ` -}); - -export default runAthenaSqlQuery; diff --git a/frontend/src/api/Worksheet/shareWorksheet.js b/frontend/src/api/Worksheet/shareWorksheet.js deleted file mode 100644 index 3d6278aea..000000000 --- a/frontend/src/api/Worksheet/shareWorksheet.js +++ /dev/null @@ -1,20 +0,0 @@ -import { gql } from 'apollo-boost'; - -const shareWorksheet = ({ worksheetUri, input }) => ({ - variables: { - worksheetUri, - input - }, - mutation: gql` - mutation ShareWorksheet( - $worksheetUri: String! - $input: WorksheetShareInput! - ) { - shareWorksheet(worksheetUri: $worksheetUri, input: $input) { - worksheetShareUri - } - } - ` -}); - -export default shareWorksheet; diff --git a/frontend/src/api/Worksheet/startWorksheetQuery.js b/frontend/src/api/Worksheet/startWorksheetQuery.js deleted file mode 100644 index 2e70ea7e1..000000000 --- a/frontend/src/api/Worksheet/startWorksheetQuery.js +++ /dev/null @@ -1,24 +0,0 @@ -import { gql } from 'apollo-boost'; - -const startWorksheetQuery = ({ worksheetUri, input }) => ({ - variables: { - worksheetUri, - input - }, - mutation: gql` - mutation StartWorksheetQuery( - $worksheetUri: String! - $input: WorksheetQueryInput! - ) { - startWorksheetQuery(worksheetUri: $worksheetUri, input: $input) { - AthenaQueryId - Error - Status - DataScannedInBytes - ElapsedTimeInMs - } - } - ` -}); - -export default startWorksheetQuery; diff --git a/frontend/src/api/Worksheet/updateWorksheetShare.js b/frontend/src/api/Worksheet/updateWorksheetShare.js deleted file mode 100644 index 557bb6aae..000000000 --- a/frontend/src/api/Worksheet/updateWorksheetShare.js +++ /dev/null @@ -1,24 +0,0 @@ -import { gql } from 'apollo-boost'; - -const updateWorksheetShare = ({ worksheetShareUri, canEdit }) => ({ - variables: { - worksheetShareUri, - canEdit - }, - mutation: gql` - mutation RemoveWorksheetShare( - $worksheetShareUri: String! 
- $canEdit: Boolean - ) { - updateWorksheetShare( - worksheetShareUri: $worksheetShareUri - canEdit: $canEdit - ) { - worksheetShareUri - canEdit - } - } - ` -}); - -export default updateWorksheetShare; diff --git a/frontend/src/components/AuthGuard.js b/frontend/src/authentication/components/AuthGuard.js similarity index 82% rename from frontend/src/components/AuthGuard.js rename to frontend/src/authentication/components/AuthGuard.js index 23cf18da3..73f280943 100644 --- a/frontend/src/components/AuthGuard.js +++ b/frontend/src/authentication/components/AuthGuard.js @@ -1,10 +1,10 @@ +import PropTypes from 'prop-types'; import { useState } from 'react'; import { Navigate, useLocation } from 'react-router-dom'; -import PropTypes from 'prop-types'; -import useAuth from '../hooks/useAuth'; -import Login from '../views/authentication/Login'; +import { Login } from '../views/Login'; +import { useAuth } from '../hooks'; -const AuthGuard = (props) => { +export const AuthGuard = (props) => { const { children } = props; const auth = useAuth(); const location = useLocation(); @@ -29,5 +29,3 @@ const AuthGuard = (props) => { AuthGuard.propTypes = { children: PropTypes.node }; - -export default AuthGuard; diff --git a/frontend/src/authentication/components/GuestGuard.js b/frontend/src/authentication/components/GuestGuard.js new file mode 100644 index 000000000..99e35accb --- /dev/null +++ b/frontend/src/authentication/components/GuestGuard.js @@ -0,0 +1,17 @@ +import PropTypes from 'prop-types'; +import { Navigate } from 'react-router-dom'; +import { useAuth } from '../hooks'; + +export const GuestGuard = ({ children }) => { + const { isAuthenticated } = useAuth(); + + if (isAuthenticated) { + return ; + } + + return <>{children}; +}; + +GuestGuard.propTypes = { + children: PropTypes.node +}; diff --git a/frontend/src/authentication/components/LoginAmplify.js b/frontend/src/authentication/components/LoginAmplify.js new file mode 100644 index 000000000..8d8f62ffc --- /dev/null +++ b/frontend/src/authentication/components/LoginAmplify.js @@ -0,0 +1,19 @@ +import { Button } from '@mui/material'; +import { useAuth } from '../hooks'; + +export const LoginAmplify = () => { + const { login } = useAuth(); + + return ( + + ); +}; diff --git a/frontend/src/authentication/components/index.js b/frontend/src/authentication/components/index.js new file mode 100644 index 000000000..52807e867 --- /dev/null +++ b/frontend/src/authentication/components/index.js @@ -0,0 +1,3 @@ +export * from './AuthGuard'; +export * from './GuestGuard'; +export * from './LoginAmplify'; diff --git a/frontend/src/authentication/contexts/AuthContext.js b/frontend/src/authentication/contexts/AuthContext.js new file mode 100644 index 000000000..5c7a5679a --- /dev/null +++ b/frontend/src/authentication/contexts/AuthContext.js @@ -0,0 +1,6 @@ +import { CognitoAuthProvider } from './CognitoAuthContext'; +import { LocalAuthProvider } from './LocalAuthContext'; + +export const AuthProvider = !process.env.REACT_APP_COGNITO_USER_POOL_ID + ? 
LocalAuthProvider + : CognitoAuthProvider; diff --git a/frontend/src/authentication/contexts/CognitoAuthContext.js b/frontend/src/authentication/contexts/CognitoAuthContext.js new file mode 100644 index 000000000..55097a405 --- /dev/null +++ b/frontend/src/authentication/contexts/CognitoAuthContext.js @@ -0,0 +1,145 @@ +import { Auth, Amplify } from 'aws-amplify'; +import PropTypes from 'prop-types'; +import { createContext, useEffect, useReducer } from 'react'; +import { SET_ERROR } from 'globalErrors'; + +Amplify.configure({ + Auth: { + mandatorySignIn: true, + region: process.env.REACT_APP_COGNITO_REGION, + userPoolId: process.env.REACT_APP_COGNITO_USER_POOL_ID, + userPoolWebClientId: process.env.REACT_APP_COGNITO_APP_CLIENT_ID, + redirectSignIn: process.env.REACT_APP_COGNITO_REDIRECT_SIGNIN, + redirectSignOut: process.env.REACT_APP_COGNITO_REDIRECT_SIGNOUT + } +}); + +Auth.configure({ + oauth: { + domain: process.env.REACT_APP_COGNITO_DOMAIN, + redirectSignIn: process.env.REACT_APP_COGNITO_REDIRECT_SIGNIN, + redirectSignOut: process.env.REACT_APP_COGNITO_REDIRECT_SIGNOUT, + responseType: 'code' + } +}); + +const initialState = { + isAuthenticated: false, + isInitialized: false, + user: null +}; + +const handlers = { + INITIALIZE: (state, action) => { + const { isAuthenticated, user } = action.payload; + + return { + ...state, + isAuthenticated, + isInitialized: true, + user + }; + }, + LOGIN: (state, action) => { + const { user } = action.payload; + + return { + ...state, + isAuthenticated: true, + user + }; + }, + LOGOUT: (state) => ({ + ...state, + isAuthenticated: false, + user: null + }) +}; + +const reducer = (state, action) => + handlers[action.type] ? handlers[action.type](state, action) : state; + +export const CognitoAuthContext = createContext({ + ...initialState, + platform: 'Amplify', + login: () => Promise.resolve(), + logout: () => Promise.resolve() +}); + +export const CognitoAuthProvider = (props) => { + const { children } = props; + const [state, dispatch] = useReducer(reducer, initialState); + + useEffect(() => { + const initialize = async () => { + try { + const user = await Auth.currentAuthenticatedUser(); + dispatch({ + type: 'INITIALIZE', + payload: { + isAuthenticated: true, + user: { + id: user.attributes.email, + email: user.attributes.email, + name: user.attributes.email + } + } + }); + } catch (error) { + dispatch({ + type: 'INITIALIZE', + payload: { + isAuthenticated: false, + user: null + } + }); + } + }; + + initialize().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); + }, []); + + const login = async () => { + Auth.federatedSignIn() + .then((user) => { + dispatch({ + type: 'LOGIN', + payload: { + user: { + id: user.attributes.email, + email: user.attributes.email, + name: user.attributes.email + } + } + }); + }) + .catch((e) => { + console.error('Failed to authenticate user', e); + }); + }; + + const logout = async () => { + await Auth.signOut(); + dispatch({ + type: 'LOGOUT' + }); + }; + + return ( + + {children} + + ); +}; + +CognitoAuthProvider.propTypes = { + children: PropTypes.node.isRequired +}; diff --git a/frontend/src/authentication/contexts/LocalAuthContext.js b/frontend/src/authentication/contexts/LocalAuthContext.js new file mode 100644 index 000000000..29fe3642c --- /dev/null +++ b/frontend/src/authentication/contexts/LocalAuthContext.js @@ -0,0 +1,113 @@ +import PropTypes from 'prop-types'; +import { createContext, useEffect, useReducer } from 'react'; +import { SET_ERROR } from 'globalErrors'; + +const anonymousUser = { + 
id: 'someone@amazon.com', + email: 'someone@amazon.com', + name: 'someone@amazon.com' +}; +const initialState = { + isAuthenticated: true, + isInitialized: true, + user: anonymousUser +}; + +const handlers = { + INITIALIZE: (state, action) => { + const { isAuthenticated, user } = action.payload; + + return { + ...state, + isAuthenticated, + isInitialized: true, + user + }; + }, + LOGIN: (state, action) => { + const { user } = action.payload; + + return { + ...state, + isAuthenticated: true, + user + }; + }, + LOGOUT: (state) => ({ + ...state, + isAuthenticated: false, + user: null + }) +}; + +const reducer = (state, action) => + handlers[action.type] ? handlers[action.type](state, action) : state; + +export const LocalAuthContext = createContext({ + ...initialState, + platform: 'local', + login: () => Promise.resolve(), + logout: () => Promise.resolve() +}); + +export const LocalAuthProvider = (props) => { + const { children } = props; + const [state, dispatch] = useReducer(reducer, initialState); + + useEffect(() => { + const initialize = async () => { + try { + dispatch({ + type: 'INITIALIZE', + payload: { + isAuthenticated: true, + user: anonymousUser + } + }); + } catch (error) { + dispatch({ + type: 'INITIALIZE', + payload: { + isAuthenticated: false, + user: null + } + }); + } + }; + + initialize().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); + }, []); + + const login = async () => { + dispatch({ + type: 'LOGIN', + payload: { + user: anonymousUser + } + }); + }; + + const logout = async () => { + dispatch({ + type: 'LOGOUT' + }); + }; + + return ( + + {children} + + ); +}; + +LocalAuthProvider.propTypes = { + children: PropTypes.node.isRequired +}; diff --git a/frontend/src/authentication/contexts/index.js b/frontend/src/authentication/contexts/index.js new file mode 100644 index 000000000..dc39de3c1 --- /dev/null +++ b/frontend/src/authentication/contexts/index.js @@ -0,0 +1 @@ +export * from './AuthContext'; diff --git a/frontend/src/authentication/hooks/index.js b/frontend/src/authentication/hooks/index.js new file mode 100644 index 000000000..cac031a42 --- /dev/null +++ b/frontend/src/authentication/hooks/index.js @@ -0,0 +1,2 @@ +export * from './useAuth'; +export * from './useToken'; diff --git a/frontend/src/authentication/hooks/useAuth.js b/frontend/src/authentication/hooks/useAuth.js new file mode 100644 index 000000000..975f24e08 --- /dev/null +++ b/frontend/src/authentication/hooks/useAuth.js @@ -0,0 +1,10 @@ +import { useContext } from 'react'; +import { CognitoAuthContext } from '../contexts/CognitoAuthContext'; +import { LocalAuthContext } from '../contexts/LocalAuthContext'; + +export const useAuth = () => + useContext( + !process.env.REACT_APP_COGNITO_USER_POOL_ID + ? 
LocalAuthContext + : CognitoAuthContext + ); diff --git a/frontend/src/hooks/useToken.js b/frontend/src/authentication/hooks/useToken.js similarity index 82% rename from frontend/src/hooks/useToken.js rename to frontend/src/authentication/hooks/useToken.js index 3bfed84c5..cffbf6d48 100644 --- a/frontend/src/hooks/useToken.js +++ b/frontend/src/authentication/hooks/useToken.js @@ -1,10 +1,9 @@ -import { useEffect, useState } from 'react'; import { Auth } from 'aws-amplify'; -import { SET_ERROR } from '../store/errorReducer'; -import { useDispatch } from '../store'; -import useAuth from "./useAuth"; +import { useEffect, useState } from 'react'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useAuth } from './useAuth'; -const useToken = () => { +export const useToken = () => { const dispatch = useDispatch(); const auth = useAuth(); const [token, setToken] = useState(null); @@ -36,5 +35,3 @@ const useToken = () => { }); return token; }; - -export default useToken; diff --git a/frontend/src/authentication/index.js b/frontend/src/authentication/index.js new file mode 100644 index 000000000..615c788fb --- /dev/null +++ b/frontend/src/authentication/index.js @@ -0,0 +1,3 @@ +export * from './components'; +export * from './contexts'; +export * from './hooks'; diff --git a/frontend/src/views/authentication/Login.js b/frontend/src/authentication/views/Login.js similarity index 92% rename from frontend/src/views/authentication/Login.js rename to frontend/src/authentication/views/Login.js index 05c0a09d2..d178e4e7e 100644 --- a/frontend/src/views/authentication/Login.js +++ b/frontend/src/authentication/views/Login.js @@ -1,14 +1,14 @@ -import { Helmet } from 'react-helmet-async'; import { Box, Card, CardContent, Container, Typography } from '@mui/material'; -import { LoginAmplify } from '../../components/authentication/login'; -import Logo from '../../components/Logo'; -import useAuth from '../../hooks/useAuth'; +import { Helmet } from 'react-helmet-async'; +import { LoginAmplify } from '../components'; +import { useAuth } from '../hooks'; +import { Logo } from 'design'; const platformIcons = { Amplify: '/static/icons/amplify.svg' }; -const Login = () => { +export const Login = () => { const { platform } = useAuth(); return ( diff --git a/frontend/src/components/GuestGuard.js b/frontend/src/components/GuestGuard.js deleted file mode 100644 index 7617b971a..000000000 --- a/frontend/src/components/GuestGuard.js +++ /dev/null @@ -1,19 +0,0 @@ -import { Navigate } from 'react-router-dom'; -import PropTypes from 'prop-types'; -import useAuth from '../hooks/useAuth'; - -const GuestGuard = ({ children }) => { - const { isAuthenticated } = useAuth(); - - if (isAuthenticated) { - return ; - } - - return <>{children}; -}; - -GuestGuard.propTypes = { - children: PropTypes.node -}; - -export default GuestGuard; diff --git a/frontend/src/components/ShareStatus.js b/frontend/src/components/ShareStatus.js deleted file mode 100644 index 44196555c..000000000 --- a/frontend/src/components/ShareStatus.js +++ /dev/null @@ -1,18 +0,0 @@ -import * as PropTypes from 'prop-types'; -import Label from './Label'; - -const ShareStatus = (props) => { - const { status } = props; - const setTagColor = () => { - if (['Approved', 'Share_Approved', 'Revoke_Approved', 'Share_Succeeded', 'Revoke_Succeeded'].includes(status)) return 'success'; - if (['Rejected', 'Revoked', 'Share_Rejected', 'Share_Failed', 'Revoke_Failed'].includes(status)) - return 'error'; - if (['PendingApproval', 'Submitted'].includes(status)) 
return 'warning'; - return 'info'; - }; - return ; -}; -ShareStatus.propTypes = { - status: PropTypes.string.isRequired -}; -export default ShareStatus; diff --git a/frontend/src/components/StackStatus.js b/frontend/src/components/StackStatus.js deleted file mode 100644 index ef01e8e50..000000000 --- a/frontend/src/components/StackStatus.js +++ /dev/null @@ -1,31 +0,0 @@ -import * as PropTypes from 'prop-types'; -import Label from './Label'; - -const StackStatus = (props) => { - const { status } = props; - const statusColor = (s) => { - let color; - switch (s) { - case 'CREATE_COMPLETE': - case 'UPDATE_COMPLETE': - color = 'success'; - break; - case 'CREATE_FAILED': - case 'ProcessFailed': - case 'DELETE_FAILED': - case 'DELETE_COMPLETE': - case 'ROLLBACK_COMPLETE': - case 'ROLLBACK_IN_PROGRESS': - color = 'error'; - break; - default: - color = 'info'; - } - return color; - }; - return ; -}; -StackStatus.propTypes = { - status: PropTypes.string.isRequired -}; -export default StackStatus; diff --git a/frontend/src/components/authentication/login/LoginAmplify.js b/frontend/src/components/authentication/login/LoginAmplify.js deleted file mode 100644 index 540d611bf..000000000 --- a/frontend/src/components/authentication/login/LoginAmplify.js +++ /dev/null @@ -1,21 +0,0 @@ -import { Button } from '@mui/material'; -import useAuth from '../../../hooks/useAuth'; - -const LoginAmplify = () => { - const { login } = useAuth(); - - return ( - - ); -}; - -export default LoginAmplify; diff --git a/frontend/src/components/authentication/login/index.js b/frontend/src/components/authentication/login/index.js deleted file mode 100644 index a66dc721d..000000000 --- a/frontend/src/components/authentication/login/index.js +++ /dev/null @@ -1 +0,0 @@ -export { default as LoginAmplify } from './LoginAmplify'; diff --git a/frontend/src/components/defaults/Filter.js b/frontend/src/components/defaults/Filter.js deleted file mode 100644 index 688b19fdb..000000000 --- a/frontend/src/components/defaults/Filter.js +++ /dev/null @@ -1,7 +0,0 @@ -const DefaultFilter = { - page: 1, - pageSize: 10, - term: '' -}; - -export default DefaultFilter; diff --git a/frontend/src/components/defaults/PagedResponseDefault.js b/frontend/src/components/defaults/PagedResponseDefault.js deleted file mode 100644 index d18544b38..000000000 --- a/frontend/src/components/defaults/PagedResponseDefault.js +++ /dev/null @@ -1,10 +0,0 @@ -const PagedResponseDefault = { - count: 1, - page: 1, - pages: 1, - hasNext: false, - hasPrevious: false, - nodes: [] -}; - -export default PagedResponseDefault; diff --git a/frontend/src/components/defaults/SelectListFilter.js b/frontend/src/components/defaults/SelectListFilter.js deleted file mode 100644 index 02f5609ef..000000000 --- a/frontend/src/components/defaults/SelectListFilter.js +++ /dev/null @@ -1,7 +0,0 @@ -const SelectListFilter = { - page: 1, - pageSize: 10000, - term: '' -}; - -export default SelectListFilter; diff --git a/frontend/src/components/defaults/index.js b/frontend/src/components/defaults/index.js deleted file mode 100644 index c715b6dbe..000000000 --- a/frontend/src/components/defaults/index.js +++ /dev/null @@ -1,5 +0,0 @@ -import PagedResponseDefault from './PagedResponseDefault'; -import DefaultFilter from './Filter'; -import SelectListFilter from './SelectListFilter'; - -export { PagedResponseDefault, DefaultFilter, SelectListFilter }; diff --git a/frontend/src/components/layout/DefaultSidebar.js b/frontend/src/components/layout/DefaultSidebar.js deleted file mode 100644 index 
5ebb9afa5..000000000 --- a/frontend/src/components/layout/DefaultSidebar.js +++ /dev/null @@ -1,282 +0,0 @@ -import React, { useEffect, useState } from 'react'; -import * as BsIcons from 'react-icons/bs'; -import * as BiIcons from 'react-icons/bi'; -import { MdShowChart } from 'react-icons/md'; -import { useLocation } from 'react-router-dom'; -import PropTypes from 'prop-types'; -import { - Box, - Button, - Divider, - Drawer, - IconButton, - useTheme -} from '@mui/material'; -import { AiOutlineExperiment } from 'react-icons/ai'; -import { FiCodesandbox, FiPackage } from 'react-icons/fi'; -import { SiJupyter } from 'react-icons/si'; -import { VscBook } from 'react-icons/vsc'; -import { ChevronLeft, ChevronRight, ShareOutlined } from '@mui/icons-material'; -import NavSection from '../NavSection'; -import Scrollbar from '../Scrollbar'; -import useSettings from '../../hooks/useSettings'; - -const DefaultSidebar = (props) => { - const { openDrawer, onOpenDrawerChange } = props; - const getSections = (isAdvancedMode) => - !isAdvancedMode - ? [ - { - title: 'Discover', - items: [ - { - title: 'Catalog', - path: '/console/catalog', - icon: - }, - { - title: 'Datasets', - path: '/console/datasets', - icon: - }, - { - title: 'Shares', - path: '/console/shares', - icon: - } - ] - }, - { - title: 'Play', - items: [ - { - title: 'Worksheets', - path: '/console/worksheets', - icon: - }, - { - title: 'ML Studio', - path: '/console/mlstudio', - icon: - }, - { - title: 'Dashboards', - path: '/console/dashboards', - icon: - } - ] - } - ] - : [ - { - title: 'Discover', - items: [ - { - title: 'Catalog', - path: '/console/catalog', - icon: - }, - { - title: 'Datasets', - path: '/console/datasets', - icon: - }, - { - title: 'Shares', - path: '/console/shares', - icon: - }, - { - title: 'Glossaries', - path: '/console/glossaries', - icon: - } - ] - }, - { - title: 'Play', - items: [ - { - title: 'Worksheets', - path: '/console/worksheets', - icon: - }, - { - title: 'Notebooks', - path: '/console/notebooks', - icon: - }, - { - title: 'ML Studio', - path: '/console/mlstudio', - icon: - }, - { - title: 'Pipelines', - path: '/console/pipelines', - icon: - }, - { - title: 'Dashboards', - path: '/console/dashboards', - icon: - } - ] - }, - { - title: 'Admin', - items: [ - { - title: 'Organizations', - path: '/console/organizations', - icon: - }, - { - title: 'Environments', - path: '/console/environments', - icon: - } - ] - } - ]; - const location = useLocation(); - const { settings } = useSettings(); - const [sections, setSections] = useState( - getSections(settings.isAdvancedMode) - ); - const [displayCollapser, setDisplayCollapser] = useState(false); - const theme = useTheme(); - - useEffect( - () => setSections(getSections(settings.isAdvancedMode)), - [settings.isAdvancedMode] - ); - - const content = ( - - - - {sections && - sections.map((section) => ( - - ))} - - - - - - - - - - ); - - return ( - <> - - - { - onOpenDrawerChange(false); - }} - > - {openDrawer} - {theme.direction === 'ltr' ? : } - - - {content} - - - { - setDisplayCollapser(true); - }} - onMouseLeave={() => { - setDisplayCollapser(false); - }} - > - {displayCollapser && ( - - { - onOpenDrawerChange(false); - }} - > - {openDrawer} - {openDrawer ? 
: } - - - )} - {content} - - - - ); -}; - -DefaultSidebar.propTypes = { - openDrawer: PropTypes.bool, - onOpenDrawerChange: PropTypes.func -}; - -export default DefaultSidebar; diff --git a/frontend/src/components/popovers/NotificationsPopover.js b/frontend/src/components/popovers/NotificationsPopover.js deleted file mode 100644 index 273133cfe..000000000 --- a/frontend/src/components/popovers/NotificationsPopover.js +++ /dev/null @@ -1,182 +0,0 @@ -import {useCallback, useEffect, useRef, useState} from 'react'; -import { - Avatar, - Badge, - Box, - IconButton, - Button, - Link, - List, - ListItem, - ListItemAvatar, - ListItemText, - Popover, - Tooltip, - Typography -} from '@mui/material'; -import { DeleteOutlined } from '@mui/icons-material'; -import countUnreadNotifications from '../../api/Notification/countUnreadNotifications'; -import listNotifications from '../../api/Notification/listNotifications'; -import markNotificationAsRead from '../../api/Notification/markAsRead'; -import BellIcon from '../../icons/Bell'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../defaults'; -import { PagedResponseDefault } from '../defaults'; - -const NotificationsPopover = () => { - const anchorRef = useRef(null); - const [open, setOpen] = useState(false); - const client = useClient(); - const [loading, setLoading] = useState(true); - const [notifications, setNotifications] = useState([]); - const [countInbox, setCountInbox] = useState(null); - - const handleOpen = () => { - setOpen(true); - }; - - const handleClose = () => { - setOpen(false); - }; - - const getCountInbox = useCallback(async () => { - setLoading(true); - const response = await client.query(countUnreadNotifications()); - if (!response.errors) { - setCountInbox(response.data.countUnreadNotifications); - fetchItems({ unread: true }); - } - setLoading(false); - },[client]); - - const fetchItems = useCallback(async (notificationFilter) => { - setLoading(true); - let filter = Object.assign({}, Defaults.SelectListFilter, notificationFilter) - const response = await client.query( - listNotifications(filter) - ); - if (!response.errors) { - setNotifications(response.data.listNotifications.nodes); - } - setLoading(false); - },[client]); - - - const markAsRead = useCallback(async (notificationUri) => { - const response = await client.mutate( - markNotificationAsRead(notificationUri) - ); - },[client]); - - const handleRemoveNotification = (idx) => { - let notificiationUri = notifications[idx].notificationUri - setNotifications((prevstate) => { - const rows = [...prevstate]; - rows.splice(idx, 1); - return rows; - }); - setCountInbox(countInbox - 1) - markAsRead(notificiationUri) - }; - - const clearNotifications = (idx) => { - let readNotifications = notifications - setNotifications([]) - setCountInbox(0) - readNotifications.forEach(note => { - markAsRead(note.notificationUri) - }); - }; - - useEffect(() => { - if (client) { - getCountInbox() - } - }, [client]); - - return ( - <> - - - - - - - - - - - Notifications - - - - {loading || notifications.length === 0 ? 
( - - - There are no notifications - - - ) : ( - <> - - {notifications.length > 0 && - notifications.map((notification, idx) => ( - - - - - - {notification.message} - - } - /> - { - handleRemoveNotification(idx); - }} - > - - - - ))} - - - )} - - - ); -}; - -export default NotificationsPopover; diff --git a/frontend/src/components/popovers/SwitchModePopover.js b/frontend/src/components/popovers/SwitchModePopover.js deleted file mode 100644 index 053ffa708..000000000 --- a/frontend/src/components/popovers/SwitchModePopover.js +++ /dev/null @@ -1,30 +0,0 @@ -import { IconButton, Tooltip } from '@mui/material'; -import useSettings from '../../hooks/useSettings'; -import Refresh from '../../icons/Refresh'; - -/** - * @description Toggle "advanced" / "basic" mode. - * @returns {JSX.Element} - */ -const SwitchModePopover = () => { - const { settings, saveSettings } = useSettings(); - - /** - * @description Toggle mode. - */ - const handleSwitch = () => - saveSettings({ - ...settings, - isAdvancedMode: !settings.isAdvancedMode - }); - - return ( - - - - - - ); -}; - -export default SwitchModePopover; diff --git a/frontend/src/components/topics/TopicsData.js b/frontend/src/components/topics/TopicsData.js deleted file mode 100644 index 72f3c7c7e..000000000 --- a/frontend/src/components/topics/TopicsData.js +++ /dev/null @@ -1,16 +0,0 @@ -const TopicsData = { - Finances: 'Finances', - HumanResources: 'HumanResources', - Products: 'Products', - Services: 'Services', - Operations: 'Operations', - Research: 'Research', - Sales: 'Sales', - Orders: 'Orders', - Sites: 'Sites', - Energy: 'Energy', - Customers: 'Customers', - Misc: 'Misc' -}; - -export default Object.keys(TopicsData).map((t) => ({ label: t, value: t })); diff --git a/frontend/src/components/topics/index.js b/frontend/src/components/topics/index.js deleted file mode 100644 index 691c7cad9..000000000 --- a/frontend/src/components/topics/index.js +++ /dev/null @@ -1,3 +0,0 @@ -import TopicsData from './TopicsData'; - -export { TopicsData }; diff --git a/frontend/src/constants.js b/frontend/src/constants.js deleted file mode 100644 index 08ca473b1..000000000 --- a/frontend/src/constants.js +++ /dev/null @@ -1,32 +0,0 @@ -export const THEMES = { - LIGHT: 'LIGHT', - DARK: 'DARK' -}; - -export const AwsRegions = [ - { label: 'US East (Ohio)', value: 'us-east-2' }, - { label: 'US East (N. Virginia)', value: 'us-east-1' }, - { label: 'US West (N. 
California)', value: 'us-west-1' }, - { label: 'US West (Oregon)', value: 'us-west-2' }, - { label: 'Africa (Cape Town)', value: 'af-south-1' }, - { label: 'Asia Pacific (Hong Kong)', value: 'ap-east-1' }, - { label: 'Asia Pacific (Mumbai)', value: 'ap-south-1' }, - { label: 'Asia Pacific (Osaka-Local)', value: 'ap-northeast-3' }, - { label: 'Asia Pacific (Seoul)', value: 'ap-northeast-2' }, - { label: 'Asia Pacific (Singapore)', value: 'ap-southeast-1' }, - { label: 'Asia Pacific (Sydney)', value: 'ap-southeast-2' }, - { label: 'Asia Pacific (Tokyo)', value: 'ap-northeast-1' }, - { label: 'Canada (Central)', value: 'ca-central-1' }, - { label: 'China (Beijing)', value: 'cn-north-1' }, - { label: 'China (Ningxia)', value: 'cn-northwest-1' }, - { label: 'Europe (Frankfurt)', value: 'eu-central-1' }, - { label: 'Europe (Ireland)', value: 'eu-west-1' }, - { label: 'Europe (London)', value: 'eu-west-2' }, - { label: 'Europe (Milan)', value: 'eu-south-1' }, - { label: 'Europe (Paris)', value: 'eu-west-3' }, - { label: 'Europe (Stockholm)', value: 'eu-north-1' }, - { label: 'Middle East (Bahrain)', value: 'me-south-1' }, - { label: 'South America (São Paulo)', value: 'sa-east-1' }, - { label: 'AWS GovCloud (US-East)', value: 'us-gov-east-1' }, - { label: 'AWS GovCloud (US)', value: 'us-gov-west-1' } -]; diff --git a/frontend/src/contexts/AmplifyContext.js b/frontend/src/contexts/AmplifyContext.js deleted file mode 100644 index 024561d44..000000000 --- a/frontend/src/contexts/AmplifyContext.js +++ /dev/null @@ -1,147 +0,0 @@ -import { createContext, useEffect, useReducer } from 'react'; -import PropTypes from 'prop-types'; -import { Amplify, Auth } from 'aws-amplify'; -import { SET_ERROR } from '../store/errorReducer'; - -Amplify.configure({ - Auth: { - mandatorySignIn: true, - region: process.env.REACT_APP_COGNITO_REGION, - userPoolId: process.env.REACT_APP_COGNITO_USER_POOL_ID, - userPoolWebClientId: process.env.REACT_APP_COGNITO_APP_CLIENT_ID, - redirectSignIn: process.env.REACT_APP_COGNITO_REDIRECT_SIGNIN, - redirectSignOut: process.env.REACT_APP_COGNITO_REDIRECT_SIGNOUT - } -}); - -Auth.configure({ - oauth: { - domain: process.env.REACT_APP_COGNITO_DOMAIN, - redirectSignIn: process.env.REACT_APP_COGNITO_REDIRECT_SIGNIN, - redirectSignOut: process.env.REACT_APP_COGNITO_REDIRECT_SIGNOUT, - responseType: 'code' - } -}); - -const initialState = { - isAuthenticated: false, - isInitialized: false, - user: null -}; - -const handlers = { - INITIALIZE: (state, action) => { - const { isAuthenticated, user } = action.payload; - - return { - ...state, - isAuthenticated, - isInitialized: true, - user - }; - }, - LOGIN: (state, action) => { - const { user } = action.payload; - - return { - ...state, - isAuthenticated: true, - user - }; - }, - LOGOUT: (state) => ({ - ...state, - isAuthenticated: false, - user: null - }) -}; - -const reducer = (state, action) => - handlers[action.type] ? 
handlers[action.type](state, action) : state; - -const AuthContext = createContext({ - ...initialState, - platform: 'Amplify', - login: () => Promise.resolve(), - logout: () => Promise.resolve() -}); - -export const AuthProvider = (props) => { - const { children } = props; - const [state, dispatch] = useReducer(reducer, initialState); - - useEffect(() => { - const initialize = async () => { - try { - const user = await Auth.currentAuthenticatedUser(); - dispatch({ - type: 'INITIALIZE', - payload: { - isAuthenticated: true, - user: { - id: user.attributes.email, - email: user.attributes.email, - name: user.attributes.email - } - } - }); - } catch (error) { - dispatch({ - type: 'INITIALIZE', - payload: { - isAuthenticated: false, - user: null - } - }); - } - }; - - initialize().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); - }, []); - - const login = async () => { - Auth.federatedSignIn() - .then((user) => { - dispatch({ - type: 'LOGIN', - payload: { - user: { - id: user.attributes.email, - email: user.attributes.email, - name: user.attributes.email - } - } - }); - }) - .catch((e) => { - console.log('Failed to authenticate user', e); - }); - }; - - const logout = async () => { - await Auth.signOut(); - dispatch({ - type: 'LOGOUT' - }); - }; - - return ( - - {children} - - ); -}; - -AuthProvider.propTypes = { - children: PropTypes.node.isRequired -}; - -export default AuthContext; diff --git a/frontend/src/contexts/LocalContext.js b/frontend/src/contexts/LocalContext.js deleted file mode 100644 index 196081904..000000000 --- a/frontend/src/contexts/LocalContext.js +++ /dev/null @@ -1,115 +0,0 @@ -import { createContext, useEffect, useReducer } from 'react'; -import PropTypes from 'prop-types'; -import { SET_ERROR } from '../store/errorReducer'; - -const anonymousUser = { - id: 'someone@amazon.com', - email: 'someone@amazon.com', - name: 'someone@amazon.com' -}; -const initialState = { - isAuthenticated: true, - isInitialized: true, - user: anonymousUser -}; - -const handlers = { - INITIALIZE: (state, action) => { - const { isAuthenticated, user } = action.payload; - - return { - ...state, - isAuthenticated, - isInitialized: true, - user - }; - }, - LOGIN: (state, action) => { - const { user } = action.payload; - - return { - ...state, - isAuthenticated: true, - user - }; - }, - LOGOUT: (state) => ({ - ...state, - isAuthenticated: false, - user: null - }) -}; - -const reducer = (state, action) => - handlers[action.type] ? 
handlers[action.type](state, action) : state; - -const LocalContext = createContext({ - ...initialState, - platform: 'local', - login: () => Promise.resolve(), - logout: () => Promise.resolve() -}); - -export const AuthProvider = (props) => { - const { children } = props; - const [state, dispatch] = useReducer(reducer, initialState); - - useEffect(() => { - const initialize = async () => { - try { - dispatch({ - type: 'INITIALIZE', - payload: { - isAuthenticated: true, - user: anonymousUser - } - }); - } catch (error) { - dispatch({ - type: 'INITIALIZE', - payload: { - isAuthenticated: false, - user: null - } - }); - } - }; - - initialize().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); - }, []); - - const login = async () => { - dispatch({ - type: 'LOGIN', - payload: { - user: anonymousUser - } - }); - }; - - const logout = async () => { - dispatch({ - type: 'LOGOUT' - }); - }; - - return ( - - {children} - - ); -}; - -AuthProvider.propTypes = { - children: PropTypes.node.isRequired -}; - -export default LocalContext; diff --git a/frontend/src/contexts/SettingsContext.js b/frontend/src/contexts/SettingsContext.js deleted file mode 100644 index cc2569117..000000000 --- a/frontend/src/contexts/SettingsContext.js +++ /dev/null @@ -1,84 +0,0 @@ -import { createContext, useEffect, useState } from 'react'; -import PropTypes from 'prop-types'; -import { THEMES } from '../constants'; - -const initialSettings = { - compact: true, - responsiveFontSizes: true, - roundedCorners: true, - theme: THEMES.DARK, - isAdvancedMode: true, - tabIcons: false -}; - -export const restoreSettings = () => { - let settings = null; - - try { - const storedData = window.localStorage.getItem('settings'); - - if (storedData) { - settings = JSON.parse(storedData); - } else { - settings = { - compact: true, - responsiveFontSizes: true, - roundedCorners: true, - isAdvancedMode: true, - tabIcons: false, - theme: window.matchMedia('(prefers-color-scheme: dark)').matches - ? 
THEMES.DARK - : THEMES.LIGHT - }; - } - } catch (err) { - console.error(err); - } - return settings; -}; - -export const storeSettings = (settings) => { - window.localStorage.setItem('settings', JSON.stringify(settings)); -}; - -const SettingsContext = createContext({ - settings: initialSettings, - saveSettings: () => {} -}); - -export const SettingsProvider = (props) => { - const { children } = props; - const [settings, setSettings] = useState(initialSettings); - - useEffect(() => { - const restoredSettings = restoreSettings(); - - if (restoredSettings) { - setSettings(restoredSettings); - } - }, []); - - const saveSettings = (updatedSettings) => { - setSettings(updatedSettings); - storeSettings(updatedSettings); - }; - - return ( - - {children} - - ); -}; - -SettingsProvider.propTypes = { - children: PropTypes.node.isRequired -}; - -export const SettingsConsumer = SettingsContext.Consumer; - -export default SettingsContext; diff --git a/frontend/src/components/ArchiveObjectWithFrictionModal.js b/frontend/src/design/components/ArchiveObjectWithFrictionModal.js similarity index 95% rename from frontend/src/components/ArchiveObjectWithFrictionModal.js rename to frontend/src/design/components/ArchiveObjectWithFrictionModal.js index 6da29df4b..8684c6b28 100644 --- a/frontend/src/components/ArchiveObjectWithFrictionModal.js +++ b/frontend/src/design/components/ArchiveObjectWithFrictionModal.js @@ -10,7 +10,7 @@ import { import { useState } from 'react'; import { ArchiveOutlined } from '@mui/icons-material'; -const ArchiveObjectWithFrictionModal = (props) => { +export const ArchiveObjectWithFrictionModal = (props) => { const { objectName, archiveMessage, @@ -79,5 +79,3 @@ ArchiveObjectWithFrictionModal.propTypes = { archiveFunction: PropTypes.func.isRequired, open: PropTypes.bool.isRequired }; - -export default ArchiveObjectWithFrictionModal; diff --git a/frontend/src/components/CircularProgress.js b/frontend/src/design/components/CircularProgress.js similarity index 94% rename from frontend/src/components/CircularProgress.js rename to frontend/src/design/components/CircularProgress.js index 8677b559a..6edfcab2f 100644 --- a/frontend/src/components/CircularProgress.js +++ b/frontend/src/design/components/CircularProgress.js @@ -25,7 +25,7 @@ const CircularProgressValue = styled('path')(({ theme }) => ({ } })); -const CircularProgress = (props) => { +export const CircularProgress = (props) => { const { value, ...other } = props; return ( @@ -47,5 +47,3 @@ const CircularProgress = (props) => { CircularProgress.propTypes = { value: PropTypes.number.isRequired }; - -export default CircularProgress; diff --git a/frontend/src/components/DeleteObjectModal.js b/frontend/src/design/components/DeleteObjectModal.js similarity index 94% rename from frontend/src/components/DeleteObjectModal.js rename to frontend/src/design/components/DeleteObjectModal.js index b3b654c68..3392ee288 100644 --- a/frontend/src/components/DeleteObjectModal.js +++ b/frontend/src/design/components/DeleteObjectModal.js @@ -2,7 +2,7 @@ import PropTypes from 'prop-types'; import { Box, Button, CardContent, Dialog, Typography } from '@mui/material'; import { FaTrash } from 'react-icons/fa'; -const DeleteObjectModal = (props) => { +export const DeleteObjectModal = (props) => { const { objectName, deleteMessage, @@ -53,5 +53,3 @@ DeleteObjectModal.propTypes = { deleteFunction: PropTypes.func.isRequired, open: PropTypes.bool.isRequired }; - -export default DeleteObjectModal; diff --git 
a/frontend/src/components/DeleteObjectWithFrictionModal.js b/frontend/src/design/components/DeleteObjectWithFrictionModal.js similarity index 96% rename from frontend/src/components/DeleteObjectWithFrictionModal.js rename to frontend/src/design/components/DeleteObjectWithFrictionModal.js index e4022a178..b7a610bdf 100644 --- a/frontend/src/components/DeleteObjectWithFrictionModal.js +++ b/frontend/src/design/components/DeleteObjectWithFrictionModal.js @@ -13,7 +13,7 @@ import { import React, { useState } from 'react'; import { FaTrash } from 'react-icons/fa'; -const DeleteObjectWithFrictionModal = (props) => { +export const DeleteObjectWithFrictionModal = (props) => { const { objectName, deleteMessage, @@ -114,5 +114,3 @@ DeleteObjectWithFrictionModal.propTypes = { open: PropTypes.bool.isRequired, isAWSResource: PropTypes.bool }; - -export default DeleteObjectWithFrictionModal; diff --git a/frontend/src/components/ErrorNotification.js b/frontend/src/design/components/ErrorNotification.js similarity index 85% rename from frontend/src/components/ErrorNotification.js rename to frontend/src/design/components/ErrorNotification.js index 1ee2fe09e..1384b7124 100644 --- a/frontend/src/components/ErrorNotification.js +++ b/frontend/src/design/components/ErrorNotification.js @@ -1,11 +1,10 @@ -import React, { useEffect } from 'react'; -import { useSnackbar } from 'notistack'; -import { IconButton } from '@mui/material'; import { CancelRounded } from '@mui/icons-material'; -import { useDispatch, useSelector } from '../store'; -import { HIDE_ERROR } from '../store/errorReducer'; +import { IconButton } from '@mui/material'; +import { useSnackbar } from 'notistack'; +import React, { useEffect } from 'react'; +import { HIDE_ERROR, useDispatch, useSelector } from 'globalErrors'; -const ErrorNotification = () => { +export const ErrorNotification = () => { const dispatch = useDispatch(); const error = useSelector((state) => state.error.error); const { enqueueSnackbar, closeSnackbar } = useSnackbar(); @@ -37,5 +36,3 @@ const ErrorNotification = () => { return <>; }; - -export default ErrorNotification; diff --git a/frontend/src/components/FileDropzone.js b/frontend/src/design/components/FileDropzone.js similarity index 96% rename from frontend/src/components/FileDropzone.js rename to frontend/src/design/components/FileDropzone.js index 522ff93fd..433e22780 100644 --- a/frontend/src/components/FileDropzone.js +++ b/frontend/src/design/components/FileDropzone.js @@ -1,5 +1,3 @@ -import PropTypes from 'prop-types'; -import { useDropzone } from 'react-dropzone'; import { Box, Button, @@ -12,11 +10,12 @@ import { Tooltip, Typography } from '@mui/material'; -import DuplicateIcon from '../icons/Duplicate'; -import XIcon from '../icons/X'; -import bytesToSize from '../utils/bytesToSize'; +import PropTypes from 'prop-types'; +import { useDropzone } from 'react-dropzone'; +import { bytesToSize } from 'utils'; +import { DuplicateIcon, XIcon } from '../icons'; -const FileDropzone = (props) => { +export const FileDropzone = (props) => { const { accept, disabled, @@ -196,5 +195,3 @@ FileDropzone.propTypes = { FileDropzone.defaultProps = { files: [] }; - -export default FileDropzone; diff --git a/frontend/src/components/GlobalStyles.js b/frontend/src/design/components/GlobalStyles.js similarity index 90% rename from frontend/src/components/GlobalStyles.js rename to frontend/src/design/components/GlobalStyles.js index f28cdd75a..28f0bcaf5 100644 --- a/frontend/src/components/GlobalStyles.js +++ 
b/frontend/src/design/components/GlobalStyles.js @@ -26,10 +26,8 @@ const useStyles = makeStyles(() => }) ); -const GlobalStyles = () => { +export const GlobalStyles = () => { useStyles(); return null; }; - -export default GlobalStyles; diff --git a/frontend/src/components/IconAvatar.js b/frontend/src/design/components/IconAvatar.js similarity index 90% rename from frontend/src/components/IconAvatar.js rename to frontend/src/design/components/IconAvatar.js index fbc118e4f..682a763ff 100644 --- a/frontend/src/components/IconAvatar.js +++ b/frontend/src/design/components/IconAvatar.js @@ -11,7 +11,7 @@ const useStyles = makeStyles((theme) => ({ } })); -const IconAvatar = (props) => { +export const IconAvatar = (props) => { const { icon } = props; const classes = useStyles(); return <>{icon && {icon}}; @@ -20,5 +20,3 @@ const IconAvatar = (props) => { IconAvatar.propTypes = { icon: PropTypes.object.isRequired }; - -export default IconAvatar; diff --git a/frontend/src/components/Label.js b/frontend/src/design/components/Label.js similarity index 91% rename from frontend/src/components/Label.js rename to frontend/src/design/components/Label.js index 46a718233..9801fbb21 100644 --- a/frontend/src/components/Label.js +++ b/frontend/src/design/components/Label.js @@ -1,7 +1,7 @@ import PropTypes from 'prop-types'; import { styled } from '@mui/styles'; -const LabelRoot = styled('span')(({ theme, styleProps }) => { +export const LabelRoot = styled('span')(({ theme, styleProps }) => { const backgroundColor = theme.palette[styleProps.color].main; const color = theme.palette[styleProps.color].contrastText; @@ -29,7 +29,7 @@ const LabelRoot = styled('span')(({ theme, styleProps }) => { }; }); -const Label = (props) => { +export const Label = (props) => { const { color = 'primary', children, ...other } = props; const styleProps = { color }; @@ -51,5 +51,3 @@ Label.propTypes = { 'success' ]) }; - -export default Label; diff --git a/frontend/src/components/LoadingScreen.js b/frontend/src/design/components/LoadingScreen.js similarity index 84% rename from frontend/src/components/LoadingScreen.js rename to frontend/src/design/components/LoadingScreen.js index 706148bab..2ee03b2f6 100644 --- a/frontend/src/components/LoadingScreen.js +++ b/frontend/src/design/components/LoadingScreen.js @@ -2,7 +2,7 @@ import { useEffect } from 'react'; import NProgress from 'nprogress'; import { Box } from '@mui/material'; -const LoadingScreen = () => { +export const LoadingScreen = () => { useEffect(() => { NProgress.start(); @@ -20,5 +20,3 @@ const LoadingScreen = () => { /> ); }; - -export default LoadingScreen; diff --git a/frontend/src/components/Logo.js b/frontend/src/design/components/Logo.js similarity index 92% rename from frontend/src/components/Logo.js rename to frontend/src/design/components/Logo.js index 1816abeba..e72082b97 100644 --- a/frontend/src/components/Logo.js +++ b/frontend/src/design/components/Logo.js @@ -1,6 +1,6 @@ import { Box, CardMedia, Grid, Typography } from '@mui/material'; -const Logo = () => ( +export const Logo = () => ( <> @@ -23,4 +23,3 @@ const Logo = () => ( ); -export default Logo; diff --git a/frontend/src/components/NavItem.js b/frontend/src/design/components/NavItem.js similarity index 94% rename from frontend/src/components/NavItem.js rename to frontend/src/design/components/NavItem.js index d22aba77e..a076babaf 100644 --- a/frontend/src/components/NavItem.js +++ b/frontend/src/design/components/NavItem.js @@ -2,10 +2,9 @@ import { useState } from 'react'; import { NavLink as 
RouterLink } from 'react-router-dom'; import PropTypes from 'prop-types'; import { Box, Button, Collapse, ListItem } from '@mui/material'; -import ChevronDownIcon from '../icons/ChevronDown'; -import ChevronRightIcon from '../icons/ChevronRight'; +import { ChevronDownIcon, ChevronRightIcon } from '../icons'; -const NavItem = (props) => { +export const NavItem = (props) => { const { active, children, @@ -122,5 +121,3 @@ NavItem.defaultProps = { active: false, open: false }; - -export default NavItem; diff --git a/frontend/src/components/NavSection.js b/frontend/src/design/components/NavSection.js similarity index 95% rename from frontend/src/components/NavSection.js rename to frontend/src/design/components/NavSection.js index aff925159..3199f7261 100644 --- a/frontend/src/components/NavSection.js +++ b/frontend/src/design/components/NavSection.js @@ -1,7 +1,7 @@ import PropTypes from 'prop-types'; import { matchPath } from 'react-router-dom'; import { List, ListSubheader } from '@mui/material'; -import NavItem from './NavItem'; +import { NavItem } from './NavItem'; const renderNavItems = ({ depth = 0, items, pathname }) => ( @@ -77,7 +77,7 @@ const reduceChildRoutes = ({ acc, pathname, item, depth }) => { return acc; }; -const NavSection = (props) => { +export const NavSection = (props) => { const { items, pathname, title, ...other } = props; return ( @@ -112,5 +112,3 @@ NavSection.propTypes = { pathname: PropTypes.string, title: PropTypes.string }; - -export default NavSection; diff --git a/frontend/src/components/ObjectBrief.js b/frontend/src/design/components/ObjectBrief.js similarity index 97% rename from frontend/src/components/ObjectBrief.js rename to frontend/src/design/components/ObjectBrief.js index 3c4490cd4..19a7a1291 100644 --- a/frontend/src/components/ObjectBrief.js +++ b/frontend/src/design/components/ObjectBrief.js @@ -8,9 +8,9 @@ import { Typography, Divider } from '@mui/material'; -import Label from './Label'; +import { Label } from './Label'; -const ObjectBrief = (props) => { +export const ObjectBrief = (props) => { const { uri, description, @@ -135,5 +135,3 @@ ObjectBrief.propTypes = { title: PropTypes.string, confidentiality: PropTypes.string }; - -export default ObjectBrief; diff --git a/frontend/src/components/ObjectMetadata.js b/frontend/src/design/components/ObjectMetadata.js similarity index 96% rename from frontend/src/components/ObjectMetadata.js rename to frontend/src/design/components/ObjectMetadata.js index 6cc74da5f..c59bcb74e 100644 --- a/frontend/src/components/ObjectMetadata.js +++ b/frontend/src/design/components/ObjectMetadata.js @@ -1,5 +1,3 @@ -import { Link as RouterLink } from 'react-router-dom'; -import PropTypes from 'prop-types'; import { Card, CardContent, @@ -9,13 +7,15 @@ import { ListItem, Typography } from '@mui/material'; +import PropTypes from 'prop-types'; import React from 'react'; -import TextAvatar from './TextAvatar'; -import StackStatus from './StackStatus'; -import Label from './Label'; -import { dayjs } from '../utils/dayjs'; +import { Link as RouterLink } from 'react-router-dom'; +import { StackStatus } from '.'; +import { dayjs } from 'utils'; +import { Label } from './Label'; +import { TextAvatar } from './TextAvatar'; -const ObjectMetadata = (props) => { +export const ObjectMetadata = (props) => { const { owner, admins, @@ -245,5 +245,3 @@ ObjectMetadata.propTypes = { objectType: PropTypes.string, quicksightEnabled: PropTypes.bool }; - -export default ObjectMetadata; diff --git a/frontend/src/components/Pager.js 
b/frontend/src/design/components/Pager.js similarity index 93% rename from frontend/src/components/Pager.js rename to frontend/src/design/components/Pager.js index d6f07061d..236cb35b7 100644 --- a/frontend/src/components/Pager.js +++ b/frontend/src/design/components/Pager.js @@ -1,7 +1,7 @@ import { Box, Pagination } from '@mui/material'; import * as PropTypes from 'prop-types'; -function Pager(props) { +export function Pager(props) { const { items, mgTop, mgBottom, onChange } = props; return ( { +export const RefreshTableMenu = memo(({ refresh }) => { const anchorRef = useRef(null); const [openMenu, setOpenMenu] = useState(false); @@ -62,9 +62,8 @@ const RefreshTableMenu = ({ refresh }) => { ); -}; +}); + RefreshTableMenu.propTypes = { refresh: PropTypes.func }; - -export default memo(RefreshTableMenu); diff --git a/frontend/src/components/Scrollbar.js b/frontend/src/design/components/Scrollbar.js similarity index 77% rename from frontend/src/components/Scrollbar.js rename to frontend/src/design/components/Scrollbar.js index daceb0eb1..4c55d6a71 100644 --- a/frontend/src/components/Scrollbar.js +++ b/frontend/src/design/components/Scrollbar.js @@ -5,8 +5,6 @@ import { styled } from '@mui/styles'; const ScrollbarRoot = styled(SimpleBar)``; -const Scrollbar = forwardRef((props, ref) => ( +export const Scrollbar = forwardRef((props, ref) => ( )); - -export default Scrollbar; diff --git a/frontend/src/components/SearchInput.js b/frontend/src/design/components/SearchInput.js similarity index 88% rename from frontend/src/components/SearchInput.js rename to frontend/src/design/components/SearchInput.js index c111f4847..cac3f3c2d 100644 --- a/frontend/src/components/SearchInput.js +++ b/frontend/src/design/components/SearchInput.js @@ -1,8 +1,8 @@ import { Box, Card, Input } from '@mui/material'; import * as PropTypes from 'prop-types'; -import SearchIcon from '../icons/Search'; +import { SearchIcon } from '../icons'; -function SearchInput(props) { +export function SearchInput(props) { const { onChange, onKeyUp, value } = props; return ( @@ -39,5 +39,3 @@ SearchInput.propTypes = { onKeyUp: PropTypes.func, value: PropTypes.string }; - -export default SearchInput; diff --git a/frontend/src/components/SettingsDrawer.js b/frontend/src/design/components/SettingsDrawer.js similarity index 96% rename from frontend/src/components/SettingsDrawer.js rename to frontend/src/design/components/SettingsDrawer.js index 6f161fd9c..cd69ffbc6 100644 --- a/frontend/src/components/SettingsDrawer.js +++ b/frontend/src/design/components/SettingsDrawer.js @@ -1,4 +1,3 @@ -import { useEffect, useState } from 'react'; import { Box, Button, @@ -10,9 +9,10 @@ import { Tooltip, Typography } from '@mui/material'; +import { useEffect, useState } from 'react'; import { THEMES } from '../constants'; -import useSettings from '../hooks/useSettings'; -import Adjustments from '../icons/adjustments'; +import { useSettings } from '../hooks'; +import { AdjustmentsIcon } from '../icons'; const getValues = (settings) => ({ compact: settings.compact, @@ -24,7 +24,7 @@ const getValues = (settings) => ({ tabIcons: settings.tabIcons }); -const SettingsDrawer = () => { +export const SettingsDrawer = () => { const { settings, saveSettings } = useSettings(); const [open, setOpen] = useState(false); const [values, setValues] = useState(getValues(settings)); @@ -57,7 +57,7 @@ const SettingsDrawer = () => { <> - + { ); }; - -export default SettingsDrawer; diff --git a/frontend/src/design/components/ShareStatus.js 
b/frontend/src/design/components/ShareStatus.js new file mode 100644 index 000000000..1da34377f --- /dev/null +++ b/frontend/src/design/components/ShareStatus.js @@ -0,0 +1,34 @@ +import * as PropTypes from 'prop-types'; +import { Label } from './Label'; + +export const ShareStatus = (props) => { + const { status } = props; + const setTagColor = () => { + if ( + [ + 'Approved', + 'Share_Approved', + 'Revoke_Approved', + 'Share_Succeeded', + 'Revoke_Succeeded' + ].includes(status) + ) + return 'success'; + if ( + [ + 'Rejected', + 'Revoked', + 'Share_Rejected', + 'Share_Failed', + 'Revoke_Failed' + ].includes(status) + ) + return 'error'; + if (['PendingApproval', 'Submitted'].includes(status)) return 'warning'; + return 'info'; + }; + return ; +}; +ShareStatus.propTypes = { + status: PropTypes.string.isRequired +}; diff --git a/frontend/src/components/SplashScreen.js b/frontend/src/design/components/SplashScreen.js similarity index 81% rename from frontend/src/components/SplashScreen.js rename to frontend/src/design/components/SplashScreen.js index 1e9882012..040819cf5 100644 --- a/frontend/src/components/SplashScreen.js +++ b/frontend/src/design/components/SplashScreen.js @@ -1,7 +1,7 @@ import { Box } from '@mui/material'; -import Logo from './Logo'; +import { Logo } from './Logo'; -const SlashScreen = () => ( +export const SplashScreen = () => ( ( ); - -export default SlashScreen; diff --git a/frontend/src/design/components/StackStatus.js b/frontend/src/design/components/StackStatus.js new file mode 100644 index 000000000..080b1a14c --- /dev/null +++ b/frontend/src/design/components/StackStatus.js @@ -0,0 +1,30 @@ +import * as PropTypes from 'prop-types'; +import { Label } from './Label'; + +export const StackStatus = (props) => { + const { status } = props; + const statusColor = (s) => { + let color; + switch (s) { + case 'CREATE_COMPLETE': + case 'UPDATE_COMPLETE': + color = 'success'; + break; + case 'CREATE_FAILED': + case 'ProcessFailed': + case 'DELETE_FAILED': + case 'DELETE_COMPLETE': + case 'ROLLBACK_COMPLETE': + case 'ROLLBACK_IN_PROGRESS': + color = 'error'; + break; + default: + color = 'info'; + } + return color; + }; + return ; +}; +StackStatus.propTypes = { + status: PropTypes.string.isRequired +}; diff --git a/frontend/src/components/TagsInput.js b/frontend/src/design/components/TagsInput.js similarity index 99% rename from frontend/src/components/TagsInput.js rename to frontend/src/design/components/TagsInput.js index 77b67f0af..c35ec95e2 100644 --- a/frontend/src/components/TagsInput.js +++ b/frontend/src/design/components/TagsInput.js @@ -178,7 +178,7 @@ const keyCodes = { RIGHT_ARROW: 39 }; -class ChipInput extends React.Component { +class RawChipInput extends React.Component { state = { chips: [], errorText: undefined, @@ -690,7 +690,7 @@ class ChipInput extends React.Component { } } -ChipInput.propTypes = { +RawChipInput.propTypes = { /** Allows duplicate chips if set to true. */ allowDuplicates: PropTypes.bool, /** If true, the placeholder will always be visible. 
*/ @@ -758,7 +758,7 @@ ChipInput.propTypes = { variant: PropTypes.oneOf(['outlined', 'standard', 'filled']) }; -ChipInput.defaultProps = { +RawChipInput.defaultProps = { allowDuplicates: false, blurBehavior: 'clear', clearInputValueOnChange: false, @@ -769,7 +769,7 @@ ChipInput.defaultProps = { variant: 'standard' }; -export default withStyles(styles, { name: 'WAMuiChipInput' })(ChipInput); +export const ChipInput = withStyles(styles, { name: 'WAMuiChipInput' })(RawChipInput); export const defaultChipRenderer = ( { diff --git a/frontend/src/components/TextAvatar.js b/frontend/src/design/components/TextAvatar.js similarity index 89% rename from frontend/src/components/TextAvatar.js rename to frontend/src/design/components/TextAvatar.js index 24c9b7d37..412f8f0d1 100644 --- a/frontend/src/components/TextAvatar.js +++ b/frontend/src/design/components/TextAvatar.js @@ -9,7 +9,7 @@ const useStyles = makeStyles((theme) => ({ } })); -const TextAvatar = (props) => { +export const TextAvatar = (props) => { const { name } = props; const classes = useStyles(); return ( @@ -24,5 +24,3 @@ const TextAvatar = (props) => { TextAvatar.propTypes = { name: PropTypes.string.isRequired }; - -export default TextAvatar; diff --git a/frontend/src/components/UpVoteButton.js b/frontend/src/design/components/UpVoteButton.js similarity index 91% rename from frontend/src/components/UpVoteButton.js rename to frontend/src/design/components/UpVoteButton.js index 54a67e745..9b99e3913 100644 --- a/frontend/src/components/UpVoteButton.js +++ b/frontend/src/design/components/UpVoteButton.js @@ -3,7 +3,7 @@ import { ThumbUpAlt, ThumbUpOffAlt } from '@mui/icons-material'; import * as PropTypes from 'prop-types'; import React from 'react'; -const UpVoteButton = (props) => { +export const UpVoteButton = (props) => { const { upVoted, onClick, upVotes } = props; return ( + + + + ); + + return ( + <> + + + { + onOpenDrawerChange(false); + }} + > + {openDrawer} + {theme.direction === 'ltr' ? : } + + + {content} + + + { + setDisplayCollapser(true); + }} + onMouseLeave={() => { + setDisplayCollapser(false); + }} + > + {displayCollapser && ( + + { + onOpenDrawerChange(false); + }} + > + {openDrawer} + {openDrawer ? 
: } + + + )} + {content} + + + + ); +}; + +DefaultSidebar.propTypes = { + openDrawer: PropTypes.bool, + onOpenDrawerChange: PropTypes.func +}; diff --git a/frontend/src/design/components/layout/index.js b/frontend/src/design/components/layout/index.js new file mode 100644 index 000000000..64ce55b57 --- /dev/null +++ b/frontend/src/design/components/layout/index.js @@ -0,0 +1,3 @@ +export * from './DefaultLayout'; +export * from './DefaultNavbar'; +export * from './DefaultSidebar'; diff --git a/frontend/src/components/popovers/AccountPopover.js b/frontend/src/design/components/popovers/AccountPopover.js similarity index 87% rename from frontend/src/components/popovers/AccountPopover.js rename to frontend/src/design/components/popovers/AccountPopover.js index 3ed1daed7..1f02e121c 100644 --- a/frontend/src/components/popovers/AccountPopover.js +++ b/frontend/src/design/components/popovers/AccountPopover.js @@ -1,6 +1,3 @@ -import { useRef, useState } from 'react'; -import { useNavigate, Link as RouterLink } from 'react-router-dom'; -import { useSnackbar } from 'notistack'; import { Box, Button, @@ -12,12 +9,15 @@ import { Popover, Typography } from '@mui/material'; -import useAuth from '../../hooks/useAuth'; -import TextAvatar from '../TextAvatar'; -import useGroups from '../../hooks/useGroups'; -import Cog from '../../icons/Cog'; +import { useSnackbar } from 'notistack'; +import { useRef, useState } from 'react'; +import { Link as RouterLink, useNavigate } from 'react-router-dom'; +import { useAuth } from 'authentication'; +import { useGroups } from 'services'; +import { CogIcon } from '../../icons'; +import { TextAvatar } from '../TextAvatar'; -const AccountPopover = () => { +export const AccountPopover = () => { const anchorRef = useRef(null); const { user, logout } = useAuth(); const groups = useGroups(); @@ -87,7 +87,7 @@ const AccountPopover = () => { {groups && groups.indexOf('DAAdministrators') !== -1 && ( - + { ); }; - -export default AccountPopover; diff --git a/frontend/src/design/components/popovers/NotificationsPopover.js b/frontend/src/design/components/popovers/NotificationsPopover.js new file mode 100644 index 000000000..0491a644c --- /dev/null +++ b/frontend/src/design/components/popovers/NotificationsPopover.js @@ -0,0 +1,186 @@ +import { DeleteOutlined } from '@mui/icons-material'; +import { + Avatar, + Badge, + Box, + Button, + IconButton, + Link, + List, + ListItem, + ListItemAvatar, + ListItemText, + Popover, + Tooltip, + Typography +} from '@mui/material'; +import { useCallback, useEffect, useRef, useState } from 'react'; +import { + countUnreadNotifications, + listNotifications, + markNotificationAsRead, + useClient +} from 'services'; +import { BellIcon } from '../../icons'; +import { Defaults } from '../defaults'; + +export const NotificationsPopover = () => { + const anchorRef = useRef(null); + const [open, setOpen] = useState(false); + const client = useClient(); + const [loading, setLoading] = useState(true); + const [notifications, setNotifications] = useState([]); + const [countInbox, setCountInbox] = useState(null); + + const handleOpen = () => { + setOpen(true); + }; + + const handleClose = () => { + setOpen(false); + }; + + const getCountInbox = useCallback(async () => { + setLoading(true); + const response = await client.query(countUnreadNotifications()); + if (!response.errors) { + setCountInbox(response.data.countUnreadNotifications); + fetchItems({ unread: true }); + } + setLoading(false); + }, [client]); + + const fetchItems = useCallback( + async 
(notificationFilter) => { + setLoading(true); + let filter = Object.assign( + {}, + Defaults.selectListFilter, + notificationFilter + ); + const response = await client.query(listNotifications(filter)); + if (!response.errors) { + setNotifications(response.data.listNotifications.nodes); + } + setLoading(false); + }, + [client] + ); + + const markAsRead = useCallback( + async (notificationUri) => { + await client.mutate(markNotificationAsRead(notificationUri)); + }, + [client] + ); + + const handleRemoveNotification = (idx) => { + let notificiationUri = notifications[idx].notificationUri; + setNotifications((prevstate) => { + const rows = [...prevstate]; + rows.splice(idx, 1); + return rows; + }); + setCountInbox(countInbox - 1); + markAsRead(notificiationUri); + }; + + const clearNotifications = (idx) => { + let readNotifications = notifications; + setNotifications([]); + setCountInbox(0); + readNotifications.forEach((note) => { + markAsRead(note.notificationUri); + }); + }; + + useEffect(() => { + if (client) { + getCountInbox(); + } + }, [client]); + + return ( + <> + + + + + + + + + + + Notifications + + + + {loading || notifications.length === 0 ? ( + + + There are no notifications + + + ) : ( + <> + + {notifications.length > 0 && + notifications.map((notification, idx) => ( + + + + + + {notification.message} + + } + /> + { + handleRemoveNotification(idx); + }} + > + + + + ))} + + + )} + + + ); +}; diff --git a/frontend/src/design/components/popovers/SwitchModePopover.js b/frontend/src/design/components/popovers/SwitchModePopover.js new file mode 100644 index 000000000..736f3a2e2 --- /dev/null +++ b/frontend/src/design/components/popovers/SwitchModePopover.js @@ -0,0 +1,28 @@ +import { IconButton, Tooltip } from '@mui/material'; +import { useSettings } from '../../hooks'; +import { RefreshIcon } from '../../icons'; + +/** + * @description Toggle "advanced" / "basic" mode. + * @returns {JSX.Element} + */ +export const SwitchModePopover = () => { + const { settings, saveSettings } = useSettings(); + + /** + * @description Toggle mode. 
+ */ + const handleSwitch = () => + saveSettings({ + ...settings, + isAdvancedMode: !settings.isAdvancedMode + }); + + return ( + + + + + + ); +}; diff --git a/frontend/src/components/popovers/ThemePopover.js b/frontend/src/design/components/popovers/ThemePopover.js similarity index 81% rename from frontend/src/components/popovers/ThemePopover.js rename to frontend/src/design/components/popovers/ThemePopover.js index c8da773ec..88846b268 100644 --- a/frontend/src/components/popovers/ThemePopover.js +++ b/frontend/src/design/components/popovers/ThemePopover.js @@ -1,11 +1,10 @@ -import { useEffect, useState } from 'react'; import { IconButton, Tooltip } from '@mui/material'; -import useSettings from '../../hooks/useSettings'; +import { useEffect, useState } from 'react'; import { THEMES } from '../../constants'; -import MoonIcon from '../../icons/Moon'; -import SunIcon from '../../icons/Sun'; +import { useSettings } from '../../hooks'; +import { MoonIcon, SunIcon } from '../../icons'; -const ThemePopover = () => { +export const ThemePopover = () => { const { settings, saveSettings } = useSettings(); const [selectedTheme, setSelectedTheme] = useState(settings.theme); @@ -33,5 +32,3 @@ const ThemePopover = () => { ); }; - -export default ThemePopover; diff --git a/frontend/src/design/components/popovers/index.js b/frontend/src/design/components/popovers/index.js new file mode 100644 index 000000000..6ae796e72 --- /dev/null +++ b/frontend/src/design/components/popovers/index.js @@ -0,0 +1,4 @@ +export * from './AccountPopover'; +export * from './NotificationsPopover'; +export * from './SwitchModePopover'; +export * from './ThemePopover'; diff --git a/frontend/src/design/constants.js b/frontend/src/design/constants.js new file mode 100644 index 000000000..e00a11a37 --- /dev/null +++ b/frontend/src/design/constants.js @@ -0,0 +1,4 @@ +export const THEMES = { + LIGHT: 'LIGHT', + DARK: 'DARK' +}; diff --git a/frontend/src/design/contexts/SettingsContext.js b/frontend/src/design/contexts/SettingsContext.js new file mode 100644 index 000000000..78e1c1cc1 --- /dev/null +++ b/frontend/src/design/contexts/SettingsContext.js @@ -0,0 +1,76 @@ +import PropTypes from 'prop-types'; +import { createContext, useEffect, useState } from 'react'; +import { THEMES } from '../constants'; + +const initialSettings = { + compact: true, + responsiveFontSizes: true, + roundedCorners: true, + theme: THEMES.DARK, + isAdvancedMode: true, + tabIcons: false +}; + +const SETTINGS_KEY = 'settings'; + +export const restoreSettings = () => { + try { + const storedSettings = window.localStorage.getItem(SETTINGS_KEY); + if (storedSettings != null) { + return JSON.parse(storedSettings); + } + + const theme = window.matchMedia('(prefers-color-scheme: dark)').matches + ? 
THEMES.DARK + : THEMES.LIGHT; + + return { ...initialSettings, theme: theme }; + } catch (err) { + console.error(err); + return null; + } +}; + +export const storeSettings = (settings) => { + window.localStorage.setItem(SETTINGS_KEY, JSON.stringify(settings)); +}; + +export const SettingsContext = createContext({ + settings: initialSettings, + saveSettings: () => {} +}); + +export const SettingsProvider = (props) => { + const { children } = props; + const [settings, setSettings] = useState(initialSettings); + + useEffect(() => { + const restoredSettings = restoreSettings(); + + if (restoredSettings) { + setSettings(restoredSettings); + } + }, []); + + const saveSettings = (updatedSettings) => { + setSettings(updatedSettings); + storeSettings(updatedSettings); + }; + + return ( + + {children} + + ); +}; + +SettingsProvider.propTypes = { + children: PropTypes.node.isRequired +}; + +export const SettingsConsumer = SettingsContext.Consumer; diff --git a/frontend/src/design/contexts/index.js b/frontend/src/design/contexts/index.js new file mode 100644 index 000000000..f171b804d --- /dev/null +++ b/frontend/src/design/contexts/index.js @@ -0,0 +1 @@ +export * from './SettingsContext'; diff --git a/frontend/src/design/hooks/index.js b/frontend/src/design/hooks/index.js new file mode 100644 index 000000000..40fc37a27 --- /dev/null +++ b/frontend/src/design/hooks/index.js @@ -0,0 +1,3 @@ +export * from './useCardStyle'; +export * from './useScrollReset'; +export * from './useSettings'; diff --git a/frontend/src/hooks/useCardStyle.js b/frontend/src/design/hooks/useCardStyle.js similarity index 76% rename from frontend/src/hooks/useCardStyle.js rename to frontend/src/design/hooks/useCardStyle.js index 9fde9f3a3..418022753 100644 --- a/frontend/src/hooks/useCardStyle.js +++ b/frontend/src/design/hooks/useCardStyle.js @@ -1,6 +1,6 @@ import { makeStyles } from '@mui/styles'; -const useCardStyle = makeStyles((theme) => ({ +export const useCardStyle = makeStyles((theme) => ({ card: { boxShadow: '0 8px 40px -12px rgba(0,0,0,0.3)', '&:hover': { @@ -9,4 +9,3 @@ const useCardStyle = makeStyles((theme) => ({ } } })); -export default useCardStyle; diff --git a/frontend/src/hooks/useScrollReset.js b/frontend/src/design/hooks/useScrollReset.js similarity index 77% rename from frontend/src/hooks/useScrollReset.js rename to frontend/src/design/hooks/useScrollReset.js index 8378ec07d..23eb66051 100644 --- a/frontend/src/hooks/useScrollReset.js +++ b/frontend/src/design/hooks/useScrollReset.js @@ -1,7 +1,7 @@ import { useEffect } from 'react'; import { useLocation } from 'react-router-dom'; -const useScrollReset = () => { +export const useScrollReset = () => { const location = useLocation(); useEffect(() => { @@ -10,5 +10,3 @@ const useScrollReset = () => { return null; }; - -export default useScrollReset; diff --git a/frontend/src/design/hooks/useSettings.js b/frontend/src/design/hooks/useSettings.js new file mode 100644 index 000000000..8985fc669 --- /dev/null +++ b/frontend/src/design/hooks/useSettings.js @@ -0,0 +1,4 @@ +import { useContext } from 'react'; +import { SettingsContext } from '../contexts'; + +export const useSettings = () => useContext(SettingsContext); diff --git a/frontend/src/design/icons/AdjustmentsIcon.js b/frontend/src/design/icons/AdjustmentsIcon.js new file mode 100644 index 000000000..6d1216465 --- /dev/null +++ b/frontend/src/design/icons/AdjustmentsIcon.js @@ -0,0 +1,12 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const AdjustmentsIcon = createSvgIcon( + + 
+ , + 'Adjustments' +); diff --git a/frontend/src/design/icons/ArchiveIcon.js b/frontend/src/design/icons/ArchiveIcon.js new file mode 100644 index 000000000..491fa7757 --- /dev/null +++ b/frontend/src/design/icons/ArchiveIcon.js @@ -0,0 +1,17 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const ArchiveIcon = createSvgIcon( + + + + , + 'Archive' +); diff --git a/frontend/src/design/icons/ArrowLeftIcon.js b/frontend/src/design/icons/ArrowLeftIcon.js new file mode 100644 index 000000000..612f36dff --- /dev/null +++ b/frontend/src/design/icons/ArrowLeftIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const ArrowLeftIcon = createSvgIcon( + + + , + 'ArrowLeft' +); diff --git a/frontend/src/design/icons/ArrowRightIcon.js b/frontend/src/design/icons/ArrowRightIcon.js new file mode 100644 index 000000000..492ebc927 --- /dev/null +++ b/frontend/src/design/icons/ArrowRightIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const ArrowRightIcon = createSvgIcon( + + + , + 'ArrowRight' +); diff --git a/frontend/src/design/icons/ArrowsExpandIcon.js b/frontend/src/design/icons/ArrowsExpandIcon.js new file mode 100644 index 000000000..3fedc28dc --- /dev/null +++ b/frontend/src/design/icons/ArrowsExpandIcon.js @@ -0,0 +1,18 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const ArrowsExpandIcon = createSvgIcon( + + + , + 'ArrowsExpandIcon' +); diff --git a/frontend/src/design/icons/BellIcon.js b/frontend/src/design/icons/BellIcon.js new file mode 100644 index 000000000..05b08020c --- /dev/null +++ b/frontend/src/design/icons/BellIcon.js @@ -0,0 +1,12 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const BellIcon = createSvgIcon( + + + , + 'Bell' +); diff --git a/frontend/src/design/icons/ChevronDownIcon.js b/frontend/src/design/icons/ChevronDownIcon.js new file mode 100644 index 000000000..d51ed3c78 --- /dev/null +++ b/frontend/src/design/icons/ChevronDownIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const ChevronDownIcon = createSvgIcon( + + + , + 'ChevronDown' +); diff --git a/frontend/src/design/icons/ChevronLeftIcon.js b/frontend/src/design/icons/ChevronLeftIcon.js new file mode 100644 index 000000000..6fc36d8ff --- /dev/null +++ b/frontend/src/design/icons/ChevronLeftIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const ChevronLeftIcon = createSvgIcon( + + + , + 'ChevronLeft' +); diff --git a/frontend/src/design/icons/ChevronRightIcon.js b/frontend/src/design/icons/ChevronRightIcon.js new file mode 100644 index 000000000..831a1e924 --- /dev/null +++ b/frontend/src/design/icons/ChevronRightIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const ChevronRightIcon = createSvgIcon( + + + , + 'ChevronRight' +); diff --git a/frontend/src/design/icons/ChevronUpIcon.js b/frontend/src/design/icons/ChevronUpIcon.js new file mode 100644 index 000000000..8cac5aba4 --- /dev/null +++ b/frontend/src/design/icons/ChevronUpIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const ChevronUpIcon = createSvgIcon( + + + , + 'ChevronUp' +); diff --git a/frontend/src/design/icons/CogIcon.js b/frontend/src/design/icons/CogIcon.js new file mode 100644 index 000000000..0d74e03ad --- /dev/null +++ b/frontend/src/design/icons/CogIcon.js @@ -0,0 +1,16 @@ 
+import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const CogIcon = createSvgIcon( + + + , + 'Cog' +); diff --git a/frontend/src/design/icons/DocumentTextIcon.js b/frontend/src/design/icons/DocumentTextIcon.js new file mode 100644 index 000000000..edbdeebb3 --- /dev/null +++ b/frontend/src/design/icons/DocumentTextIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const DocumentTextIcon = createSvgIcon( + + + , + 'DocumentText' +); diff --git a/frontend/src/design/icons/DotsHorizontalIcon.js b/frontend/src/design/icons/DotsHorizontalIcon.js new file mode 100644 index 000000000..2ea650b32 --- /dev/null +++ b/frontend/src/design/icons/DotsHorizontalIcon.js @@ -0,0 +1,12 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const DotsHorizontalIcon = createSvgIcon( + + + , + 'DotsHorizontal' +); diff --git a/frontend/src/design/icons/DownloadIcon.js b/frontend/src/design/icons/DownloadIcon.js new file mode 100644 index 000000000..193e8f4d1 --- /dev/null +++ b/frontend/src/design/icons/DownloadIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const DownloadIcon = createSvgIcon( + + + , + 'Download' +); diff --git a/frontend/src/design/icons/DuplicateIcon.js b/frontend/src/design/icons/DuplicateIcon.js new file mode 100644 index 000000000..b780f44c5 --- /dev/null +++ b/frontend/src/design/icons/DuplicateIcon.js @@ -0,0 +1,13 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const DuplicateIcon = createSvgIcon( + + + + , + 'DuplicateIcon' +); diff --git a/frontend/src/design/icons/ExternalLinkIcon.js b/frontend/src/design/icons/ExternalLinkIcon.js new file mode 100644 index 000000000..20bfd11cb --- /dev/null +++ b/frontend/src/design/icons/ExternalLinkIcon.js @@ -0,0 +1,13 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const ExternalLinkIcon = createSvgIcon( + + + + , + 'ExternalLink' +); diff --git a/frontend/src/design/icons/LockIcon.js b/frontend/src/design/icons/LockIcon.js new file mode 100644 index 000000000..bd1469c20 --- /dev/null +++ b/frontend/src/design/icons/LockIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const LockIcon = createSvgIcon( + + + , + 'Lock' +); diff --git a/frontend/src/design/icons/MenuIcon.js b/frontend/src/design/icons/MenuIcon.js new file mode 100644 index 000000000..67935eb73 --- /dev/null +++ b/frontend/src/design/icons/MenuIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const MenuIcon = createSvgIcon( + + + , + 'Menu' +); diff --git a/frontend/src/design/icons/MinusIcon.js b/frontend/src/design/icons/MinusIcon.js new file mode 100644 index 000000000..01197df43 --- /dev/null +++ b/frontend/src/design/icons/MinusIcon.js @@ -0,0 +1,18 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const MinusIcon = createSvgIcon( + + + , + 'Minus' +); diff --git a/frontend/src/design/icons/MoonIcon.js b/frontend/src/design/icons/MoonIcon.js new file mode 100644 index 000000000..c294fb9e3 --- /dev/null +++ b/frontend/src/design/icons/MoonIcon.js @@ -0,0 +1,12 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const MoonIcon = createSvgIcon( + + + , + 'Moon' +); diff --git a/frontend/src/design/icons/PencilAltIcon.js b/frontend/src/design/icons/PencilAltIcon.js new file mode 100644 index 000000000..7cc2a1dc5 --- /dev/null +++ 
b/frontend/src/design/icons/PencilAltIcon.js @@ -0,0 +1,17 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const PencilAltIcon = createSvgIcon( + + + + , + 'PencilAltIcon' +); diff --git a/frontend/src/design/icons/PlusIcon.js b/frontend/src/design/icons/PlusIcon.js new file mode 100644 index 000000000..0152c40c3 --- /dev/null +++ b/frontend/src/design/icons/PlusIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const PlusIcon = createSvgIcon( + + + , + 'Plus' +); diff --git a/frontend/src/design/icons/RefreshIcon.js b/frontend/src/design/icons/RefreshIcon.js new file mode 100644 index 000000000..ecec44766 --- /dev/null +++ b/frontend/src/design/icons/RefreshIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const RefreshIcon = createSvgIcon( + + + , + 'Refresh' +); diff --git a/frontend/src/design/icons/SaveIcon.js b/frontend/src/design/icons/SaveIcon.js new file mode 100644 index 000000000..33a857cab --- /dev/null +++ b/frontend/src/design/icons/SaveIcon.js @@ -0,0 +1,12 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const SaveIcon = createSvgIcon( + + + , + 'Save' +); diff --git a/frontend/src/design/icons/SearchIcon.js b/frontend/src/design/icons/SearchIcon.js new file mode 100644 index 000000000..c85aa927f --- /dev/null +++ b/frontend/src/design/icons/SearchIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const SearchIcon = createSvgIcon( + + + , + 'SearchIcon' +); diff --git a/frontend/src/design/icons/SunIcon.js b/frontend/src/design/icons/SunIcon.js new file mode 100644 index 000000000..b4887c975 --- /dev/null +++ b/frontend/src/design/icons/SunIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const SunIcon = createSvgIcon( + + + , + 'Sun' +); diff --git a/frontend/src/design/icons/UserIcon.js b/frontend/src/design/icons/UserIcon.js new file mode 100644 index 000000000..13b187c70 --- /dev/null +++ b/frontend/src/design/icons/UserIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const UserIcon = createSvgIcon( + + + , + 'User' +); diff --git a/frontend/src/design/icons/XIcon.js b/frontend/src/design/icons/XIcon.js new file mode 100644 index 000000000..18e852e8e --- /dev/null +++ b/frontend/src/design/icons/XIcon.js @@ -0,0 +1,16 @@ +import createSvgIcon from '@mui/material/utils/createSvgIcon'; + +export const XIcon = createSvgIcon( + + + , + 'X' +); diff --git a/frontend/src/design/icons/index.js b/frontend/src/design/icons/index.js new file mode 100644 index 000000000..da5922b34 --- /dev/null +++ b/frontend/src/design/icons/index.js @@ -0,0 +1,28 @@ +export * from './AdjustmentsIcon'; +export * from './ArchiveIcon'; +export * from './ArrowLeftIcon'; +export * from './ArrowRightIcon'; +export * from './ArrowsExpandIcon'; +export * from './BellIcon'; +export * from './ChevronDownIcon'; +export * from './ChevronLeftIcon'; +export * from './ChevronRightIcon'; +export * from './ChevronUpIcon'; +export * from './CogIcon'; +export * from './DocumentTextIcon'; +export * from './DotsHorizontalIcon'; +export * from './DownloadIcon'; +export * from './DuplicateIcon'; +export * from './ExternalLinkIcon'; +export * from './LockIcon'; +export * from './MenuIcon'; +export * from './MinusIcon'; +export * from './MoonIcon'; +export * from './PencilAltIcon'; +export * from './PlusIcon'; +export * from 
'./RefreshIcon'; +export * from './SaveIcon'; +export * from './SearchIcon'; +export * from './SunIcon'; +export * from './UserIcon'; +export * from './XIcon'; diff --git a/frontend/src/design/index.js b/frontend/src/design/index.js new file mode 100644 index 000000000..afbe6b051 --- /dev/null +++ b/frontend/src/design/index.js @@ -0,0 +1,6 @@ +export * from './components'; +export * from './constants'; +export * from './contexts'; +export * from './hooks'; +export * from './icons'; +export * from './theme'; diff --git a/frontend/src/theme/BaseThemeOptions.js b/frontend/src/design/theme/BaseThemeOptions.js similarity index 100% rename from frontend/src/theme/BaseThemeOptions.js rename to frontend/src/design/theme/BaseThemeOptions.js diff --git a/frontend/src/theme/DarkThemeOptions.js b/frontend/src/design/theme/DarkThemeOptions.js similarity index 100% rename from frontend/src/theme/DarkThemeOptions.js rename to frontend/src/design/theme/DarkThemeOptions.js diff --git a/frontend/src/theme/LightThemeOptions.js b/frontend/src/design/theme/LightThemeOptions.js similarity index 100% rename from frontend/src/theme/LightThemeOptions.js rename to frontend/src/design/theme/LightThemeOptions.js diff --git a/frontend/src/design/theme/index.js b/frontend/src/design/theme/index.js new file mode 100644 index 000000000..42d796feb --- /dev/null +++ b/frontend/src/design/theme/index.js @@ -0,0 +1,33 @@ +import { createTheme, responsiveFontSizes } from '@mui/material/styles'; +import { THEMES } from '../constants'; +import { baseThemeOptions } from './BaseThemeOptions'; +import { darkThemeOptions } from './DarkThemeOptions'; +import { lightThemeOptions } from './LightThemeOptions'; + +export const createMaterialTheme = (config) => { + let theme = createTheme( + baseThemeOptions, + config.theme === THEMES.DARK ? darkThemeOptions : lightThemeOptions, + { + direction: config.direction + }, + { + ...(config.roundedCorners + ? 
{ + shape: { + borderRadius: 16 + } + } + : { + shape: { + borderRadius: 8 + } + }) + } + ); + + if (config.responsiveFontSizes) { + theme = responsiveFontSizes(theme); + } + return theme; +}; diff --git a/frontend/src/store/errorReducer.js b/frontend/src/globalErrors/errorReducer.js similarity index 83% rename from frontend/src/store/errorReducer.js rename to frontend/src/globalErrors/errorReducer.js index 153653dc1..9eb3fc1b2 100644 --- a/frontend/src/store/errorReducer.js +++ b/frontend/src/globalErrors/errorReducer.js @@ -6,7 +6,7 @@ const initState = { isOpen: false }; -export function errorReducer(state = initState, action) { +export const errorReducer = function (state = initState, action) { const { error } = action; if (error) { @@ -22,4 +22,4 @@ export function errorReducer(state = initState, action) { }; } return state; -} +}; diff --git a/frontend/src/globalErrors/index.js b/frontend/src/globalErrors/index.js new file mode 100644 index 000000000..0db8b1fc7 --- /dev/null +++ b/frontend/src/globalErrors/index.js @@ -0,0 +1,3 @@ +export * from './store'; +export * from './errorReducer'; +export * from './rootReducer'; diff --git a/frontend/src/globalErrors/rootReducer.js b/frontend/src/globalErrors/rootReducer.js new file mode 100644 index 000000000..c27c00393 --- /dev/null +++ b/frontend/src/globalErrors/rootReducer.js @@ -0,0 +1,6 @@ +import { combineReducers } from '@reduxjs/toolkit'; +import { errorReducer } from './errorReducer'; + +export const rootReducer = combineReducers({ + error: errorReducer +}); diff --git a/frontend/src/globalErrors/store.js b/frontend/src/globalErrors/store.js new file mode 100644 index 000000000..14eb90d29 --- /dev/null +++ b/frontend/src/globalErrors/store.js @@ -0,0 +1,15 @@ +import { + useDispatch as useReduxDispatch, + useSelector as useReduxSelector +} from 'react-redux'; +import { configureStore } from '@reduxjs/toolkit'; +import { rootReducer } from './rootReducer'; + +export const store = configureStore({ + reducer: rootReducer, + devTools: process.env.REACT_APP_ENABLE_REDUX_DEV_TOOLS === 'true' +}); + +export const useSelector = useReduxSelector; + +export const useDispatch = () => useReduxDispatch(); diff --git a/frontend/src/hooks/useAuth.js b/frontend/src/hooks/useAuth.js deleted file mode 100644 index bf05b293f..000000000 --- a/frontend/src/hooks/useAuth.js +++ /dev/null @@ -1,10 +0,0 @@ -import { useContext } from 'react'; -import LocalContext from '../contexts/LocalContext'; -import AuthContext from '../contexts/AmplifyContext'; - -const useAuth = () => - useContext( - !process.env.REACT_APP_COGNITO_USER_POOL_ID ? 
LocalContext : AuthContext - ); - -export default useAuth; diff --git a/frontend/src/hooks/useSettings.js b/frontend/src/hooks/useSettings.js deleted file mode 100644 index 599a9f87a..000000000 --- a/frontend/src/hooks/useSettings.js +++ /dev/null @@ -1,6 +0,0 @@ -import { useContext } from 'react'; -import SettingsContext from '../contexts/SettingsContext'; - -const useSettings = () => useContext(SettingsContext); - -export default useSettings; diff --git a/frontend/src/icons/Archive.js b/frontend/src/icons/Archive.js deleted file mode 100644 index 37c1aca89..000000000 --- a/frontend/src/icons/Archive.js +++ /dev/null @@ -1,19 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const Archive = createSvgIcon( - - - - , - 'Archive' -); - -export default Archive; diff --git a/frontend/src/icons/ArrowLeft.js b/frontend/src/icons/ArrowLeft.js deleted file mode 100644 index 058dd6193..000000000 --- a/frontend/src/icons/ArrowLeft.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const ArrowLeft = createSvgIcon( - - - , - 'ArrowLeft' -); - -export default ArrowLeft; diff --git a/frontend/src/icons/ArrowRight.js b/frontend/src/icons/ArrowRight.js deleted file mode 100644 index e7e2f6d1d..000000000 --- a/frontend/src/icons/ArrowRight.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const ArrowRight = createSvgIcon( - - - , - 'ArrowRight' -); - -export default ArrowRight; diff --git a/frontend/src/icons/ArrowsExpand.js b/frontend/src/icons/ArrowsExpand.js deleted file mode 100644 index 01850809f..000000000 --- a/frontend/src/icons/ArrowsExpand.js +++ /dev/null @@ -1,20 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const ArrowsExpand = createSvgIcon( - - - , - 'ArrowsExpand' -); - -export default ArrowsExpand; diff --git a/frontend/src/icons/Bell.js b/frontend/src/icons/Bell.js deleted file mode 100644 index 328e4ce42..000000000 --- a/frontend/src/icons/Bell.js +++ /dev/null @@ -1,14 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const Bell = createSvgIcon( - - - , - 'Bell' -); - -export default Bell; diff --git a/frontend/src/icons/ChevronDown.js b/frontend/src/icons/ChevronDown.js deleted file mode 100644 index 66af4b8a2..000000000 --- a/frontend/src/icons/ChevronDown.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const ChevronDown = createSvgIcon( - - - , - 'ChevronDown' -); - -export default ChevronDown; diff --git a/frontend/src/icons/ChevronLeft.js b/frontend/src/icons/ChevronLeft.js deleted file mode 100644 index 37b9dc3c5..000000000 --- a/frontend/src/icons/ChevronLeft.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const ChevronLeft = createSvgIcon( - - - , - 'ChevronLeft' -); - -export default ChevronLeft; diff --git a/frontend/src/icons/ChevronRight.js b/frontend/src/icons/ChevronRight.js deleted file mode 100644 index c2291bb0d..000000000 --- a/frontend/src/icons/ChevronRight.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const ChevronRight = createSvgIcon( - - - , - 'ChevronRight' -); - -export default ChevronRight; diff --git a/frontend/src/icons/ChevronUp.js b/frontend/src/icons/ChevronUp.js deleted file mode 100644 index 6fc22f9d4..000000000 --- a/frontend/src/icons/ChevronUp.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from 
'@mui/material/utils/createSvgIcon'; - -const ChevronUp = createSvgIcon( - - - , - 'ChevronUp' -); - -export default ChevronUp; diff --git a/frontend/src/icons/Cog.js b/frontend/src/icons/Cog.js deleted file mode 100644 index fb9a081aa..000000000 --- a/frontend/src/icons/Cog.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const Cog = createSvgIcon( - - - , - 'Cog' -); - -export default Cog; diff --git a/frontend/src/icons/DocumentText.js b/frontend/src/icons/DocumentText.js deleted file mode 100644 index 667f39721..000000000 --- a/frontend/src/icons/DocumentText.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const DocumentText = createSvgIcon( - - - , - 'DocumentText' -); - -export default DocumentText; diff --git a/frontend/src/icons/DotsHorizontal.js b/frontend/src/icons/DotsHorizontal.js deleted file mode 100644 index 9f81203af..000000000 --- a/frontend/src/icons/DotsHorizontal.js +++ /dev/null @@ -1,14 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const DotsHorizontal = createSvgIcon( - - - , - 'DotsHorizontal' -); - -export default DotsHorizontal; diff --git a/frontend/src/icons/Download.js b/frontend/src/icons/Download.js deleted file mode 100644 index 3d3a35400..000000000 --- a/frontend/src/icons/Download.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const Download = createSvgIcon( - - - , - 'Download' -); - -export default Download; diff --git a/frontend/src/icons/Duplicate.js b/frontend/src/icons/Duplicate.js deleted file mode 100644 index 5fc408990..000000000 --- a/frontend/src/icons/Duplicate.js +++ /dev/null @@ -1,15 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const Duplicate = createSvgIcon( - - - - , - 'Duplicate' -); - -export default Duplicate; diff --git a/frontend/src/icons/ExternalLink.js b/frontend/src/icons/ExternalLink.js deleted file mode 100644 index 52813c9d8..000000000 --- a/frontend/src/icons/ExternalLink.js +++ /dev/null @@ -1,15 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const ExternalLink = createSvgIcon( - - - - , - 'ExternalLink' -); - -export default ExternalLink; diff --git a/frontend/src/icons/Lock.js b/frontend/src/icons/Lock.js deleted file mode 100644 index ad6c081ac..000000000 --- a/frontend/src/icons/Lock.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const Lock = createSvgIcon( - - - , - 'Lock' -); - -export default Lock; diff --git a/frontend/src/icons/Menu.js b/frontend/src/icons/Menu.js deleted file mode 100644 index fd25713b1..000000000 --- a/frontend/src/icons/Menu.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const Menu = createSvgIcon( - - - , - 'Menu' -); - -export default Menu; diff --git a/frontend/src/icons/Minus.js b/frontend/src/icons/Minus.js deleted file mode 100644 index dfe7b3eda..000000000 --- a/frontend/src/icons/Minus.js +++ /dev/null @@ -1,20 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const Minus = createSvgIcon( - - - , - 'Minus' -); - -export default Minus; diff --git a/frontend/src/icons/Moon.js b/frontend/src/icons/Moon.js deleted file mode 100644 index c83b87cc0..000000000 --- a/frontend/src/icons/Moon.js +++ /dev/null @@ -1,14 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const Moon = createSvgIcon( - - - , 
- 'Moon' -); - -export default Moon; diff --git a/frontend/src/icons/PencilAlt.js b/frontend/src/icons/PencilAlt.js deleted file mode 100644 index fbe15202c..000000000 --- a/frontend/src/icons/PencilAlt.js +++ /dev/null @@ -1,19 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const PencilAlt = createSvgIcon( - - - - , - 'PencilAlt' -); - -export default PencilAlt; diff --git a/frontend/src/icons/Plus.js b/frontend/src/icons/Plus.js deleted file mode 100644 index 0a4169305..000000000 --- a/frontend/src/icons/Plus.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const Plus = createSvgIcon( - - - , - 'Plus' -); - -export default Plus; diff --git a/frontend/src/icons/Refresh.js b/frontend/src/icons/Refresh.js deleted file mode 100644 index 182b5a440..000000000 --- a/frontend/src/icons/Refresh.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const Refresh = createSvgIcon( - - - , - 'Refresh' -); - -export default Refresh; diff --git a/frontend/src/icons/Save.js b/frontend/src/icons/Save.js deleted file mode 100644 index a0335482e..000000000 --- a/frontend/src/icons/Save.js +++ /dev/null @@ -1,14 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const Save = createSvgIcon( - - - , - 'Save' -); - -export default Save; diff --git a/frontend/src/icons/Search.js b/frontend/src/icons/Search.js deleted file mode 100644 index 9f2c054ab..000000000 --- a/frontend/src/icons/Search.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const Search = createSvgIcon( - - - , - 'Search' -); - -export default Search; diff --git a/frontend/src/icons/Sun.js b/frontend/src/icons/Sun.js deleted file mode 100644 index efaaaab0a..000000000 --- a/frontend/src/icons/Sun.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const Sun = createSvgIcon( - - - , - 'Sun' -); - -export default Sun; diff --git a/frontend/src/icons/User.js b/frontend/src/icons/User.js deleted file mode 100644 index 6b9c8e882..000000000 --- a/frontend/src/icons/User.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const User = createSvgIcon( - - - , - 'User' -); - -export default User; diff --git a/frontend/src/icons/X.js b/frontend/src/icons/X.js deleted file mode 100644 index 497201e74..000000000 --- a/frontend/src/icons/X.js +++ /dev/null @@ -1,18 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const X = createSvgIcon( - - - , - 'X' -); - -export default X; diff --git a/frontend/src/icons/adjustments.js b/frontend/src/icons/adjustments.js deleted file mode 100644 index d786824fc..000000000 --- a/frontend/src/icons/adjustments.js +++ /dev/null @@ -1,13 +0,0 @@ -import createSvgIcon from '@mui/material/utils/createSvgIcon'; - -const Adjustments = createSvgIcon( - - - , - 'Adjustments' -); -export default Adjustments; diff --git a/frontend/src/index.js b/frontend/src/index.js index 165bf5fcd..d62a2a212 100644 --- a/frontend/src/index.js +++ b/frontend/src/index.js @@ -1,18 +1,18 @@ +import { AdapterDateFns } from '@mui/x-date-pickers/AdapterDateFns'; +import { LocalizationProvider } from '@mui/x-date-pickers/LocalizationProvider'; +import StyledEngineProvider from '@mui/material/StyledEngineProvider'; import 'nprogress/nprogress.css'; import { StrictMode } from 'react'; import ReactDOM from 'react-dom'; import { 
HelmetProvider } from 'react-helmet-async'; -import { BrowserRouter } from 'react-router-dom'; import { Provider as ReduxProvider } from 'react-redux'; -import { AdapterDateFns } from "@mui/x-date-pickers/AdapterDateFns"; -import { LocalizationProvider } from "@mui/x-date-pickers/LocalizationProvider"; -import StyledEngineProvider from '@mui/material/StyledEngineProvider'; -import App from './App'; -import { AuthProvider } from './contexts/AmplifyContext'; -import { SettingsProvider } from './contexts/SettingsContext'; -import reportWebVitals from './reportWebVitals'; +import { BrowserRouter } from 'react-router-dom'; +import { App } from './App'; +import { AuthProvider } from './authentication'; +import { SettingsProvider } from './design'; +import { store } from './globalErrors'; +import { reportWebVitals } from './reportWebVitals'; import * as serviceWorker from './serviceWorker'; -import store from './store'; ReactDOM.render( diff --git a/frontend/src/views/Administration/AdministrationTeams.js b/frontend/src/modules/Administration/components/AdministrationTeams.js similarity index 87% rename from frontend/src/views/Administration/AdministrationTeams.js rename to frontend/src/modules/Administration/components/AdministrationTeams.js index 14087b989..277339aef 100644 --- a/frontend/src/views/Administration/AdministrationTeams.js +++ b/frontend/src/modules/Administration/components/AdministrationTeams.js @@ -1,6 +1,4 @@ -import PropTypes from 'prop-types'; -import React, { useCallback, useEffect, useState } from 'react'; -import * as BsIcons from 'react-icons/bs'; +import { LoadingButton } from '@mui/lab'; import { Box, Card, @@ -16,19 +14,22 @@ import { TextField } from '@mui/material'; import CircularProgress from '@mui/material/CircularProgress'; -import { LoadingButton } from '@mui/lab'; import { useTheme } from '@mui/styles'; +import PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import * as BsIcons from 'react-icons/bs'; import { VscChecklist } from 'react-icons/vsc'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import SearchIcon from '../../icons/Search'; -import Scrollbar from '../../components/Scrollbar'; -import RefreshTableMenu from '../../components/RefreshTableMenu'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import Pager from '../../components/Pager'; -import listTenantGroups from '../../api/Tenant/listTenantGroups'; -import TeamPermissionsEditForm from './TeamPermissionsEditForm'; +import { + Defaults, + Pager, + RefreshTableMenu, + Scrollbar, + SearchIcon +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { listTenantGroups } from '../services'; +import { TeamPermissionsEditForm } from './TeamPermissionsEditForm'; function TeamRow({ team, fetchItems }) { const theme = useTheme(); @@ -71,11 +72,12 @@ TeamRow.propTypes = { team: PropTypes.any, fetchItems: PropTypes.any }; -const AdministrationTeams = () => { + +export const AdministrationTeams = () => { const client = useClient(); const dispatch = useDispatch(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); + const [items, setItems] = useState(Defaults.pagedResponse); + const [filter, setFilter] = useState(Defaults.filter); const [loading, setLoading] = useState(true); const [inputValue, setInputValue] = 
useState(''); @@ -209,5 +211,3 @@ const AdministrationTeams = () => { }; AdministrationTeams.propTypes = {}; - -export default AdministrationTeams; diff --git a/frontend/src/modules/Administration/components/AdministratorDashboardViewer.js b/frontend/src/modules/Administration/components/AdministratorDashboardViewer.js new file mode 100644 index 000000000..8c660e0cb --- /dev/null +++ b/frontend/src/modules/Administration/components/AdministratorDashboardViewer.js @@ -0,0 +1,449 @@ +import { AddOutlined, ArrowRightAlt } from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; +import { + Box, + Card, + CardContent, + CardHeader, + Container, + Divider, + Grid, + TextField, + Typography +} from '@mui/material'; +import { Formik } from 'formik'; +import { createRef, useCallback, useEffect, useState } from 'react'; +import * as ReactIf from 'react-if'; +import * as Yup from 'yup'; +import { useSettings } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { getTrustAccount, useClient } from 'services'; +import { + createQuicksightDataSourceSet, + getMonitoringDashboardId, + getMonitoringVPCConnectionId, + getPlatformAuthorSession, + getPlatformReaderSession, + updateSSMParameter +} from '../services'; + +const QuickSightEmbedding = require('amazon-quicksight-embedding-sdk'); + +export const DashboardViewer = () => { + const dispatch = useDispatch(); + const client = useClient(); + const { settings } = useSettings(); + const [dashboardId, setDashboardId] = useState(''); + const [vpcConnectionId, setVpcConnectionId] = useState(''); + const [trustedAccount, setTrustedAccount] = useState(null); + const [dashboardRef] = useState(createRef()); + const [sessionUrl, setSessionUrl] = useState(null); + const [isOpeningSession, setIsOpeningSession] = useState(false); + const [isCreatingDataSource, setIsCreatingDataSource] = useState(false); + + const fetchTrustedAccount = useCallback(async () => { + const response = await client.query(getTrustAccount()); + if (!response.errors) { + setTrustedAccount(response.data.getTrustAccount); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + }, [client, dispatch]); + + const fetchMonitoringVPCConnectionId = useCallback(async () => { + const response = await client.query(getMonitoringVPCConnectionId()); + if (!response.errors) { + setVpcConnectionId(response.data.getMonitoringVPCConnectionId); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + }, [client, dispatch]); + + const fetchMonitoringDashboardId = useCallback(async () => { + const response = await client.query(getMonitoringDashboardId()); + if (!response.errors) { + setDashboardId(response.data.getMonitoringDashboardId); + if (response.data.getMonitoringDashboardId !== 'updateme') { + const resp = await client.query( + getPlatformReaderSession(response.data.getMonitoringDashboardId) + ); + if (!resp.errors) { + setSessionUrl(resp.data.getPlatformReaderSession); + const options = { + url: resp.data.getPlatformReaderSession, + scrolling: 'no', + height: '700px', + width: '100%', + locale: 'en-US', + footerPaddingEnabled: true, + sheetTabsDisabled: false, + printEnabled: false, + maximize: true, + container: dashboardRef.current + }; + QuickSightEmbedding.embedDashboard(options); + } else { + dispatch({ type: SET_ERROR, error: resp.errors[0].message }); + } + } + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + }, [client, dispatch, dashboardRef]); + + useEffect(() => { 
+ if (client) { + fetchMonitoringDashboardId().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + fetchMonitoringVPCConnectionId().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + fetchTrustedAccount().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + } + }, [ + client, + dispatch, + fetchMonitoringDashboardId, + fetchMonitoringVPCConnectionId, + fetchTrustedAccount + ]); + + async function submitVpc(values, setStatus, setSubmitting, setErrors) { + try { + setVpcConnectionId(values.vpc); + const response = await client.mutate( + updateSSMParameter({ name: 'VPCConnectionId', value: values.vpc }) + ); + if (!response.errors) { + setStatus({ success: true }); + setSubmitting(false); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (err) { + console.error(err); + setStatus({ success: false }); + setErrors({ submit: err.message }); + setSubmitting(false); + dispatch({ type: SET_ERROR, error: err.message }); + } + } + + async function submitDash(values, setStatus, setSubmitting, setErrors) { + try { + setDashboardId(values.dash); + const response = await client.mutate( + updateSSMParameter({ name: 'DashboardId', value: values.dash }) + ); + if (!response.errors) { + setStatus({ success: true }); + setSubmitting(false); + const resp = await client.query(getPlatformReaderSession(values.dash)); + if (!resp.errors) { + setSessionUrl(resp.data.getPlatformReaderSession); + const options = { + url: resp.data.getPlatformReaderSession, + scrolling: 'no', + height: '700px', + width: '100%', + locale: 'en-US', + footerPaddingEnabled: true, + sheetTabsDisabled: false, + printEnabled: false, + maximize: true, + container: dashboardRef.current + }; + QuickSightEmbedding.embedDashboard(options); + } else { + dispatch({ type: SET_ERROR, error: resp.errors[0].message }); + } + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (err) { + console.error(err); + setStatus({ success: false }); + setErrors({ submit: err.message }); + setSubmitting(false); + dispatch({ type: SET_ERROR, error: err.message }); + } + } + + async function createQuicksightdata() { + setIsCreatingDataSource(true); + const response = await client.mutate( + createQuicksightDataSourceSet({ vpcConnectionId }) + ); + if (response.errors) { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + setIsCreatingDataSource(false); + } + + const startAuthorSession = async () => { + setIsOpeningSession(true); + const response = await client.query( + getPlatformAuthorSession(trustedAccount) + ); + if (!response.errors) { + window.open(response.data.getPlatformAuthorSession, '_blank'); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + setIsOpeningSession(false); + }; + + return ( + + + + + + + + + + 1. Enable Quicksight Enterprise Edition in AWS Account ={' '} + {trustedAccount}. Check the user guide for more details. + + + + + 2. Create a VPC Connection between Quicksight and RDS VPC. + Check the user guide for more details. + + + + + + + + + + + + + + 3. Introduce or Update the VPC Connection ID value in the + following box: + + + + + { + await submitVpc( + values, + setStatus, + setSubmitting, + setErrors + ); + }} + > + {({ + errors, + handleBlur, + handleChange, + handleSubmit, + isSubmitting, + setFieldValue, + touched, + values + }) => ( +
+ + + + + + + Save + + + +
+ )} +
+
+
+
+ + + + 4. Click on the button to automatically create the data + source connecting our RDS Aurora database with Quicksight + + + + + } + sx={{ mt: 1, mb: 2, ml: 2 }} + variant="outlined" + onClick={() => { + createQuicksightdata().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + }} + > + Create Quicksight data source + + + + +
+
+
+ + + + + + + + + 5. Go to Quicksight to build your Analysis and publish a + Dashboard. Check the user guide for more details. + + + + + } + variant="outlined" + onClick={startAuthorSession} + sx={{ mt: 1, mb: 2, ml: 2 }} + > + Start Quicksight session + + + + + + + + 6. Introduce or update your Dashboard ID + + + + + { + await submitDash( + values, + setStatus, + setSubmitting, + setErrors + ); + }} + > + {({ + errors, + handleBlur, + handleChange, + handleSubmit, + isSubmitting, + setFieldValue, + touched, + values + }) => ( +
+ + + + + + + Save + + + +
+ )} +
+
+
+
+
+
+
+ + + +
+ + + + + + ); +}; diff --git a/frontend/src/views/Administration/TeamPermissionsEditForm.js b/frontend/src/modules/Administration/components/TeamPermissionsEditForm.js similarity index 94% rename from frontend/src/views/Administration/TeamPermissionsEditForm.js rename to frontend/src/modules/Administration/components/TeamPermissionsEditForm.js index 4edfdad9d..22dd1688d 100644 --- a/frontend/src/views/Administration/TeamPermissionsEditForm.js +++ b/frontend/src/modules/Administration/components/TeamPermissionsEditForm.js @@ -1,6 +1,5 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import PropTypes from 'prop-types'; -import { useSnackbar } from 'notistack'; +import { GroupAddOutlined } from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; import { Box, CardContent, @@ -16,15 +15,17 @@ import { TextField, Typography } from '@mui/material'; -import { LoadingButton } from '@mui/lab'; -import { GroupAddOutlined } from '@mui/icons-material'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import useClient from '../../hooks/useClient'; -import listTenantPermissions from '../../api/Tenant/listTenantPermissions'; -import updateTenantGroupPermissions from '../../api/Tenant/updateTenantGroupPermissions'; +import { useSnackbar } from 'notistack'; +import PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { + listTenantPermissions, + updateTenantGroupPermissions +} from '../services'; -const TeamPermissionsEditForm = (props) => { +export const TeamPermissionsEditForm = (props) => { const { team, onClose, open, reloadTeams, ...other } = props; const { enqueueSnackbar } = useSnackbar(); const dispatch = useDispatch(); @@ -218,5 +219,3 @@ TeamPermissionsEditForm.propTypes = { reloadTeams: PropTypes.func, open: PropTypes.bool.isRequired }; - -export default TeamPermissionsEditForm; diff --git a/frontend/src/modules/Administration/components/index.js b/frontend/src/modules/Administration/components/index.js new file mode 100644 index 000000000..06ef79939 --- /dev/null +++ b/frontend/src/modules/Administration/components/index.js @@ -0,0 +1,3 @@ +export * from './AdministrationTeams'; +export * from './AdministratorDashboardViewer'; +export * from './TeamPermissionsEditForm'; diff --git a/frontend/src/modules/Administration/services/createQuicksightDataSourceSet.js b/frontend/src/modules/Administration/services/createQuicksightDataSourceSet.js new file mode 100644 index 000000000..54126600b --- /dev/null +++ b/frontend/src/modules/Administration/services/createQuicksightDataSourceSet.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const createQuicksightDataSourceSet = ({ vpcConnectionId }) => ({ + variables: { + vpcConnectionId + }, + mutation: gql` + mutation createQuicksightDataSourceSet($vpcConnectionId: String!) 
{ + createQuicksightDataSourceSet(vpcConnectionId: $vpcConnectionId) + } + ` +}); diff --git a/frontend/src/modules/Administration/services/getMonitoringDashboardId.js b/frontend/src/modules/Administration/services/getMonitoringDashboardId.js new file mode 100644 index 000000000..2396193aa --- /dev/null +++ b/frontend/src/modules/Administration/services/getMonitoringDashboardId.js @@ -0,0 +1,9 @@ +import { gql } from 'apollo-boost'; + +export const getMonitoringDashboardId = () => ({ + query: gql` + query getMonitoringDashboardId { + getMonitoringDashboardId + } + ` +}); diff --git a/frontend/src/modules/Administration/services/getMonitoringVPCConnectionId.js b/frontend/src/modules/Administration/services/getMonitoringVPCConnectionId.js new file mode 100644 index 000000000..fdeb62ce7 --- /dev/null +++ b/frontend/src/modules/Administration/services/getMonitoringVPCConnectionId.js @@ -0,0 +1,9 @@ +import { gql } from 'apollo-boost'; + +export const getMonitoringVPCConnectionId = () => ({ + query: gql` + query getMonitoringVPCConnectionId { + getMonitoringVPCConnectionId + } + ` +}); diff --git a/frontend/src/modules/Administration/services/getPlatformAuthorSession.js b/frontend/src/modules/Administration/services/getPlatformAuthorSession.js new file mode 100644 index 000000000..1a2c7d805 --- /dev/null +++ b/frontend/src/modules/Administration/services/getPlatformAuthorSession.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const getPlatformAuthorSession = (awsAccount) => ({ + variables: { + awsAccount + }, + query: gql` + query getPlatformAuthorSession($awsAccount: String) { + getPlatformAuthorSession(awsAccount: $awsAccount) + } + ` +}); diff --git a/frontend/src/modules/Administration/services/getPlatformReaderSession.js b/frontend/src/modules/Administration/services/getPlatformReaderSession.js new file mode 100644 index 000000000..0ec37b8ce --- /dev/null +++ b/frontend/src/modules/Administration/services/getPlatformReaderSession.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const getPlatformReaderSession = (dashboardId) => ({ + variables: { + dashboardId + }, + query: gql` + query getPlatformReaderSession($dashboardId: String) { + getPlatformReaderSession(dashboardId: $dashboardId) + } + ` +}); diff --git a/frontend/src/modules/Administration/services/index.js b/frontend/src/modules/Administration/services/index.js new file mode 100644 index 000000000..a07cd1853 --- /dev/null +++ b/frontend/src/modules/Administration/services/index.js @@ -0,0 +1,9 @@ +export * from './createQuicksightDataSourceSet'; +export * from './getMonitoringDashboardId'; +export * from './getMonitoringVPCConnectionId'; +export * from './getPlatformAuthorSession'; +export * from './getPlatformReaderSession'; +export * from './listTenantGroups'; +export * from './listTenantPermissions'; +export * from './updateSSMParameter'; +export * from './updateTenantGroupPermissions'; diff --git a/frontend/src/api/Tenant/listTenantGroups.js b/frontend/src/modules/Administration/services/listTenantGroups.js similarity index 84% rename from frontend/src/api/Tenant/listTenantGroups.js rename to frontend/src/modules/Administration/services/listTenantGroups.js index 93bb36ef1..74d810ccf 100644 --- a/frontend/src/api/Tenant/listTenantGroups.js +++ b/frontend/src/modules/Administration/services/listTenantGroups.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listTenantGroups = (filter) => ({ +export const listTenantGroups = (filter) => ({ variables: { filter }, @@ -23,5 +23,3 @@ 
const listTenantGroups = (filter) => ({ } ` }); - -export default listTenantGroups; diff --git a/frontend/src/modules/Administration/services/listTenantPermissions.js b/frontend/src/modules/Administration/services/listTenantPermissions.js new file mode 100644 index 000000000..fd38f3f5c --- /dev/null +++ b/frontend/src/modules/Administration/services/listTenantPermissions.js @@ -0,0 +1,15 @@ +import { gql } from 'apollo-boost'; + +export const listTenantPermissions = (filter) => ({ + variables: { + filter + }, + query: gql` + query listTenantPermissions { + listTenantPermissions { + name + description + } + } + ` +}); diff --git a/frontend/src/modules/Administration/services/updateSSMParameter.js b/frontend/src/modules/Administration/services/updateSSMParameter.js new file mode 100644 index 000000000..7d4e486a5 --- /dev/null +++ b/frontend/src/modules/Administration/services/updateSSMParameter.js @@ -0,0 +1,13 @@ +import { gql } from 'apollo-boost'; + +export const updateSSMParameter = ({ name, value }) => ({ + variables: { + name, + value + }, + mutation: gql` + mutation updateSSMParameter($name: String!, $value: String!) { + updateSSMParameter(name: $name, value: $value) + } + ` +}); diff --git a/frontend/src/modules/Administration/services/updateTenantGroupPermissions.js b/frontend/src/modules/Administration/services/updateTenantGroupPermissions.js new file mode 100644 index 000000000..bc998369a --- /dev/null +++ b/frontend/src/modules/Administration/services/updateTenantGroupPermissions.js @@ -0,0 +1,14 @@ +import { gql } from 'apollo-boost'; + +export const updateTenantGroupPermissions = (input) => ({ + variables: { + input + }, + mutation: gql` + mutation updateGroupTenantPermissions( + $input: UpdateGroupTenantPermissionsInput! + ) { + updateGroupTenantPermissions(input: $input) + } + ` +}); diff --git a/frontend/src/views/Administration/AdministrationView.js b/frontend/src/modules/Administration/views/AdministrationView.js similarity index 88% rename from frontend/src/views/Administration/AdministrationView.js rename to frontend/src/modules/Administration/views/AdministrationView.js index db2be01a3..cc0235d07 100644 --- a/frontend/src/views/Administration/AdministrationView.js +++ b/frontend/src/modules/Administration/views/AdministrationView.js @@ -1,6 +1,3 @@ -import { useState } from 'react'; -import { Link as RouterLink } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; import { Box, Breadcrumbs, @@ -12,13 +9,16 @@ import { Tabs, Typography } from '@mui/material'; -import useSettings from '../../hooks/useSettings'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import AdministrationTeams from './AdministrationTeams'; -import DashboardViewer from './AdministratorDashboardViewer' - +import { useState } from 'react'; +import { Helmet } from 'react-helmet-async'; +import { Link as RouterLink } from 'react-router-dom'; +import { ChevronRightIcon, useSettings } from 'design'; +import { AdministrationTeams, DashboardViewer } from '../components'; -const tabs = [{ label: 'Teams', value: 'teams' },{ label: 'Monitoring', value: 'dashboard' }]; +const tabs = [ + { label: 'Teams', value: 'teams' }, + { label: 'Monitoring', value: 'dashboard' } +]; const AdministrationView = () => { const { settings } = useSettings(); @@ -98,4 +98,3 @@ const AdministrationView = () => { }; export default AdministrationView; - diff --git a/frontend/src/modules/Catalog/components/GlossarySearchResultItem.js b/frontend/src/modules/Catalog/components/GlossarySearchResultItem.js 
new file mode 100644 index 000000000..48d665bde --- /dev/null +++ b/frontend/src/modules/Catalog/components/GlossarySearchResultItem.js @@ -0,0 +1,340 @@ +import { LockOpen, ThumbUp } from '@mui/icons-material'; +import { + Box, + Card, + Chip, + CircularProgress, + Divider, + Grid, + IconButton, + Link, + Tooltip, + Typography +} from '@mui/material'; +import PropTypes from 'prop-types'; +import React, { useState } from 'react'; +import * as BsIcons from 'react-icons/bs'; +import * as FaIcons from 'react-icons/fa'; +import * as FiIcons from 'react-icons/fi'; +import { MdShowChart } from 'react-icons/md'; +import * as ReactIf from 'react-if'; +import { Link as RouterLink } from 'react-router-dom'; +import { IconAvatar, useCardStyle } from 'design'; +import { dayjs } from 'utils'; +import { RequestAccessModal } from './RequestAccessModal'; +import { RequestDashboardAccessModal } from './RequestDashboardAccessModal'; + +const HitICon = ({ hit }) => ( + + + } /> + + + } /> + + + } /> + + + } /> + + +); + +HitICon.propTypes = { + hit: PropTypes.object.isRequired +}; + +export const GlossarySearchResultItem = ({ hit }) => { + const classes = useCardStyle(); + const [isRequestAccessOpen, setIsRequestAccessOpen] = useState(false); + const [isOpeningModal, setIsOpeningModal] = useState(false); + const [isRequestDashboardAccessOpen, setIsRequestDashboardAccessOpen] = + useState(false); + const [isOpeningDashboardModal, setIsOpeningDashboardModal] = useState(false); + const handleRequestAccessModalOpen = () => { + setIsOpeningModal(true); + setIsRequestAccessOpen(true); + }; + + const handleRequestAccessModalClose = () => { + setIsRequestAccessOpen(false); + }; + + const handleRequestDashboardAccessModalOpen = () => { + setIsOpeningDashboardModal(true); + setIsRequestDashboardAccessOpen(true); + }; + + const handleRequestDashboardAccessModalClose = () => { + setIsOpeningDashboardModal(false); + setIsRequestDashboardAccessOpen(false); + }; + + return ( + + + + + + {hit.resourceKind === 'dataset' && ( + + {hit.label} + + )} + {hit.resourceKind === 'table' && ( + + {hit.label} + + )} + {hit.resourceKind === 'folder' && ( + + {hit.label} + + )} + {hit.resourceKind === 'dashboard' && ( + + {hit.label} + + )} + + by{' '} + + {hit.owner} + {' '} + | created {dayjs(hit.created).fromNow()} + + + + + + + + {hit.description || 'No description provided'} + + + + + + + + Team + + + + + + {hit.admins || '-'} + + + + + + + + + + + {' Environment'} + + + + + + {hit.environmentName || '-'} + + + + + + + + + + Region + + + + + {hit.region} + + + + + + {hit.tags && hit.tags.length > 0 && ( + + {hit.topics.concat(hit.tags.slice(0, 5)).map((tag) => ( + + {tag} + + } + variant="filled" + /> + ))} + + )} + + + + + {isOpeningModal || isOpeningDashboardModal ? ( + + ) : ( + + + hit.resourceKind === 'dashboard' + ? 
handleRequestDashboardAccessModalOpen() + : handleRequestAccessModalOpen() + } + > + + + + )} + setIsOpeningModal(false)} + /> + setIsOpeningDashboardModal(false)} + /> + + + {(hit.resourceKind === 'dashboard' || hit.resourceKind === 'dataset') && + hit.upvotes !== undefined && + hit.upvotes >= 0 && ( + + + + + + + + {hit.upvotes} + + + + )} + + + ); +}; +GlossarySearchResultItem.propTypes = { + hit: PropTypes.object.isRequired +}; diff --git a/frontend/src/modules/Catalog/components/GlossarySearchUI.js b/frontend/src/modules/Catalog/components/GlossarySearchUI.js new file mode 100644 index 000000000..5920fa485 --- /dev/null +++ b/frontend/src/modules/Catalog/components/GlossarySearchUI.js @@ -0,0 +1,321 @@ +import ArrowDropDown from '@mui/icons-material/ArrowDropDown'; +import ArrowRight from '@mui/icons-material/ArrowRight'; +import { TreeItem, TreeView } from '@mui/lab'; +import { Box, CircularProgress, Typography } from '@mui/material'; +import { makeStyles } from '@mui/styles'; +import PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import * as BsIcons from 'react-icons/bs'; +import { Defaults, Scrollbar } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { searchGlossary, useClient } from 'services'; +import { listToTree } from 'utils'; + +const useTreeItemStyles = makeStyles((theme) => ({ + root: { + color: theme.palette.text.secondary, + '&:focus > $content, &$selected > $content': { + backgroundColor: `var(--tree-view-bg-color, ${theme.palette.grey[400]})`, + color: 'var(--tree-view-color)' + }, + '&:focus > $content $label, &:hover > $content $label, &$selected > $content $label': + { + backgroundColor: 'transparent' + } + }, + content: { + color: theme.palette.text.secondary, + borderTopRightRadius: theme.spacing(2), + borderBottomRightRadius: theme.spacing(2), + paddingRight: theme.spacing(1), + fontWeight: theme.typography.fontWeightMedium, + '$expanded > &': { + fontWeight: theme.typography.fontWeightRegular + } + }, + group: { + marginLeft: 0, + '& $content': { + paddingLeft: theme.spacing(2) + } + }, + expanded: {}, + selected: {}, + label: { + fontWeight: 'inherit', + color: 'inherit' + }, + labelRoot: { + display: 'flex', + alignItems: 'center', + padding: theme.spacing(1, 0.5) + }, + labelIcon: { + marginRight: theme.spacing(1) + }, + labelText: { + fontWeight: 'inherit', + flexGrow: 1 + } +})); + +function StyledTreeItem(props) { + const classes = useTreeItemStyles(); + const { + labelText, + labelIcon: LabelIcon, + labelInfo, + color, + bgColor, + ...other + } = props; + + return ( + + + + {labelText} + + + {labelInfo} + +
+ } + style={{ + '--tree-view-color': color, + '--tree-view-bg-color': bgColor + }} + classes={{ + root: classes.root, + content: classes.content, + expanded: classes.expanded, + selected: classes.selected, + group: classes.group, + label: classes.label + }} + {...other} + /> + ); +} + +StyledTreeItem.propTypes = { + bgColor: PropTypes.string, + color: PropTypes.string, + labelIcon: PropTypes.elementType.isRequired, + labelInfo: PropTypes.string, + labelText: PropTypes.string.isRequired +}; + +const useStyles = makeStyles({ + root: { + height: 264, + flexGrow: 1, + maxWidth: 400 + } +}); + +export const GlossarySearchUI = ({ matches, setQuery }) => { + const client = useClient(); + const classes = useStyles(); + const dispatch = useDispatch(); + const [tree, setTree] = useState([]); + const [fetchingItems, setFetchingItems] = useState(true); + const [selectedTerms] = useState(matches.map((match) => match.key)); + const getIcon = (nodeItem) => { + if (nodeItem.__typename === 'Glossary') { + return ; + } + if (nodeItem.__typename === 'Category') { + return ; + } + return ; + }; + const select = (node) => { + const terms = [node.nodeUri]; + + setQuery({ + query: { + terms: { + glossary: terms.map((p) => p.toLowerCase()) + } + }, + value: [node.label] + }); + }; + const unselect = (node) => { + const terms = [node.nodeUri]; + + setQuery({ + query: { + terms: { + glossary: terms.map((p) => p.toLowerCase()) + } + }, + value: [node.label] + }); + }; + const isSelected = (node) => selectedTerms.indexOf(node.nodeUri) !== -1; + + const toggle = (node) => { + if (isSelected(node)) { + unselect(node); + } else { + select(node); + } + }; + const fetchItems = useCallback(async () => { + setFetchingItems(true); + const response = await client.query( + searchGlossary(Defaults.selectListFilter) + ); + if (!response.errors) { + setTree( + listToTree(response.data.searchGlossary.nodes, { + idKey: 'nodeUri', + parentKey: 'parentUri' + }) + ); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + setFetchingItems(false); + }, [client, dispatch]); + useEffect(() => { + if (client) { + fetchItems().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + } + }, [client, dispatch, fetchItems]); + return ( + + {fetchingItems ? ( + + ) : ( + + {tree && tree.length > 0 ? ( + + + + } + defaultExpandIcon={} + defaultEndIcon={
} + > + {tree.map((node) => ( + toggle(node)} + labelText={ + + + {node.label} + + + } + labelIcon={() => getIcon(node)} + > + {node.children && + node.children.map((category) => ( + toggle(category)} + labelText={ + + + {category.label} + + + } + labelIcon={() => getIcon(category)} + > + {category.children && + category.children.map((term) => ( + + + {term.label} + + + } + labelIcon={() => getIcon(term)} + color="#1a73e8" + bgColor="#e8f0fe" + onClick={() => toggle(term)} + /> + ))} + + ))} + + ))} + + + + + ) : ( + + + No glossaries found + + + )} + + )} + + ); +}; +GlossarySearchUI.propTypes = { + setQuery: PropTypes.func.isRequired, + matches: PropTypes.array.isRequired +}; diff --git a/frontend/src/modules/Catalog/components/GlossarySearchWrapper.js b/frontend/src/modules/Catalog/components/GlossarySearchWrapper.js new file mode 100644 index 000000000..8982a2a2f --- /dev/null +++ b/frontend/src/modules/Catalog/components/GlossarySearchWrapper.js @@ -0,0 +1,34 @@ +import { ReactiveComponent } from '@appbaseio/reactivesearch'; +import { Box } from '@mui/material'; +import React from 'react'; +import { GlossarySearchUI } from './GlossarySearchUI'; + +export const GlossarySearchWrapper = (innerClass) => ( + + ({ + aggs: { + glossary: { + terms: { + field: 'glossary' + } + } + } + })} + render={({ aggregations, setQuery }) => { + let matches = []; + if ( + aggregations && + aggregations.glossary && + aggregations.glossary.buckets.length + ) { + matches = aggregations.glossary.buckets; + } + return ; + }} + /> + +); diff --git a/frontend/src/views/Catalog/RequestAccessModal.js b/frontend/src/modules/Catalog/components/RequestAccessModal.js similarity index 88% rename from frontend/src/views/Catalog/RequestAccessModal.js rename to frontend/src/modules/Catalog/components/RequestAccessModal.js index 970d500aa..21570e54a 100644 --- a/frontend/src/views/Catalog/RequestAccessModal.js +++ b/frontend/src/modules/Catalog/components/RequestAccessModal.js @@ -1,5 +1,5 @@ -import PropTypes from 'prop-types'; -import { useSnackbar } from 'notistack'; +import SendIcon from '@mui/icons-material/Send'; +import { LoadingButton } from '@mui/lab'; import { Box, CardContent, @@ -11,21 +11,22 @@ import { Typography } from '@mui/material'; import { Formik } from 'formik'; -import * as Yup from 'yup'; -import { LoadingButton } from '@mui/lab'; +import { useSnackbar } from 'notistack'; +import PropTypes from 'prop-types'; import React, { useCallback, useEffect, useState } from 'react'; -import SendIcon from '@mui/icons-material/Send'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import useClient from '../../hooks/useClient'; -import listEnvironments from '../../api/Environment/listEnvironments'; -import createShareObject from '../../api/ShareObject/createShareObject'; -import listEnvironmentGroups from '../../api/Environment/listEnvironmentGroups'; -import listEnvironmentConsumptionRoles from '../../api/Environment/listEnvironmentConsumptionRoles'; -import requestDashboardShare from '../../api/Dashboard/requestDashboardShare'; -import * as Defaults from '../../components/defaults'; +import * as Yup from 'yup'; +import { Defaults } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { + createShareObject, + listEnvironmentConsumptionRoles, + listEnvironmentGroups, + listEnvironments, + requestDashboardShare, + useClient +} from 'services'; -const RequestAccessModal = (props) => { +export const RequestAccessModal = (props) => { const { 
hit, onApply, onClose, open, stopLoader, ...other } = props; const { enqueueSnackbar } = useSnackbar(); const dispatch = useDispatch(); @@ -39,7 +40,7 @@ const RequestAccessModal = (props) => { const fetchEnvironments = useCallback(async () => { const response = await client.query( listEnvironments({ - filter: Defaults.SelectListFilter + filter: Defaults.selectListFilter }) ); if (!response.errors) { @@ -63,7 +64,7 @@ const RequestAccessModal = (props) => { try { const response = await client.query( listEnvironmentGroups({ - filter: Defaults.SelectListFilter, + filter: Defaults.selectListFilter, environmentUri }) ); @@ -95,14 +96,14 @@ const RequestAccessModal = (props) => { term: '', groupUri: groupUri }, - environmentUri, + environmentUri }) ); if (!response.errors) { setRoleOptions( response.data.listEnvironmentConsumptionRoles.nodes.map((g) => ({ value: g.consumptionRoleUri, - label: [g.consumptionRoleName,' [',g.IAMRoleArn,']'].join(''), + label: [g.consumptionRoleName, ' [', g.IAMRoleArn, ']'].join('') })) ); } else { @@ -126,8 +127,10 @@ const RequestAccessModal = (props) => { async function submit(values, setStatus, setSubmitting, setErrors) { try { let response; - let type = values.consumptionRole? 'ConsumptionRole' : 'Group'; - let principal = values.consumptionRole? values.consumptionRole : values.groupUri; + let type = values.consumptionRole ? 'ConsumptionRole' : 'Group'; + let principal = values.consumptionRole + ? values.consumptionRole + : values.groupUri; if (hit.resourceKind === 'dataset') { response = await client.mutate( createShareObject({ @@ -220,7 +223,9 @@ const RequestAccessModal = (props) => { Request Access - Data access is requested for the whole requester Team or for the selected Consumption role. The request will be submitted to the data owners, track its progress in the Shares menu on the left. + Data access is requested for the whole requester Team or for the + selected Consumption role. The request will be submitted to the data + owners, track its progress in the Shares menu on the left. { onChange={(event) => { setFieldValue('consumptionRole', ''); fetchRoles( - values.environment.environmentUri, event.target.value + values.environment.environmentUri, + event.target.value ).catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) + dispatch({ + type: SET_ERROR, + error: e.message + }) ); setFieldValue('groupUri', event.target.value); }} @@ -390,24 +399,28 @@ const RequestAccessModal = (props) => { {roleOptions.length > 0 ? 
( { - setFieldValue('consumptionRole', event.target.value); + setFieldValue( + 'consumptionRole', + event.target.value + ); }} select value={values.consumptionRole} variant="outlined" > {roleOptions.map((role) => ( - + {role.label} ))} @@ -415,9 +428,13 @@ const RequestAccessModal = (props) => { ) : ( { +export const RequestDashboardAccessModal = (props) => { const { hit, onApply, onClose, open, stopLoader, ...other } = props; const { enqueueSnackbar } = useSnackbar(); const dispatch = useDispatch(); @@ -195,5 +192,3 @@ RequestDashboardAccessModal.propTypes = { open: PropTypes.bool.isRequired, stopLoader: PropTypes.func }; - -export default RequestDashboardAccessModal; diff --git a/frontend/src/modules/Catalog/components/index.js b/frontend/src/modules/Catalog/components/index.js new file mode 100644 index 000000000..b56943fb6 --- /dev/null +++ b/frontend/src/modules/Catalog/components/index.js @@ -0,0 +1,5 @@ +export * from './GlossarySearchUI'; +export * from './GlossarySearchWrapper'; +export * from './GlossarySearchResultItem'; +export * from './RequestAccessModal'; +export * from './RequestDashboardAccessModal'; diff --git a/frontend/src/views/Catalog/Catalog.js b/frontend/src/modules/Catalog/views/Catalog.js similarity index 95% rename from frontend/src/views/Catalog/Catalog.js rename to frontend/src/modules/Catalog/views/Catalog.js index 696fd448f..c52c3b42f 100644 --- a/frontend/src/views/Catalog/Catalog.js +++ b/frontend/src/modules/Catalog/views/Catalog.js @@ -1,4 +1,3 @@ -import React, { useEffect, useRef, useState } from 'react'; import { DataSearch, MultiList, @@ -6,7 +5,6 @@ import { ReactiveList, SelectedFilters } from '@appbaseio/reactivesearch'; -import CircularProgress from '@mui/material/CircularProgress'; import { Box, Breadcrumbs, @@ -19,18 +17,21 @@ import { Popover, Typography } from '@mui/material'; -import { Link as RouterLink } from 'react-router-dom'; +import CircularProgress from '@mui/material/CircularProgress'; import { makeStyles, useTheme } from '@mui/styles'; -import { Helmet } from 'react-helmet-async'; import * as PropTypes from 'prop-types'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import PlusIcon from '../../icons/Plus'; -import useSettings from '../../hooks/useSettings'; -import useToken from '../../hooks/useToken'; -import { THEMES } from '../../constants'; -import Hit from './Hit'; -import ChevronDown from '../../icons/ChevronDown'; -import GlossarySearchComponent from './GlossarySearchComponent'; +import React, { useEffect, useRef, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; +import { Link as RouterLink } from 'react-router-dom'; +import { useToken } from 'authentication'; +import { + ChevronDownIcon, + ChevronRightIcon, + PlusIcon, + THEMES, + useSettings +} from 'design'; +import { GlossarySearchWrapper, GlossarySearchResultItem } from '../components'; const useStyles = makeStyles((theme) => ({ mainSearch: { @@ -59,7 +60,7 @@ function CatalogFilter(props) { <> + {isTeamInviteModalOpen && ( + + )} + + + + + + + + Name + IAM Role + Athena WorkGroup + Permissions + Actions + + + {loading ? ( + + ) : ( + + {items.nodes.length > 0 ? ( + items.nodes.map((team) => ( + + )) + ) : ( + + No Team invited + + )} + + )} +
+ {!loading && items.nodes.length > 0 && ( + + )} +
+
+ + + + + } + title={ + + {' '} + Environment Consumption IAM roles + + } + /> + + + + + + + + ) + }} + onChange={handleInputChangeRoles} + onKeyUp={handleInputKeyupRoles} + placeholder="Search" + value={inputValueRoles} + variant="outlined" + /> + + + + + {isAddRoleModalOpen && ( + + )} + + + + + + + + Name + IAM Role + Role Owner + Action + + + {loading ? ( + + ) : ( + + {roles.nodes.length > 0 ? ( + roles.nodes.map((role) => ( + + {role.consumptionRoleName} + {role.IAMRoleArn} + {role.groupUri} + + + removeConsumptionRole(role.consumptionRoleUri) + } + > + + + + + )) + ) : ( + + No Consumption IAM Role added + + )} + + )} +
+ {!loading && roles.nodes.length > 0 && ( + + )} +
+
+
+
+
+  );
+};
+
+EnvironmentTeams.propTypes = {
+  environment: PropTypes.object.isRequired
+};
diff --git a/frontend/src/views/Networks/NetworkCreateModal.js b/frontend/src/modules/Environments/components/NetworkCreateModal.js
similarity index 95%
rename from frontend/src/views/Networks/NetworkCreateModal.js
rename to frontend/src/modules/Environments/components/NetworkCreateModal.js
index 9c032fba2..c0cd01e5b 100644
--- a/frontend/src/views/Networks/NetworkCreateModal.js
+++ b/frontend/src/modules/Environments/components/NetworkCreateModal.js
@@ -1,5 +1,4 @@
-import PropTypes from 'prop-types';
-import { useSnackbar } from 'notistack';
+import { LoadingButton } from '@mui/lab';
 import {
   Box,
   CardContent,
@@ -12,18 +11,16 @@ import {
   Typography
 } from '@mui/material';
 import { Formik } from 'formik';
-import * as Yup from 'yup';
-import { LoadingButton } from '@mui/lab';
+import { useSnackbar } from 'notistack';
+import PropTypes from 'prop-types';
 import React, { useCallback, useEffect, useState } from 'react';
-import { SET_ERROR } from '../../store/errorReducer';
-import { useDispatch } from '../../store';
-import useClient from '../../hooks/useClient';
-import ChipInput from '../../components/TagsInput';
-import listEnvironmentGroups from '../../api/Environment/listEnvironmentGroups';
-import createNetwork from '../../api/Vpc/createNetwork';
-import * as Defaults from '../../components/defaults';
+import * as Yup from 'yup';
+import { ChipInput, Defaults } from 'design';
+import { SET_ERROR, useDispatch } from 'globalErrors';
+import { listEnvironmentGroups, useClient } from 'services';
+import { createNetwork } from '../services';
 
-const NetworkCreateModal = (props) => {
+export const NetworkCreateModal = (props) => {
   const { environment, onApply, onClose, open, reloadNetworks, ...other } =
     props;
   const { enqueueSnackbar } = useSnackbar();
@@ -35,7 +32,7 @@ const NetworkCreateModal = (props) => {
     try {
       const response = await client.query(
         listEnvironmentGroups({
-          filter: Defaults.SelectListFilter,
+          filter: Defaults.selectListFilter,
           environmentUri: environment.environmentUri
         })
       );
@@ -301,5 +298,3 @@ NetworkCreateModal.propTypes = {
   reloadNetworks: PropTypes.func,
   open: PropTypes.bool.isRequired
 };
-
-export default NetworkCreateModal;
diff --git a/frontend/src/modules/Environments/components/index.js b/frontend/src/modules/Environments/components/index.js
new file mode 100644
index 000000000..afccd1235
--- /dev/null
+++ b/frontend/src/modules/Environments/components/index.js
@@ -0,0 +1,14 @@
+export * from './EnvironmentConsoleAccess';
+export * from './EnvironmentDatasets';
+export * from './EnvironmentFeatures';
+export * from './EnvironmentListItem';
+export * from './EnvironmentNetworks';
+export * from './EnvironmentOverview';
+export * from './EnvironmentOwnedDatasets';
+export * from './EnvironmentRoleAddForm';
+export * from './EnvironmentSharedDatasets';
+export * from './EnvironmentSubscriptions';
+export * from './EnvironmentTeamInviteEditForm';
+export * from './EnvironmentTeamInviteForm';
+export * from './EnvironmentTeams';
+export * from './NetworkCreateModal';
diff --git a/frontend/src/modules/Environments/services/addConsumptionRoleToEnvironment.js b/frontend/src/modules/Environments/services/addConsumptionRoleToEnvironment.js
new file mode 100644
index 000000000..9fb3c4726
--- /dev/null
+++ b/frontend/src/modules/Environments/services/addConsumptionRoleToEnvironment.js
@@ -0,0 +1,20 @@
+import { gql } from 'apollo-boost';
+
+export const
addConsumptionRoleToEnvironment = (input) => ({
+  variables: {
+    input
+  },
+  mutation: gql`
+    mutation addConsumptionRoleToEnvironment(
+      $input: AddConsumptionRoleToEnvironmentInput!
+    ) {
+      addConsumptionRoleToEnvironment(input: $input) {
+        consumptionRoleUri
+        consumptionRoleName
+        environmentUri
+        groupUri
+        IAMRoleArn
+      }
+    }
+  `
+});
diff --git a/frontend/src/api/Environment/archiveEnvironment.js b/frontend/src/modules/Environments/services/archiveEnvironment.js
similarity index 76%
rename from frontend/src/api/Environment/archiveEnvironment.js
rename to frontend/src/modules/Environments/services/archiveEnvironment.js
index 0789cdcaf..b6eb2a766 100644
--- a/frontend/src/api/Environment/archiveEnvironment.js
+++ b/frontend/src/modules/Environments/services/archiveEnvironment.js
@@ -1,6 +1,6 @@
 import { gql } from 'apollo-boost';
 
-const archiveEnvironment = ({ environmentUri, deleteFromAWS }) => ({
+export const archiveEnvironment = ({ environmentUri, deleteFromAWS }) => ({
   variables: {
     environmentUri,
     deleteFromAWS
@@ -17,5 +17,3 @@ const archiveEnvironment = ({ environmentUri, deleteFromAWS }) => ({
     }
   `
 });
-
-export default archiveEnvironment;
diff --git a/frontend/src/modules/Environments/services/createEnvironment.js b/frontend/src/modules/Environments/services/createEnvironment.js
new file mode 100644
index 000000000..70bf78445
--- /dev/null
+++ b/frontend/src/modules/Environments/services/createEnvironment.js
@@ -0,0 +1,23 @@
+import { gql } from 'apollo-boost';
+
+export const createEnvironment = (input) => ({
+  variables: {
+    input
+  },
+  mutation: gql`
+    mutation CreateEnvironment($input: NewEnvironmentInput) {
+      createEnvironment(input: $input) {
+        environmentUri
+        label
+        userRoleInEnvironment
+        SamlGroupName
+        AwsAccountId
+        created
+        parameters {
+          key
+          value
+        }
+      }
+    }
+  `
+});
diff --git a/frontend/src/api/Vpc/createNetwork.js b/frontend/src/modules/Environments/services/createNetwork.js
similarity index 84%
rename from frontend/src/api/Vpc/createNetwork.js
rename to frontend/src/modules/Environments/services/createNetwork.js
index 5dff2097a..fce882a98 100644
--- a/frontend/src/api/Vpc/createNetwork.js
+++ b/frontend/src/modules/Environments/services/createNetwork.js
@@ -1,6 +1,6 @@
 import { gql } from 'apollo-boost';
 
-const createNetwork = (input) => ({
+export const createNetwork = (input) => ({
   variables: {
     input
   },
@@ -20,5 +20,3 @@ const createNetwork = (input) => ({
     }
   `
 });
-
-export default createNetwork;
diff --git a/frontend/src/modules/Environments/services/deleteNetwork.js b/frontend/src/modules/Environments/services/deleteNetwork.js
new file mode 100644
index 000000000..e9088ac7a
--- /dev/null
+++ b/frontend/src/modules/Environments/services/deleteNetwork.js
@@ -0,0 +1,12 @@
+import { gql } from 'apollo-boost';
+
+export const deleteNetwork = ({ vpcUri }) => ({
+  variables: {
+    vpcUri
+  },
+  mutation: gql`
+    mutation deleteNetwork($vpcUri: String!) {
+      deleteNetwork(vpcUri: $vpcUri)
+    }
+  `
+});
diff --git a/frontend/src/modules/Environments/services/disableDataSubscriptions.js b/frontend/src/modules/Environments/services/disableDataSubscriptions.js
new file mode 100644
index 000000000..50296f79f
--- /dev/null
+++ b/frontend/src/modules/Environments/services/disableDataSubscriptions.js
@@ -0,0 +1,12 @@
+import { gql } from 'apollo-boost';
+
+export const disableDataSubscriptions = ({ environmentUri }) => ({
+  variables: {
+    environmentUri
+  },
+  mutation: gql`
+    mutation DisableDataSubscriptions($environmentUri: String!) {
+      DisableDataSubscriptions(environmentUri: $environmentUri)
+    }
+  `
+});
diff --git a/frontend/src/modules/Environments/services/enableDataSubscriptions.js b/frontend/src/modules/Environments/services/enableDataSubscriptions.js
new file mode 100644
index 000000000..bec469eda
--- /dev/null
+++ b/frontend/src/modules/Environments/services/enableDataSubscriptions.js
@@ -0,0 +1,16 @@
+import { gql } from 'apollo-boost';
+
+export const enableDataSubscriptions = ({ environmentUri, input }) => ({
+  variables: {
+    environmentUri,
+    input
+  },
+  mutation: gql`
+    mutation enableDataSubscriptions(
+      $environmentUri: String!
+      $input: EnableDataSubscriptionsInput
+    ) {
+      enableDataSubscriptions(environmentUri: $environmentUri, input: $input)
+    }
+  `
+});
diff --git a/frontend/src/modules/Environments/services/generateEnvironmentAccessToken.js b/frontend/src/modules/Environments/services/generateEnvironmentAccessToken.js
new file mode 100644
index 000000000..21bf9133b
--- /dev/null
+++ b/frontend/src/modules/Environments/services/generateEnvironmentAccessToken.js
@@ -0,0 +1,22 @@
+import { gql } from 'apollo-boost';
+
+export const generateEnvironmentAccessToken = ({
+  environmentUri,
+  groupUri
+}) => ({
+  variables: {
+    environmentUri,
+    groupUri
+  },
+  query: gql`
+    query GenerateEnvironmentAccessToken(
+      $environmentUri: String!
+      $groupUri: String
+    ) {
+      generateEnvironmentAccessToken(
+        environmentUri: $environmentUri
+        groupUri: $groupUri
+      )
+    }
+  `
+});
diff --git a/frontend/src/modules/Environments/services/getCDKExecPolicyPresignedUrl.js b/frontend/src/modules/Environments/services/getCDKExecPolicyPresignedUrl.js
new file mode 100644
index 000000000..aa864714a
--- /dev/null
+++ b/frontend/src/modules/Environments/services/getCDKExecPolicyPresignedUrl.js
@@ -0,0 +1,12 @@
+import { gql } from 'apollo-boost';
+
+export const getCDKExecPolicyPresignedUrl = (organizationUri) => ({
+  variables: {
+    organizationUri
+  },
+  query: gql`
+    query getCDKExecPolicyPresignedUrl($organizationUri: String!) {
+      getCDKExecPolicyPresignedUrl(organizationUri: $organizationUri)
+    }
+  `
+});
diff --git a/frontend/src/api/Environment/getEnvironment.js b/frontend/src/modules/Environments/services/getEnvironment.js
similarity index 77%
rename from frontend/src/api/Environment/getEnvironment.js
rename to frontend/src/modules/Environments/services/getEnvironment.js
index 6096392f7..faf676531 100644
--- a/frontend/src/api/Environment/getEnvironment.js
+++ b/frontend/src/modules/Environments/services/getEnvironment.js
@@ -1,6 +1,6 @@
 import { gql } from 'apollo-boost';
 
-const getEnvironment = ({ environmentUri }) => ({
+export const getEnvironment = ({ environmentUri }) => ({
   variables: {
     environmentUri
   },
@@ -14,11 +14,6 @@ const getEnvironment = ({ environmentUri }) => ({
       name
      label
       AwsAccountId
-      dashboardsEnabled
-      notebooksEnabled
-      mlStudiosEnabled
-      pipelinesEnabled
-      warehousesEnabled
       region
       owner
       tags
@@ -50,19 +45,16 @@ const getEnvironment = ({ environmentUri }) => ({
        outputs
        resources
       }
-      dashboardsEnabled
-      notebooksEnabled
-      mlStudiosEnabled
-      pipelinesEnabled
-      warehousesEnabled
       networks {
        VpcId
        privateSubnetIds
        publicSubnetIds
      }
+      parameters {
+        key
+        value
+      }
     }
   }
 `
 });
-
-export default getEnvironment;
diff --git a/frontend/src/modules/Environments/services/getEnvironmentAssumeRoleUrl.js b/frontend/src/modules/Environments/services/getEnvironmentAssumeRoleUrl.js
new file mode 100644
index 000000000..448e4eea5
--- /dev/null
+++ b/frontend/src/modules/Environments/services/getEnvironmentAssumeRoleUrl.js
@@ -0,0 +1,19 @@
+import { gql } from 'apollo-boost';
+
+export const getEnvironmentAssumeRoleUrl = ({ environmentUri, groupUri }) => ({
+  variables: {
+    environmentUri,
+    groupUri
+  },
+  query: gql`
+    query getEnvironmentAssumeRoleUrl(
+      $environmentUri: String!
+      $groupUri: String
+    ) {
+      getEnvironmentAssumeRoleUrl(
+        environmentUri: $environmentUri
+        groupUri: $groupUri
+      )
+    }
+  `
+});
diff --git a/frontend/src/modules/Environments/services/getPivotRoleExternalId.js b/frontend/src/modules/Environments/services/getPivotRoleExternalId.js
new file mode 100644
index 000000000..08e520db9
--- /dev/null
+++ b/frontend/src/modules/Environments/services/getPivotRoleExternalId.js
@@ -0,0 +1,12 @@
+import { gql } from 'apollo-boost';
+
+export const getPivotRoleExternalId = (organizationUri) => ({
+  variables: {
+    organizationUri
+  },
+  query: gql`
+    query getPivotRoleExternalId($organizationUri: String!) {
+      getPivotRoleExternalId(organizationUri: $organizationUri)
+    }
+  `
+});
diff --git a/frontend/src/modules/Environments/services/getPivotRoleName.js b/frontend/src/modules/Environments/services/getPivotRoleName.js
new file mode 100644
index 000000000..e49c8d773
--- /dev/null
+++ b/frontend/src/modules/Environments/services/getPivotRoleName.js
@@ -0,0 +1,12 @@
+import { gql } from 'apollo-boost';
+
+export const getPivotRoleName = (organizationUri) => ({
+  variables: {
+    organizationUri
+  },
+  query: gql`
+    query getPivotRoleName($organizationUri: String!) {
+      getPivotRoleName(organizationUri: $organizationUri)
+    }
+  `
+});
diff --git a/frontend/src/modules/Environments/services/getPivotRolePresignedUrl.js b/frontend/src/modules/Environments/services/getPivotRolePresignedUrl.js
new file mode 100644
index 000000000..a3c313620
--- /dev/null
+++ b/frontend/src/modules/Environments/services/getPivotRolePresignedUrl.js
@@ -0,0 +1,12 @@
+import { gql } from 'apollo-boost';
+
+export const getPivotRolePresignedUrl = (organizationUri) => ({
+  variables: {
+    organizationUri
+  },
+  query: gql`
+    query getPivotRolePresignedUrl($organizationUri: String!) {
+      getPivotRolePresignedUrl(organizationUri: $organizationUri)
+    }
+  `
+});
diff --git a/frontend/src/modules/Environments/services/index.js b/frontend/src/modules/Environments/services/index.js
new file mode 100644
index 000000000..14f5b659f
--- /dev/null
+++ b/frontend/src/modules/Environments/services/index.js
@@ -0,0 +1,24 @@
+export * from './addConsumptionRoleToEnvironment';
+export * from './archiveEnvironment';
+export * from './createEnvironment';
+export * from './createNetwork';
+export * from './deleteNetwork';
+export * from './disableDataSubscriptions';
+export * from './enableDataSubscriptions';
+export * from './generateEnvironmentAccessToken';
+export * from './getEnvironment';
+export * from './getEnvironmentAssumeRoleUrl';
+export * from './getPivotRoleExternalId';
+export * from './getPivotRoleName';
+export * from './getPivotRolePresignedUrl';
+export * from './getCDKExecPolicyPresignedUrl.js';
+export * from './inviteGroup';
+export * from './listAllEnvironmentConsumptionRoles';
+export * from './listAllEnvironmentGroups';
+export * from './listDatasetsCreatedInEnvironment';
+export * from './listEnvironmentNetworks';
+export * from './listEnvironmentPermissions';
+export * from './removeConsumptionRole';
+export * from './removeGroup';
+export * from './updateEnvironment';
+export * from './updateGroupEnvironmentPermissions';
diff --git a/frontend/src/modules/Environments/services/inviteGroup.js b/frontend/src/modules/Environments/services/inviteGroup.js
new file mode 100644
index 000000000..32e282fe1
--- /dev/null
+++ b/frontend/src/modules/Environments/services/inviteGroup.js
@@ -0,0 +1,14 @@
+import { gql } from 'apollo-boost';
+
+export const inviteGroupOnEnvironment = (input) => ({
+  variables: {
+    input
+  },
+  mutation: gql`
+    mutation inviteGroupOnEnvironment($input: InviteGroupOnEnvironmentInput!) {
+      inviteGroupOnEnvironment(input: $input) {
+        environmentUri
+      }
+    }
+  `
+});
diff --git a/frontend/src/api/Environment/listAllEnvironmentConsumptionRoles.js b/frontend/src/modules/Environments/services/listAllEnvironmentConsumptionRoles.js
similarity index 81%
rename from frontend/src/api/Environment/listAllEnvironmentConsumptionRoles.js
rename to frontend/src/modules/Environments/services/listAllEnvironmentConsumptionRoles.js
index 435d0dacf..4024a9575 100644
--- a/frontend/src/api/Environment/listAllEnvironmentConsumptionRoles.js
+++ b/frontend/src/modules/Environments/services/listAllEnvironmentConsumptionRoles.js
@@ -1,6 +1,9 @@
 import { gql } from 'apollo-boost';
 
-const listAllEnvironmentConsumptionRoles = ({ filter, environmentUri }) => ({
+export const listAllEnvironmentConsumptionRoles = ({
+  filter,
+  environmentUri
+}) => ({
   variables: {
     environmentUri,
     filter
@@ -30,5 +33,3 @@
     }
   `
 });
-
-export default listAllEnvironmentConsumptionRoles;
diff --git a/frontend/src/api/Environment/listAllEnvironmentGroups.js b/frontend/src/modules/Environments/services/listAllEnvironmentGroups.js
similarity index 87%
rename from frontend/src/api/Environment/listAllEnvironmentGroups.js
rename to frontend/src/modules/Environments/services/listAllEnvironmentGroups.js
index b5657d398..bc97920ea 100644
--- a/frontend/src/api/Environment/listAllEnvironmentGroups.js
+++ b/frontend/src/modules/Environments/services/listAllEnvironmentGroups.js
@@ -1,6 +1,6 @@
 import { gql } from 'apollo-boost';
 
-const listAllEnvironmentGroups = ({ filter, environmentUri }) => ({
+export const listAllEnvironmentGroups = ({ filter, environmentUri }) => ({
   variables: {
     environmentUri,
     filter
@@ -36,5 +36,3 @@ const listAllEnvironmentGroups = ({ filter, environmentUri }) => ({
     }
   `
 });
-
-export default listAllEnvironmentGroups;
diff --git a/frontend/src/api/Environment/listDatasetsCreatedInEnvironment.js b/frontend/src/modules/Environments/services/listDatasetsCreatedInEnvironment.js
similarity index 84%
rename from frontend/src/api/Environment/listDatasetsCreatedInEnvironment.js
rename to frontend/src/modules/Environments/services/listDatasetsCreatedInEnvironment.js
index 4981b8c51..840106ec8 100644
--- a/frontend/src/api/Environment/listDatasetsCreatedInEnvironment.js
+++ b/frontend/src/modules/Environments/services/listDatasetsCreatedInEnvironment.js
@@ -1,6 +1,9 @@
 import { gql } from 'apollo-boost';
 
-const listDatasetsCreatedInEnvironment = ({ filter, environmentUri }) => ({
+export const listDatasetsCreatedInEnvironment = ({
+  filter,
+  environmentUri
+}) => ({
   variables: {
     environmentUri,
     filter
@@ -38,5 +41,3 @@ const listDatasetsCreatedInEnvironment = ({ filter, environmentUri }) => ({
     }
   `
 });
-
-export default listDatasetsCreatedInEnvironment;
diff --git a/frontend/src/api/Environment/listEnvironmentNetworks.js b/frontend/src/modules/Environments/services/listEnvironmentNetworks.js
similarity index 84%
rename from frontend/src/api/Environment/listEnvironmentNetworks.js
rename to frontend/src/modules/Environments/services/listEnvironmentNetworks.js
index 4dfa5769a..297709849 100644
--- a/frontend/src/api/Environment/listEnvironmentNetworks.js
+++ b/frontend/src/modules/Environments/services/listEnvironmentNetworks.js
@@ -1,6 +1,6 @@
 import { gql } from 'apollo-boost';
 
-const listEnvironmentNetworks = ({ filter, environmentUri }) => ({
+export const listEnvironmentNetworks = ({ filter, environmentUri }) => ({
   variables: {
     environmentUri,
filter @@ -34,5 +34,3 @@ const listEnvironmentNetworks = ({ filter, environmentUri }) => ({ } ` }); - -export default listEnvironmentNetworks; diff --git a/frontend/src/modules/Environments/services/listEnvironmentPermissions.js b/frontend/src/modules/Environments/services/listEnvironmentPermissions.js new file mode 100644 index 000000000..47315f2d8 --- /dev/null +++ b/frontend/src/modules/Environments/services/listEnvironmentPermissions.js @@ -0,0 +1,20 @@ +import { gql } from 'apollo-boost'; + +export const listEnvironmentGroupInvitationPermissions = ({ + environmentUri +}) => ({ + variables: { + environmentUri + }, + query: gql` + query listEnvironmentGroupInvitationPermissions($environmentUri: String) { + listEnvironmentGroupInvitationPermissions( + environmentUri: $environmentUri + ) { + permissionUri + name + description + } + } + ` +}); diff --git a/frontend/src/modules/Environments/services/removeConsumptionRole.js b/frontend/src/modules/Environments/services/removeConsumptionRole.js new file mode 100644 index 000000000..faa0c2b7c --- /dev/null +++ b/frontend/src/modules/Environments/services/removeConsumptionRole.js @@ -0,0 +1,22 @@ +import { gql } from 'apollo-boost'; + +export const removeConsumptionRoleFromEnvironment = ({ + environmentUri, + consumptionRoleUri +}) => ({ + variables: { + environmentUri, + consumptionRoleUri + }, + mutation: gql` + mutation removeConsumptionRoleFromEnvironment( + $environmentUri: String! + $consumptionRoleUri: String! + ) { + removeConsumptionRoleFromEnvironment( + environmentUri: $environmentUri + consumptionRoleUri: $consumptionRoleUri + ) + } + ` +}); diff --git a/frontend/src/modules/Environments/services/removeGroup.js b/frontend/src/modules/Environments/services/removeGroup.js new file mode 100644 index 000000000..da03d924b --- /dev/null +++ b/frontend/src/modules/Environments/services/removeGroup.js @@ -0,0 +1,21 @@ +import { gql } from 'apollo-boost'; + +export const removeGroupFromEnvironment = ({ environmentUri, groupUri }) => ({ + variables: { + environmentUri, + groupUri + }, + mutation: gql` + mutation removeGroupFromEnvironment( + $environmentUri: String! + $groupUri: String! + ) { + removeGroupFromEnvironment( + environmentUri: $environmentUri + groupUri: $groupUri + ) { + environmentUri + } + } + ` +}); diff --git a/frontend/src/modules/Environments/services/updateEnvironment.js b/frontend/src/modules/Environments/services/updateEnvironment.js new file mode 100644 index 000000000..e49cba0ee --- /dev/null +++ b/frontend/src/modules/Environments/services/updateEnvironment.js @@ -0,0 +1,27 @@ +import { gql } from 'apollo-boost'; + +export const updateEnvironment = ({ environmentUri, input }) => ({ + variables: { + environmentUri, + input + }, + mutation: gql` + mutation UpdateEnvironment( + $environmentUri: String! 
+ $input: ModifyEnvironmentInput + ) { + updateEnvironment(environmentUri: $environmentUri, input: $input) { + environmentUri + label + userRoleInEnvironment + SamlGroupName + AwsAccountId + created + parameters { + key + value + } + } + } + ` +}); diff --git a/frontend/src/modules/Environments/services/updateGroupEnvironmentPermissions.js b/frontend/src/modules/Environments/services/updateGroupEnvironmentPermissions.js new file mode 100644 index 000000000..872f70968 --- /dev/null +++ b/frontend/src/modules/Environments/services/updateGroupEnvironmentPermissions.js @@ -0,0 +1,16 @@ +import { gql } from 'apollo-boost'; + +export const updateGroupEnvironmentPermissions = (input) => ({ + variables: { + input + }, + mutation: gql` + mutation updateGroupEnvironmentPermissions( + $input: InviteGroupOnEnvironmentInput! + ) { + updateGroupEnvironmentPermissions(input: $input) { + environmentUri + } + } + ` +}); diff --git a/frontend/src/views/Environments/EnvironmentCreateForm.js b/frontend/src/modules/Environments/views/EnvironmentCreateForm.js similarity index 80% rename from frontend/src/views/Environments/EnvironmentCreateForm.js rename to frontend/src/modules/Environments/views/EnvironmentCreateForm.js index 88978f9f4..c2f70e89c 100644 --- a/frontend/src/views/Environments/EnvironmentCreateForm.js +++ b/frontend/src/modules/Environments/views/EnvironmentCreateForm.js @@ -1,8 +1,6 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; -import { useSnackbar } from 'notistack'; +import { CloudDownloadOutlined, CopyAllOutlined } from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; +import Autocomplete from '@mui/lab/Autocomplete'; import { Box, Breadcrumbs, @@ -24,28 +22,36 @@ import { TextField, Typography } from '@mui/material'; -import { Helmet } from 'react-helmet-async'; -import { LoadingButton } from '@mui/lab'; -import { CopyToClipboard } from 'react-copy-to-clipboard/lib/Component'; -import { CloudDownloadOutlined, CopyAllOutlined } from '@mui/icons-material'; import { useTheme } from '@mui/styles'; -import Autocomplete from '@mui/lab/Autocomplete'; -import useClient from '../../hooks/useClient'; -import useGroups from '../../hooks/useGroups'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import useSettings from '../../hooks/useSettings'; -import getOrganization from '../../api/Organization/getOrganization'; -import createEnvironment from '../../api/Environment/createEnvironment'; -import getTrustAccount from '../../api/Environment/getTrustAccount'; -import { AwsRegions } from '../../constants'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import ChipInput from '../../components/TagsInput'; -import getPivotRolePresignedUrl from '../../api/Environment/getPivotRolePresignedUrl'; -import getCDKExecPolicyPresignedUrl from '../../api/Environment/getCDKExecPolicyPresignedUrl'; -import getPivotRoleExternalId from '../../api/Environment/getPivotRoleExternalId'; -import getPivotRoleName from '../../api/Environment/getPivotRoleName'; + +import { Formik } from 'formik'; +import { useSnackbar } from 'notistack'; +import React, { useCallback, useEffect, useState } from 'react'; +import { CopyToClipboard } from 'react-copy-to-clipboard/lib/Component'; +import { Helmet } from 'react-helmet-async'; +import { Link 
as RouterLink, useNavigate, useParams } from 'react-router-dom'; +import * as Yup from 'yup'; +import { + ArrowLeftIcon, + ChevronRightIcon, + ChipInput, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { + getOrganization, + getTrustAccount, + useClient, + useGroups +} from 'services'; +import { + createEnvironment, + getPivotRoleExternalId, + getPivotRoleName, + getPivotRolePresignedUrl, + getCDKExecPolicyPresignedUrl +} from '../services'; +import { AwsRegions } from 'utils'; const EnvironmentCreateForm = (props) => { const dispatch = useDispatch(); @@ -99,14 +105,16 @@ const EnvironmentCreateForm = (props) => { }; const getCDKExecPolicyUrl = async () => { - const response = await client.query(getCDKExecPolicyPresignedUrl(params.uri)); + const response = await client.query( + getCDKExecPolicyPresignedUrl(params.uri) + ); if (!response.errors) { window.open(response.data.getCDKExecPolicyPresignedUrl, '_blank'); } else { dispatch({ type: SET_ERROR, error: response.errors[0].message }); } }; - + const getExternalId = async () => { const response = await client.query(getPivotRoleExternalId(params.uri)); if (!response.errors) { @@ -164,13 +172,26 @@ const EnvironmentCreateForm = (props) => { tags: values.tags, description: values.description, region: values.region, - dashboardsEnabled: values.dashboardsEnabled, - notebooksEnabled: values.notebooksEnabled, - mlStudiosEnabled: values.mlStudiosEnabled, - pipelinesEnabled: values.pipelinesEnabled, - warehousesEnabled: values.warehousesEnabled, EnvironmentDefaultIAMRoleName: values.EnvironmentDefaultIAMRoleName, - resourcePrefix: values.resourcePrefix + resourcePrefix: values.resourcePrefix, + parameters: [ + { + key: 'notebooksEnabled', + value: String(values.notebooksEnabled) + }, + { + key: 'dashboardsEnabled', + value: String(values.dashboardsEnabled) + }, + { + key: 'mlStudiosEnabled', + value: String(values.mlStudiosEnabled) + }, + { + key: 'pipelinesEnabled', + value: String(values.pipelinesEnabled) + } + ] }) ); if (!response.errors) { @@ -190,7 +211,6 @@ const EnvironmentCreateForm = (props) => { dispatch({ type: SET_ERROR, error: response.errors[0].message }); } } catch (err) { - console.error(err); setStatus({ success: false }); setErrors({ submit: err.message }); setSubmitting(false); @@ -198,6 +218,11 @@ const EnvironmentCreateForm = (props) => { } } + const regions = AwsRegions.map((region) => ({ + label: region.name, + value: region.code + })); + if (loading) { return ; } @@ -275,32 +300,45 @@ const EnvironmentCreateForm = (props) => { - 1. (OPTIONAL) As part of setting up your AWS Environment with CDK you need to specify a IAM Policy that gives permission for CDK to create AWS Resources via CloudFormation (i.e. CDK Execution Policy). - You optionally can use the below CloudFormation template to create the custom IAM policy that is more restrictive than the default AdministratorAccess policy. + 1. (OPTIONAL) As part of setting up your AWS Environment with + CDK you need to specify a IAM Policy that gives permission for + CDK to create AWS Resources via CloudFormation (i.e. CDK + Execution Policy). You optionally can use the below + CloudFormation template to create the custom IAM policy that + is more restrictive than the default{' '} + AdministratorAccess policy. 2. 
Bootstrap your AWS account with AWS CDK - + - + If Using Default CDK Execution Policy: @@ -327,7 +365,11 @@ const EnvironmentCreateForm = (props) => { - + If Using Custom CDK Execution Policy (From Step 1): @@ -353,68 +395,72 @@ const EnvironmentCreateForm = (props) => { - {process.env.REACT_APP_ENABLE_PIVOT_ROLE_AUTO_CREATE == 'True' ? ( - - - 3. As part of the environment CloudFormation stack data.all will create an IAM role (Pivot Role) to manage AWS operations in the environment AWS Account. - - - ): ( - - - - 3. Create an IAM role named {pivotRoleName} using the AWS - CloudFormation stack below - - - - - - - - - - - )} + {process.env.REACT_APP_ENABLE_PIVOT_ROLE_AUTO_CREATE === + 'True' ? ( + + + 3. As part of the environment CloudFormation stack data.all + will create an IAM role (Pivot Role) to manage AWS + operations in the environment AWS Account. + + + ) : ( + + + + 3. Create an IAM role named {pivotRoleName} using + the AWS CloudFormation stack below + + + + + + + + + + + )} - Make sure that the services needed for the selected environment features are available in your AWS Account. + Make sure that the services needed for the selected + environment features are available in your AWS Account. @@ -432,7 +478,6 @@ const EnvironmentCreateForm = (props) => { notebooksEnabled: true, mlStudiosEnabled: true, pipelinesEnabled: true, - warehousesEnabled: true, EnvironmentDefaultIAMRoleName: '', resourcePrefix: 'dataall' }} @@ -453,7 +498,7 @@ const EnvironmentCreateForm = (props) => { 'region', 'Region is not supported', (region) => - AwsRegions.filter((option) => + regions.filter((option) => [option.label, option.value].includes(region) ).length >= 1 ), @@ -682,37 +727,6 @@ const EnvironmentCreateForm = (props) => { /> -{/* - - - } - label={ - - Warehouses{' '} - - (Requires Amazon Redshift clusters) - - - } - labelPlacement="end" - value={values.warehousesEnabled} - /> - - */} @@ -742,9 +756,9 @@ const EnvironmentCreateForm = (props) => { option.label)} + options={regions.map((option) => option.label)} onChange={(event, value) => { - const selectedRegion = AwsRegions.filter( + const selectedRegion = regions.filter( (option) => option.label === value ); setFieldValue( diff --git a/frontend/src/views/Environments/EnvironmentEditForm.js b/frontend/src/modules/Environments/views/EnvironmentEditForm.js similarity index 88% rename from frontend/src/views/Environments/EnvironmentEditForm.js rename to frontend/src/modules/Environments/views/EnvironmentEditForm.js index 2f7c53342..4c3e4ce40 100644 --- a/frontend/src/views/Environments/EnvironmentEditForm.js +++ b/frontend/src/modules/Environments/views/EnvironmentEditForm.js @@ -1,8 +1,4 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; -import { useSnackbar } from 'notistack'; +import { LoadingButton } from '@mui/lab'; import { Box, Breadcrumbs, @@ -21,17 +17,21 @@ import { TextField, Typography } from '@mui/material'; +import { Formik } from 'formik'; +import { useSnackbar } from 'notistack'; +import React, { useCallback, useEffect, useState } from 'react'; import { Helmet } from 'react-helmet-async'; -import { LoadingButton } from '@mui/lab'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import useSettings from '../../hooks/useSettings'; -import { SET_ERROR } from 
'../../store/errorReducer'; -import { useDispatch } from '../../store'; -import ChipInput from '../../components/TagsInput'; -import getEnvironment from '../../api/Environment/getEnvironment'; -import updateEnvironment from '../../api/Environment/updateEnvironment'; +import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; +import * as Yup from 'yup'; +import { + ArrowLeftIcon, + ChevronRightIcon, + ChipInput, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { getEnvironment, updateEnvironment } from '../services'; const EnvironmentEditForm = (props) => { const dispatch = useDispatch(); @@ -48,7 +48,11 @@ const EnvironmentEditForm = (props) => { getEnvironment({ environmentUri: params.uri }) ); if (!response.errors && response.data.getEnvironment) { - setEnv(response.data.getEnvironment); + const environment = response.data.getEnvironment; + environment.parameters = Object.fromEntries( + environment.parameters.map((x) => [x.key, x.value]) + ); + setEnv(environment); } else { const error = response.errors ? response.errors[0].message @@ -71,12 +75,25 @@ const EnvironmentEditForm = (props) => { label: values.label, tags: values.tags, description: values.description, - dashboardsEnabled: values.dashboardsEnabled, - notebooksEnabled: values.notebooksEnabled, - mlStudiosEnabled: values.mlStudiosEnabled, - pipelinesEnabled: values.pipelinesEnabled, - warehousesEnabled: values.warehousesEnabled, - resourcePrefix: values.resourcePrefix + resourcePrefix: values.resourcePrefix, + parameters: [ + { + key: 'notebooksEnabled', + value: String(values.notebooksEnabled) + }, + { + key: 'mlStudiosEnabled', + value: String(values.mlStudiosEnabled) + }, + { + key: 'pipelinesEnabled', + value: String(values.pipelinesEnabled) + }, + { + key: 'dashboardsEnabled', + value: String(values.dashboardsEnabled) + } + ] } }) ); @@ -191,11 +208,11 @@ const EnvironmentEditForm = (props) => { label: env.label, description: env.description, tags: env.tags || [], - dashboardsEnabled: env.dashboardsEnabled, - notebooksEnabled: env.notebooksEnabled, - mlStudiosEnabled: env.mlStudiosEnabled, - pipelinesEnabled: env.pipelinesEnabled, - warehousesEnabled: env.warehousesEnabled, + notebooksEnabled: env.parameters['notebooksEnabled'] === 'true', + mlStudiosEnabled: env.parameters['mlStudiosEnabled'] === 'true', + pipelinesEnabled: env.parameters['pipelinesEnabled'] === 'true', + dashboardsEnabled: + env.parameters['dashboardsEnabled'] === 'true', resourcePrefix: env.resourcePrefix }} validationSchema={Yup.object().shape({ @@ -458,6 +475,7 @@ const EnvironmentEditForm = (props) => { } labelPlacement="end" + value={values.mlStudiosEnabled} /> @@ -490,37 +508,6 @@ const EnvironmentEditForm = (props) => { /> -{/* - - - } - label={ - - Warehouses{' '} - - (Requires Amazon Redshift clusters) - - - } - labelPlacement="end" - value={values.warehousesEnabled} - /> - - */} diff --git a/frontend/src/views/Environments/EnvironmentList.js b/frontend/src/modules/Environments/views/EnvironmentList.js similarity index 81% rename from frontend/src/views/Environments/EnvironmentList.js rename to frontend/src/modules/Environments/views/EnvironmentList.js index 6b105d1c2..0f86aaa66 100644 --- a/frontend/src/views/Environments/EnvironmentList.js +++ b/frontend/src/modules/Environments/views/EnvironmentList.js @@ -1,5 +1,3 @@ -import { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink } from 'react-router-dom'; import 
{ Box, Breadcrumbs, @@ -10,17 +8,19 @@ import { Typography } from '@mui/material'; import CircularProgress from '@mui/material/CircularProgress'; +import { useCallback, useEffect, useState } from 'react'; import { Helmet } from 'react-helmet-async'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import useSettings from '../../hooks/useSettings'; -import listEnvironments from '../../api/Environment/listEnvironments'; -import SearchInput from '../../components/SearchInput'; -import Pager from '../../components/Pager'; -import EnvironmentListItem from './EnvironmentListItem'; -import { useDispatch } from '../../store'; -import { SET_ERROR } from '../../store/errorReducer'; +import { Link as RouterLink } from 'react-router-dom'; +import { + ChevronRightIcon, + Defaults, + Pager, + SearchInput, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { listEnvironments, useClient } from 'services'; +import { EnvironmentListItem } from '../components'; function EnvironmentsPageHeader() { return ( @@ -60,15 +60,15 @@ function EnvironmentsPageHeader() { const EnvironmentList = () => { const dispatch = useDispatch(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); + const [items, setItems] = useState(Defaults.pagedResponse); + const [filter, setFilter] = useState(Defaults.filter); const { settings } = useSettings(); const [inputValue, setInputValue] = useState(''); const [loading, setLoading] = useState(true); const client = useClient(); const fetchItems = useCallback(async () => { setLoading(true); - const response = await client.query(listEnvironments({filter})); + const response = await client.query(listEnvironments({ filter })); if (!response.errors) { setItems(response.data.listEnvironments); } else { @@ -84,7 +84,7 @@ const EnvironmentList = () => { const handleInputKeyup = (event) => { if (event.code === 'Enter') { - setFilter({page: 1, term: event.target.value}); + setFilter({ page: 1, term: event.target.value }); fetchItems().catch((e) => dispatch({ type: SET_ERROR, error: e.message }) ); diff --git a/frontend/src/views/Environments/EnvironmentView.js b/frontend/src/modules/Environments/views/EnvironmentView.js similarity index 85% rename from frontend/src/views/Environments/EnvironmentView.js rename to frontend/src/modules/Environments/views/EnvironmentView.js index 71c271af3..524621107 100644 --- a/frontend/src/views/Environments/EnvironmentView.js +++ b/frontend/src/modules/Environments/views/EnvironmentView.js @@ -1,6 +1,11 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useParams } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; +import { + FolderOpen, + Info, + LocalOffer, + NotificationsActive, + SupervisedUserCircleRounded, + Warning +} from '@mui/icons-material'; import { Box, Breadcrumbs, @@ -16,36 +21,29 @@ import { Tabs, Typography } from '@mui/material'; -import { - FolderOpen, - Info, - LocalOffer, - NotificationsActive, - SupervisedUserCircleRounded, - Warning -} from '@mui/icons-material'; import { useSnackbar } from 'notistack'; -import { useNavigate } from 'react-router'; +import React, { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; import { FaAws, FaNetworkWired, FaTrash } from 'react-icons/fa'; -import { 
GoDatabase } from 'react-icons/go'; -import useSettings from '../../hooks/useSettings'; -import getEnvironment from '../../api/Environment/getEnvironment'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import EnvironmentOverview from './EnvironmentOverview'; -import EnvironmentDatasets from './EnvironmentDatasets'; -import EnvironmentWarehouses from './EnvironmentWarehouses'; -import Stack from '../Stack/Stack'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import archiveEnvironment from '../../api/Environment/archiveEnvironment'; -import PencilAltIcon from '../../icons/PencilAlt'; -import EnvironmentSubscriptions from './EnvironmentSubscriptions'; -import EnvironmentTeams from './EnvironmentTeams'; -import EnvironmentNetworks from '../Networks/NetworkList'; -import DeleteObjectWithFrictionModal from '../../components/DeleteObjectWithFrictionModal'; -import StackStatus from '../Stack/StackStatus'; -import KeyValueTagList from '../KeyValueTags/KeyValueTagList'; +import { useNavigate } from 'react-router'; +import { Link as RouterLink, useParams } from 'react-router-dom'; +import { + ChevronRightIcon, + DeleteObjectWithFrictionModal, + PencilAltIcon, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { archiveEnvironment, getEnvironment } from '../services'; +import { KeyValueTagList, Stack, StackStatus } from 'modules/Shared'; +import { + EnvironmentDatasets, + EnvironmentOverview, + EnvironmentSubscriptions, + EnvironmentTeams, + EnvironmentNetworks +} from '../components'; const tabs = [ { label: 'Overview', value: 'overview', icon: }, @@ -60,7 +58,6 @@ const tabs = [ icon: }, { label: 'Networks', value: 'networks', icon: }, - /*{ label: 'Warehouses', value: 'warehouses', icon: },*/ { label: 'Subscriptions', value: 'subscriptions', @@ -122,12 +119,14 @@ const EnvironmentView = () => { getEnvironment({ environmentUri: params.uri }) ); if (!response.errors && response.data.getEnvironment) { - setEnv(response.data.getEnvironment); - setStack(response.data.getEnvironment.stack); + const environment = response.data.getEnvironment; + environment.parameters = Object.fromEntries( + environment.parameters.map((x) => [x.key, x.value]) + ); + setEnv(environment); + setStack(environment.stack); setIsAdmin( - ['Admin', 'Owner'].indexOf( - response.data.getEnvironment.userRoleInEnvironment - ) !== -1 + ['Admin', 'Owner'].indexOf(environment.userRoleInEnvironment) !== -1 ); } else { const error = response.errors @@ -258,9 +257,6 @@ const EnvironmentView = () => { {currentTab === 'networks' && ( )} - {currentTab === 'warehouses' && ( - - )} {currentTab === 'subscriptions' && ( { +export const FolderOverview = (props) => { const { folder, isAdmin, ...other } = props; return ( @@ -44,5 +43,3 @@ FolderOverview.propTypes = { folder: PropTypes.object.isRequired, isAdmin: PropTypes.bool.isRequired }; - -export default FolderOverview; diff --git a/frontend/src/views/Folders/FolderS3Properties.js b/frontend/src/modules/Folders/components/FolderS3Properties.js similarity index 95% rename from frontend/src/views/Folders/FolderS3Properties.js rename to frontend/src/modules/Folders/components/FolderS3Properties.js index 7e2650ab1..5118896f5 100644 --- a/frontend/src/views/Folders/FolderS3Properties.js +++ b/frontend/src/modules/Folders/components/FolderS3Properties.js @@ -7,7 +7,7 @@ import { Typography } from 
'@mui/material'; -const FolderS3Properties = (props) => { +export const FolderS3Properties = (props) => { const { folder } = props; return ( @@ -54,5 +54,3 @@ FolderS3Properties.propTypes = { folder: PropTypes.object.isRequired, isAdmin: PropTypes.bool.isRequired }; - -export default FolderS3Properties; diff --git a/frontend/src/modules/Folders/components/index.js b/frontend/src/modules/Folders/components/index.js new file mode 100644 index 000000000..6da5e8c6b --- /dev/null +++ b/frontend/src/modules/Folders/components/index.js @@ -0,0 +1,2 @@ +export * from './FolderOverview'; +export * from './FolderS3Properties'; diff --git a/frontend/src/api/Dataset/getDatasetStorageLocation.js b/frontend/src/modules/Folders/services/getDatasetStorageLocation.js similarity index 91% rename from frontend/src/api/Dataset/getDatasetStorageLocation.js rename to frontend/src/modules/Folders/services/getDatasetStorageLocation.js index 2cce74417..cd0e738e6 100644 --- a/frontend/src/api/Dataset/getDatasetStorageLocation.js +++ b/frontend/src/modules/Folders/services/getDatasetStorageLocation.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getDatasetStorageLocation = (locationUri) => ({ +export const getDatasetStorageLocation = (locationUri) => ({ variables: { locationUri }, @@ -50,5 +50,3 @@ const getDatasetStorageLocation = (locationUri) => ({ } ` }); - -export default getDatasetStorageLocation; diff --git a/frontend/src/modules/Folders/services/index.js b/frontend/src/modules/Folders/services/index.js new file mode 100644 index 000000000..aed415a25 --- /dev/null +++ b/frontend/src/modules/Folders/services/index.js @@ -0,0 +1,2 @@ +export * from './getDatasetStorageLocation'; +export * from './updateDatasetStorageLocation'; diff --git a/frontend/src/api/Dataset/updateDatasetStorageLocation.js b/frontend/src/modules/Folders/services/updateDatasetStorageLocation.js similarity index 75% rename from frontend/src/api/Dataset/updateDatasetStorageLocation.js rename to frontend/src/modules/Folders/services/updateDatasetStorageLocation.js index 8aa49f052..1c90766fd 100644 --- a/frontend/src/api/Dataset/updateDatasetStorageLocation.js +++ b/frontend/src/modules/Folders/services/updateDatasetStorageLocation.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const updateDatasetStorageLocation = ({ locationUri, input }) => ({ +export const updateDatasetStorageLocation = ({ locationUri, input }) => ({ variables: { locationUri, input @@ -16,5 +16,3 @@ const updateDatasetStorageLocation = ({ locationUri, input }) => ({ } ` }); - -export default updateDatasetStorageLocation; diff --git a/frontend/src/views/Folders/FolderCreateForm.js b/frontend/src/modules/Folders/views/FolderCreateForm.js similarity index 95% rename from frontend/src/views/Folders/FolderCreateForm.js rename to frontend/src/modules/Folders/views/FolderCreateForm.js index 2ee0eaaf5..5d59e254e 100644 --- a/frontend/src/views/Folders/FolderCreateForm.js +++ b/frontend/src/modules/Folders/views/FolderCreateForm.js @@ -1,5 +1,4 @@ -import PropTypes from 'prop-types'; -import { useSnackbar } from 'notistack'; +import { LoadingButton } from '@mui/lab'; import { Box, Breadcrumbs, @@ -16,20 +15,20 @@ import { Typography } from '@mui/material'; import { Formik } from 'formik'; -import { LoadingButton } from '@mui/lab'; -import * as Yup from 'yup'; -import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; +import { useSnackbar } from 'notistack'; +import PropTypes from 'prop-types'; 
import { useCallback, useEffect, useState } from 'react'; -import ChipInput from '../../components/TagsInput'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import useClient from '../../hooks/useClient'; -import addDatasetStorageLocation from '../../api/Dataset/addDatasetStorageLocation'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import useSettings from '../../hooks/useSettings'; -import getDataset from '../../api/Dataset/getDataset'; +import { Helmet } from 'react-helmet-async'; +import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; +import * as Yup from 'yup'; +import { + ArrowLeftIcon, + ChevronRightIcon, + ChipInput, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient, addDatasetStorageLocation, getDataset } from 'services'; function FolderCreateHeader(props) { const { dataset } = props; diff --git a/frontend/src/views/Folders/FolderEditForm.js b/frontend/src/modules/Folders/views/FolderEditForm.js similarity index 95% rename from frontend/src/views/Folders/FolderEditForm.js rename to frontend/src/modules/Folders/views/FolderEditForm.js index 443f9a809..1ad99cd71 100644 --- a/frontend/src/views/Folders/FolderEditForm.js +++ b/frontend/src/modules/Folders/views/FolderEditForm.js @@ -1,6 +1,4 @@ -import { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; +import { LoadingButton } from '@mui/lab'; import { Autocomplete, Box, @@ -17,23 +15,27 @@ import { TextField, Typography } from '@mui/material'; -import { Formik } from 'formik'; import CircularProgress from '@mui/material/CircularProgress'; -import { LoadingButton } from '@mui/lab'; -import * as PropTypes from 'prop-types'; +import { Formik } from 'formik'; import { useSnackbar } from 'notistack'; +import * as PropTypes from 'prop-types'; +import { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; +import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; import * as Yup from 'yup'; -import useSettings from '../../hooks/useSettings'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import useClient from '../../hooks/useClient'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import searchGlossary from '../../api/Glossary/searchGlossary'; -import ChipInput from '../../components/TagsInput'; -import getDatasetStorageLocation from '../../api/Dataset/getDatasetStorageLocation'; -import updateDatasetStorageLocation from '../../api/Dataset/updateDatasetStorageLocation'; -import * as Defaults from '../../components/defaults'; +import { + ArrowLeftIcon, + ChevronRightIcon, + ChipInput, + Defaults, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient, searchGlossary } from 'services'; +import { + getDatasetStorageLocation, + updateDatasetStorageLocation +} from '../services'; function FolderEditHeader(props) { const { folder } = props; @@ -138,7 +140,7 @@ const FolderEditForm = () => { ); } setFolderTerms(fetchedTerms); - response = client.query(searchGlossary(Defaults.SelectListFilter)); + response = client.query(searchGlossary(Defaults.selectListFilter)); response.then((result) => { if ( 
result.data.searchGlossary && diff --git a/frontend/src/views/Folders/FolderView.js b/frontend/src/modules/Folders/views/FolderView.js similarity index 91% rename from frontend/src/views/Folders/FolderView.js rename to frontend/src/modules/Folders/views/FolderView.js index f7fc281de..3f3cb601a 100644 --- a/frontend/src/views/Folders/FolderView.js +++ b/frontend/src/modules/Folders/views/FolderView.js @@ -1,6 +1,5 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useParams } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; +import { ForumOutlined, Warning } from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; import { Box, Breadcrumbs, @@ -16,24 +15,29 @@ import { Tabs, Typography } from '@mui/material'; +import { useSnackbar } from 'notistack'; import * as PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; import { FaExternalLinkAlt, FaTrash } from 'react-icons/fa'; import { useNavigate } from 'react-router'; -import { ForumOutlined, Warning } from '@mui/icons-material'; -import { useSnackbar } from 'notistack'; -import { LoadingButton } from '@mui/lab'; -import useSettings from '../../hooks/useSettings'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import DeleteObjectModal from '../../components/DeleteObjectModal'; -import deleteDatasetStorageLocation from '../../api/Dataset/removeDatasetStorageLocation'; -import getDatasetStorageLocation from '../../api/Dataset/getDatasetStorageLocation'; -import FolderOverview from './FolderOverview'; -import PencilAltIcon from '../../icons/PencilAlt'; -import FeedComments from '../Feed/FeedComments'; -import getDatasetAdminConsoleUrl from '../../api/Dataset/getDatasetAdminConsoleUrl'; +import { Link as RouterLink, useParams } from 'react-router-dom'; +import { + ChevronRightIcon, + DeleteObjectModal, + PencilAltIcon, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { + useClient, + deleteDatasetStorageLocation, + getDatasetAssumeRoleUrl +} from 'services'; +import { getDatasetStorageLocation } from '../services'; + +import { FeedComments } from 'modules/Shared'; +import { FolderOverview } from '../components'; const tabs = [{ label: 'Overview', value: 'overview' }]; @@ -47,7 +51,7 @@ function FolderPageHeader(props) { const goToS3Console = async () => { setIsLoadingUI(true); const response = await client.query( - getDatasetAdminConsoleUrl(folder.dataset.datasetUri) + getDatasetAssumeRoleUrl(folder.dataset.datasetUri) ); if (!response.errors) { window.open(response.data.getDatasetAssumeRoleUrl, '_blank'); @@ -152,7 +156,7 @@ function FolderPageHeader(props) { > Delete - )} + )} {openFeed && ( diff --git a/frontend/src/views/Glossaries/GlossaryAssociations.js b/frontend/src/modules/Glossaries/components/GlossaryAssociations.js similarity index 90% rename from frontend/src/views/Glossaries/GlossaryAssociations.js rename to frontend/src/modules/Glossaries/components/GlossaryAssociations.js index a3c2d5bd8..2d55074b5 100644 --- a/frontend/src/views/Glossaries/GlossaryAssociations.js +++ b/frontend/src/modules/Glossaries/components/GlossaryAssociations.js @@ -1,5 +1,5 @@ -import PropTypes from 'prop-types'; -import { useCallback, useEffect, useState } from 'react'; +import { 
BlockOutlined, CheckCircleOutlined } from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; import { Box, Card, @@ -14,28 +14,24 @@ import { } from '@mui/material'; import CircularProgress from '@mui/material/CircularProgress'; import { useSnackbar } from 'notistack'; -import { BlockOutlined, CheckCircleOutlined } from '@mui/icons-material'; -import { LoadingButton } from '@mui/lab'; +import PropTypes from 'prop-types'; +import { useCallback, useEffect, useState } from 'react'; import { Link as RouterLink } from 'react-router-dom'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import Scrollbar from '../../components/Scrollbar'; -import RefreshTableMenu from '../../components/RefreshTableMenu'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import listGlossaryAssociations from '../../api/Glossary/listGlossaryAssociations'; -import approveTermAssociation from '../../api/Glossary/approveTermAssociation'; -import dismissTermAssociation from '../../api/Glossary/dismissTermAssociation'; -import Pager from '../../components/Pager'; -import useAuth from '../../hooks/useAuth'; +import { Defaults, Pager, RefreshTableMenu, Scrollbar } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { + approveTermAssociation, + dismissTermAssociation, + listGlossaryAssociations +} from '../services'; -const GlossaryAssociations = ({ glossary }) => { +export const GlossaryAssociations = ({ glossary }) => { const client = useClient(); const dispatch = useDispatch(); - const { user } = useAuth(); const { enqueueSnackbar } = useSnackbar(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); + const [items, setItems] = useState(Defaults.pagedResponse); + const [filter, setFilter] = useState(Defaults.filter); const [approving, setApproving] = useState(false); const [loading, setLoading] = useState(true); const [isAdmin, setIsAdmin] = useState(false); @@ -50,9 +46,7 @@ const GlossaryAssociations = ({ glossary }) => { ); if (!response.errors) { setIsAdmin( - ['Admin'].indexOf( - response.data.getGlossary.userRoleForGlossary - ) !== -1 + ['Admin'].indexOf(response.data.getGlossary.userRoleForGlossary) !== -1 ); setItems(response.data.getGlossary.associations); } else { @@ -277,5 +271,3 @@ const GlossaryAssociations = ({ glossary }) => { GlossaryAssociations.propTypes = { glossary: PropTypes.object.isRequired }; - -export default GlossaryAssociations; diff --git a/frontend/src/views/Glossaries/GlossaryCreateCategoryForm.js b/frontend/src/modules/Glossaries/components/GlossaryCreateCategoryForm.js similarity index 95% rename from frontend/src/views/Glossaries/GlossaryCreateCategoryForm.js rename to frontend/src/modules/Glossaries/components/GlossaryCreateCategoryForm.js index 3b490d323..1c316f9f9 100644 --- a/frontend/src/views/Glossaries/GlossaryCreateCategoryForm.js +++ b/frontend/src/modules/Glossaries/components/GlossaryCreateCategoryForm.js @@ -1,6 +1,4 @@ -import { useEffect, useState } from 'react'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; +import { LoadingButton } from '@mui/lab'; import { Box, CardContent, @@ -10,14 +8,15 @@ import { TextField, Typography } from '@mui/material'; -import { LoadingButton } from '@mui/lab'; +import { Formik } from 'formik'; import { useSnackbar } from 'notistack'; import PropTypes from 
'prop-types'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import addCategory from '../../api/Glossary/addCategory'; +import { useEffect, useState } from 'react'; +import * as Yup from 'yup'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { createCategory } from '../services'; -const GlossaryCreateCategoryForm = ({ +export const GlossaryCreateCategoryForm = ({ client, data, refresh, @@ -36,7 +35,7 @@ const GlossaryCreateCategoryForm = ({ async function submit(values, setStatus, setSubmitting, setErrors) { try { const response = await client.mutate( - addCategory({ + createCategory({ parentUri: data.nodeUri, input: { label: values.label, @@ -200,4 +199,3 @@ GlossaryCreateCategoryForm.propTypes = { onClose: PropTypes.func.isRequired, open: PropTypes.bool.isRequired }; -export default GlossaryCreateCategoryForm; diff --git a/frontend/src/views/Glossaries/GlossaryCreateTermForm.js b/frontend/src/modules/Glossaries/components/GlossaryCreateTermForm.js similarity index 95% rename from frontend/src/views/Glossaries/GlossaryCreateTermForm.js rename to frontend/src/modules/Glossaries/components/GlossaryCreateTermForm.js index 607c52101..56a63b4e3 100644 --- a/frontend/src/views/Glossaries/GlossaryCreateTermForm.js +++ b/frontend/src/modules/Glossaries/components/GlossaryCreateTermForm.js @@ -1,6 +1,4 @@ -import { useEffect, useState } from 'react'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; +import { LoadingButton } from '@mui/lab'; import { Box, CardContent, @@ -10,14 +8,15 @@ import { TextField, Typography } from '@mui/material'; -import { LoadingButton } from '@mui/lab'; +import { Formik } from 'formik'; import { useSnackbar } from 'notistack'; import PropTypes from 'prop-types'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import addTerm from '../../api/Glossary/addTerm'; +import { useEffect, useState } from 'react'; +import * as Yup from 'yup'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { createTerm } from '../services'; -const GlossaryCreateTermForm = ({ +export const GlossaryCreateTermForm = ({ client, data, refresh, @@ -36,7 +35,7 @@ const GlossaryCreateTermForm = ({ async function submit(values, setStatus, setSubmitting, setErrors) { try { const response = await client.mutate( - addTerm({ + createTerm({ parentUri: data.nodeUri, input: { label: values.label, @@ -200,5 +199,3 @@ GlossaryCreateTermForm.propTypes = { onClose: PropTypes.func.isRequired, open: PropTypes.bool.isRequired }; - -export default GlossaryCreateTermForm; diff --git a/frontend/src/views/Glossaries/GlossaryListItem.js b/frontend/src/modules/Glossaries/components/GlossaryListItem.js similarity index 96% rename from frontend/src/views/Glossaries/GlossaryListItem.js rename to frontend/src/modules/Glossaries/components/GlossaryListItem.js index 9a4b4046b..1980eab5b 100644 --- a/frontend/src/views/Glossaries/GlossaryListItem.js +++ b/frontend/src/modules/Glossaries/components/GlossaryListItem.js @@ -8,16 +8,15 @@ import { Tooltip, Typography } from '@mui/material'; -import { Link as RouterLink } from 'react-router-dom'; import PropTypes from 'prop-types'; -import { useNavigate } from 'react-router'; -import { BsBookmark, BsTag } from 'react-icons/bs'; import React from 'react'; +import { BsBookmark, BsTag } from 'react-icons/bs'; import * as FaIcons from 'react-icons/fa'; -import IconAvatar from '../../components/IconAvatar'; -import useCardStyle from 
'../../hooks/useCardStyle'; +import { useNavigate } from 'react-router'; +import { Link as RouterLink } from 'react-router-dom'; +import { IconAvatar, useCardStyle } from 'design'; -const GlossaryListItem = (props) => { +export const GlossaryListItem = (props) => { const { glossary } = props; const classes = useCardStyle(); const navigate = useNavigate(); @@ -180,4 +179,3 @@ const GlossaryListItem = (props) => { GlossaryListItem.propTypes = { glossary: PropTypes.object.isRequired }; -export default GlossaryListItem; diff --git a/frontend/src/views/Glossaries/GlossaryManagement.js b/frontend/src/modules/Glossaries/components/GlossaryManagement.js similarity index 92% rename from frontend/src/views/Glossaries/GlossaryManagement.js rename to frontend/src/modules/Glossaries/components/GlossaryManagement.js index 3308ebe9c..e5af3c30b 100644 --- a/frontend/src/views/Glossaries/GlossaryManagement.js +++ b/frontend/src/modules/Glossaries/components/GlossaryManagement.js @@ -1,21 +1,20 @@ +import ArrowDropDownIcon from '@mui/icons-material/ArrowDropDown'; +import ArrowRightIcon from '@mui/icons-material/ArrowRight'; +import { LoadingButton, TreeItem, TreeView } from '@mui/lab'; import { Box, CircularProgress, Typography } from '@mui/material'; +import { makeStyles } from '@mui/styles'; import PropTypes from 'prop-types'; import React, { useCallback, useEffect, useState } from 'react'; -import { makeStyles } from '@mui/styles'; -import { LoadingButton, TreeItem, TreeView } from '@mui/lab'; -import ArrowDropDownIcon from '@mui/icons-material/ArrowDropDown'; -import ArrowRightIcon from '@mui/icons-material/ArrowRight'; import * as BsIcons from 'react-icons/bs'; -import Plus from '../../icons/Plus'; -import GlossaryNodeForm from './GlossaryNodeForm'; -import { useDispatch } from '../../store'; -import * as Defaults from '../../components/defaults'; -import listGlossaryTree from '../../api/Glossary/listGlossaryTree'; -import { SET_ERROR } from '../../store/errorReducer'; -import listToTree from '../../utils/listToTree'; -import ObjectBrief from '../../components/ObjectBrief'; -import GlossaryCreateCategoryForm from './GlossaryCreateCategoryForm'; -import GlossaryCreateTermForm from './GlossaryCreateTermForm'; +import { Defaults, ObjectBrief, PlusIcon } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { listToTree } from 'utils'; +import { getGlossaryTree } from '../services'; +import { + GlossaryCreateCategoryForm, + GlossaryCreateTermForm, + GlossaryNodeForm +} from '../components'; const useTreeItemStyles = makeStyles((theme) => ({ root: { @@ -120,11 +119,12 @@ const useStyles = makeStyles({ maxWidth: 400 } }); -const GlossaryManagement = (props) => { + +export const GlossaryManagement = (props) => { const { glossary, isAdmin, client } = props; const dispatch = useDispatch(); const [fetchingItems, setFetchingItems] = useState(true); - const [items, setItems] = useState(Defaults.PagedResponseDefault); + const [items, setItems] = useState(Defaults.pagedResponse); const [nodes, setNodes] = useState([]); const classes = useStyles(); const [data, setData] = useState(glossary); @@ -151,7 +151,7 @@ const GlossaryManagement = (props) => { setFetchingItems(true); setData(glossary); const response = await client.query( - listGlossaryTree({ nodeUri: glossary.nodeUri, filter: {pageSize: 500} }) + getGlossaryTree({ nodeUri: glossary.nodeUri, filter: { pageSize: 500 } }) ); if (!response.errors && response.data.getGlossary !== null) { setItems({ ...response.data.getGlossary.tree }); @@ 
-342,7 +342,7 @@ const GlossaryManagement = (props) => { color="primary" disabled={data.__typename !== 'Glossary'} onClick={handleCategoryCreateModalOpen} - startIcon={} + startIcon={} sx={{ m: 1 }} variant="outlined" > @@ -352,7 +352,7 @@ const GlossaryManagement = (props) => { color="primary" disabled={data.__typename === 'Term'} onClick={handleTermCreateModalOpen} - startIcon={} + startIcon={} sx={{ m: 1 }} variant="outlined" > @@ -416,5 +416,3 @@ GlossaryManagement.propTypes = { isAdmin: PropTypes.bool.isRequired, client: PropTypes.func.isRequired }; - -export default GlossaryManagement; diff --git a/frontend/src/views/Glossaries/GlossaryNodeForm.js b/frontend/src/modules/Glossaries/components/GlossaryNodeForm.js similarity index 92% rename from frontend/src/views/Glossaries/GlossaryNodeForm.js rename to frontend/src/modules/Glossaries/components/GlossaryNodeForm.js index 724bd3e8e..cf1916ba8 100644 --- a/frontend/src/views/Glossaries/GlossaryNodeForm.js +++ b/frontend/src/modules/Glossaries/components/GlossaryNodeForm.js @@ -1,6 +1,4 @@ -import { useEffect, useState } from 'react'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; +import { LoadingButton } from '@mui/lab'; import { Box, Card, @@ -9,18 +7,21 @@ import { FormHelperText, TextField } from '@mui/material'; -import { LoadingButton } from '@mui/lab'; +import { Formik } from 'formik'; import { useSnackbar } from 'notistack'; import PropTypes from 'prop-types'; -import updateGlossary from '../../api/Glossary/updateGlossary'; -import updateCategory from '../../api/Glossary/updateCategory'; -import updateTerm from '../../api/Glossary/updateTerm'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import deleteCategory from '../../api/Glossary/deleteCategory'; -import deleteTerm from '../../api/Glossary/deleteTerm'; +import { useEffect, useState } from 'react'; +import * as Yup from 'yup'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { + deleteCategory, + deleteTerm, + updateCategory, + updateGlossary, + updateTerm +} from '../services'; -const GlossaryNodeForm = ({ client, data, refresh, isAdmin }) => { +export const GlossaryNodeForm = ({ client, data, refresh, isAdmin }) => { const dispatch = useDispatch(); const [formData, setFormData] = useState(data); const [deleting, setDeleting] = useState(false); @@ -206,4 +207,3 @@ GlossaryNodeForm.propTypes = { client: PropTypes.func.isRequired, refresh: PropTypes.func.isRequired }; -export default GlossaryNodeForm; diff --git a/frontend/src/modules/Glossaries/components/index.js b/frontend/src/modules/Glossaries/components/index.js new file mode 100644 index 000000000..0a8ae90c4 --- /dev/null +++ b/frontend/src/modules/Glossaries/components/index.js @@ -0,0 +1,6 @@ +export * from './GlossaryAssociations'; +export * from './GlossaryCreateCategoryForm'; +export * from './GlossaryCreateTermForm'; +export * from './GlossaryListItem'; +export * from './GlossaryManagement'; +export * from './GlossaryNodeForm'; diff --git a/frontend/src/modules/Glossaries/services/approveTermAssociation.js b/frontend/src/modules/Glossaries/services/approveTermAssociation.js new file mode 100644 index 000000000..688eac2d2 --- /dev/null +++ b/frontend/src/modules/Glossaries/services/approveTermAssociation.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const approveTermAssociation = (linkUri) => ({ + variables: { + linkUri + }, + mutation: gql` + mutation ApproveTermAssociation($linkUri: String!) 
{ + approveTermAssociation(linkUri: $linkUri) + } + ` +}); diff --git a/frontend/src/modules/Glossaries/services/createCategory.js b/frontend/src/modules/Glossaries/services/createCategory.js new file mode 100644 index 000000000..148a4a215 --- /dev/null +++ b/frontend/src/modules/Glossaries/services/createCategory.js @@ -0,0 +1,20 @@ +import { gql } from 'apollo-boost'; + +export const createCategory = ({ input, parentUri }) => ({ + variables: { + input, + parentUri + }, + mutation: gql` + mutation CreateCategory($parentUri: String!, $input: CreateCategoryInput) { + createCategory(parentUri: $parentUri, input: $input) { + nodeUri + label + path + readme + created + owner + } + } + ` +}); diff --git a/frontend/src/api/Glossary/createGlossary.js b/frontend/src/modules/Glossaries/services/createGlossary.js similarity index 80% rename from frontend/src/api/Glossary/createGlossary.js rename to frontend/src/modules/Glossaries/services/createGlossary.js index 681541268..37e5c3a15 100644 --- a/frontend/src/api/Glossary/createGlossary.js +++ b/frontend/src/modules/Glossaries/services/createGlossary.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const createGlossary = (input) => ({ +export const createGlossary = (input) => ({ variables: { input }, @@ -17,5 +17,3 @@ const createGlossary = (input) => ({ } ` }); - -export default createGlossary; diff --git a/frontend/src/modules/Glossaries/services/createTerm.js b/frontend/src/modules/Glossaries/services/createTerm.js new file mode 100644 index 000000000..82202e8bc --- /dev/null +++ b/frontend/src/modules/Glossaries/services/createTerm.js @@ -0,0 +1,20 @@ +import { gql } from 'apollo-boost'; + +export const createTerm = ({ input, parentUri }) => ({ + variables: { + input, + parentUri + }, + mutation: gql` + mutation CreateTerm($parentUri: String!, $input: CreateTermInput) { + createTerm(parentUri: $parentUri, input: $input) { + nodeUri + label + path + readme + created + owner + } + } + ` +}); diff --git a/frontend/src/modules/Glossaries/services/deleteCategory.js b/frontend/src/modules/Glossaries/services/deleteCategory.js new file mode 100644 index 000000000..0ff1c3b9d --- /dev/null +++ b/frontend/src/modules/Glossaries/services/deleteCategory.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const deleteCategory = (nodeUri) => ({ + variables: { + nodeUri + }, + mutation: gql` + mutation deleteCategory($nodeUri: String!) { + deleteCategory(nodeUri: $nodeUri) + } + ` +}); diff --git a/frontend/src/modules/Glossaries/services/deleteGlossary.js b/frontend/src/modules/Glossaries/services/deleteGlossary.js new file mode 100644 index 000000000..7ab462d41 --- /dev/null +++ b/frontend/src/modules/Glossaries/services/deleteGlossary.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const deleteGlossary = (nodeUri) => ({ + variables: { + nodeUri + }, + mutation: gql` + mutation deleteGlossary($nodeUri: String!) { + deleteGlossary(nodeUri: $nodeUri) + } + ` +}); diff --git a/frontend/src/modules/Glossaries/services/deleteTerm.js b/frontend/src/modules/Glossaries/services/deleteTerm.js new file mode 100644 index 000000000..47b464c27 --- /dev/null +++ b/frontend/src/modules/Glossaries/services/deleteTerm.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const deleteTerm = (nodeUri) => ({ + variables: { + nodeUri + }, + mutation: gql` + mutation deleteTerm($nodeUri: String!) 
{ + deleteTerm(nodeUri: $nodeUri) + } + ` +}); diff --git a/frontend/src/modules/Glossaries/services/dismissTermAssociation.js b/frontend/src/modules/Glossaries/services/dismissTermAssociation.js new file mode 100644 index 000000000..015a2572a --- /dev/null +++ b/frontend/src/modules/Glossaries/services/dismissTermAssociation.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const dismissTermAssociation = (linkUri) => ({ + variables: { + linkUri + }, + mutation: gql` + mutation DismissTermAssociation($linkUri: String!) { + dismissTermAssociation(linkUri: $linkUri) + } + ` +}); diff --git a/frontend/src/api/Glossary/getGlossary.js b/frontend/src/modules/Glossaries/services/getGlossary.js similarity index 92% rename from frontend/src/api/Glossary/getGlossary.js rename to frontend/src/modules/Glossaries/services/getGlossary.js index db8156728..f96905462 100644 --- a/frontend/src/api/Glossary/getGlossary.js +++ b/frontend/src/modules/Glossaries/services/getGlossary.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getGlossary = (nodeUri) => ({ +export const getGlossary = (nodeUri) => ({ variables: { nodeUri }, @@ -43,5 +43,3 @@ const getGlossary = (nodeUri) => ({ } ` }); - -export default getGlossary; diff --git a/frontend/src/modules/Glossaries/services/getGlossaryTree.js b/frontend/src/modules/Glossaries/services/getGlossaryTree.js new file mode 100644 index 000000000..82177e1d0 --- /dev/null +++ b/frontend/src/modules/Glossaries/services/getGlossaryTree.js @@ -0,0 +1,81 @@ +import { gql } from 'apollo-boost'; + +export const getGlossaryTree = ({ nodeUri, filter }) => ({ + variables: { + nodeUri, + filter + }, + query: gql` + query GetGlossaryTree( + $nodeUri: String! + $filter: GlossaryNodeSearchFilter + ) { + getGlossary(nodeUri: $nodeUri) { + nodeUri + label + readme + created + owner + status + path + admin + deleted + categories { + count + page + pages + hasNext + hasPrevious + nodes { + nodeUri + parentUri + label + readme + stats { + categories + terms + } + status + created + } + } + tree(filter: $filter) { + count + hasNext + hasPrevious + page + pages + nodes { + __typename + ... on Glossary { + nodeUri + label + readme + created + owner + path + } + ... on Category { + nodeUri + label + parentUri + readme + created + owner + path + } + ... 
on Term { + nodeUri + parentUri + label + readme + created + owner + path + } + } + } + } + } + ` +}); diff --git a/frontend/src/modules/Glossaries/services/index.js b/frontend/src/modules/Glossaries/services/index.js new file mode 100644 index 000000000..b71e409eb --- /dev/null +++ b/frontend/src/modules/Glossaries/services/index.js @@ -0,0 +1,15 @@ +export * from './approveTermAssociation'; +export * from './createCategory'; +export * from './createGlossary'; +export * from './createTerm'; +export * from './deleteCategory'; +export * from './deleteGlossary'; +export * from './deleteTerm'; +export * from './dismissTermAssociation'; +export * from './getGlossary'; +export * from './getGlossaryTree'; +export * from './listGlossaries'; +export * from './listGlossaryAssociations'; +export * from './updateCategory'; +export * from './updateGlossary'; +export * from './updateTerm'; diff --git a/frontend/src/api/Glossary/listGlossaries.js b/frontend/src/modules/Glossaries/services/listGlossaries.js similarity index 88% rename from frontend/src/api/Glossary/listGlossaries.js rename to frontend/src/modules/Glossaries/services/listGlossaries.js index 16e6783ab..b4b29238a 100644 --- a/frontend/src/api/Glossary/listGlossaries.js +++ b/frontend/src/modules/Glossaries/services/listGlossaries.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listGlossaries = (filter) => ({ +export const listGlossaries = (filter) => ({ variables: { filter }, @@ -32,5 +32,3 @@ const listGlossaries = (filter) => ({ } ` }); - -export default listGlossaries; diff --git a/frontend/src/api/Glossary/listGlossaryAssociations.js b/frontend/src/modules/Glossaries/services/listGlossaryAssociations.js similarity index 92% rename from frontend/src/api/Glossary/listGlossaryAssociations.js rename to frontend/src/modules/Glossaries/services/listGlossaryAssociations.js index 091d5a497..58cc22ca2 100644 --- a/frontend/src/api/Glossary/listGlossaryAssociations.js +++ b/frontend/src/modules/Glossaries/services/listGlossaryAssociations.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listGlossaryAssociations = ({ nodeUri, filter }) => ({ +export const listGlossaryAssociations = ({ nodeUri, filter }) => ({ variables: { nodeUri, filter @@ -64,5 +64,3 @@ const listGlossaryAssociations = ({ nodeUri, filter }) => ({ } ` }); - -export default listGlossaryAssociations; diff --git a/frontend/src/api/Glossary/updateCategory.js b/frontend/src/modules/Glossaries/services/updateCategory.js similarity index 80% rename from frontend/src/api/Glossary/updateCategory.js rename to frontend/src/modules/Glossaries/services/updateCategory.js index 3bfabfe6f..8f35779af 100644 --- a/frontend/src/api/Glossary/updateCategory.js +++ b/frontend/src/modules/Glossaries/services/updateCategory.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const updateCategory = ({ input, nodeUri }) => ({ +export const updateCategory = ({ input, nodeUri }) => ({ variables: { input, nodeUri @@ -18,5 +18,3 @@ const updateCategory = ({ input, nodeUri }) => ({ } ` }); - -export default updateCategory; diff --git a/frontend/src/api/Glossary/updateGlossary.js b/frontend/src/modules/Glossaries/services/updateGlossary.js similarity index 80% rename from frontend/src/api/Glossary/updateGlossary.js rename to frontend/src/modules/Glossaries/services/updateGlossary.js index 56d67ed96..394925aa7 100644 --- a/frontend/src/api/Glossary/updateGlossary.js +++ b/frontend/src/modules/Glossaries/services/updateGlossary.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; 
-const updateGlossary = ({ input, nodeUri }) => ({ +export const updateGlossary = ({ input, nodeUri }) => ({ variables: { input, nodeUri @@ -18,5 +18,3 @@ const updateGlossary = ({ input, nodeUri }) => ({ } ` }); - -export default updateGlossary; diff --git a/frontend/src/api/Glossary/updateTerm.js b/frontend/src/modules/Glossaries/services/updateTerm.js similarity index 81% rename from frontend/src/api/Glossary/updateTerm.js rename to frontend/src/modules/Glossaries/services/updateTerm.js index b54cb04e9..6884ea0af 100644 --- a/frontend/src/api/Glossary/updateTerm.js +++ b/frontend/src/modules/Glossaries/services/updateTerm.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const updateTerm = ({ input, nodeUri }) => ({ +export const updateTerm = ({ input, nodeUri }) => ({ variables: { input, nodeUri @@ -18,5 +18,3 @@ const updateTerm = ({ input, nodeUri }) => ({ } ` }); - -export default updateTerm; diff --git a/frontend/src/views/Glossaries/GlossaryCreateForm.js b/frontend/src/modules/Glossaries/views/GlossaryCreateForm.js similarity index 95% rename from frontend/src/views/Glossaries/GlossaryCreateForm.js rename to frontend/src/modules/Glossaries/views/GlossaryCreateForm.js index 84c6c4c93..7cd4437a4 100644 --- a/frontend/src/views/Glossaries/GlossaryCreateForm.js +++ b/frontend/src/modules/Glossaries/views/GlossaryCreateForm.js @@ -1,7 +1,5 @@ -import { Link as RouterLink, useNavigate } from 'react-router-dom'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; -import { useSnackbar } from 'notistack'; +import { LoadingButton } from '@mui/lab'; +import Autocomplete from '@mui/lab/Autocomplete'; import { Box, Breadcrumbs, @@ -16,18 +14,16 @@ import { TextField, Typography } from '@mui/material'; -import { Helmet } from 'react-helmet-async'; -import { LoadingButton } from '@mui/lab'; -import Autocomplete from '@mui/lab/Autocomplete'; +import { Formik } from 'formik'; +import { useSnackbar } from 'notistack'; import React from 'react'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import useSettings from '../../hooks/useSettings'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import createGlossary from '../../api/Glossary/createGlossary'; -import useGroups from '../../hooks/useGroups'; +import { Helmet } from 'react-helmet-async'; +import { Link as RouterLink, useNavigate } from 'react-router-dom'; +import * as Yup from 'yup'; +import { ArrowLeftIcon, ChevronRightIcon, useSettings } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient, useGroups } from 'services'; +import { createGlossary } from '../services'; const GlossaryCreateForm = (props) => { const navigate = useNavigate(); diff --git a/frontend/src/views/Glossaries/GlossaryList.js b/frontend/src/modules/Glossaries/views/GlossaryList.js similarity index 84% rename from frontend/src/views/Glossaries/GlossaryList.js rename to frontend/src/modules/Glossaries/views/GlossaryList.js index 34cee8813..6754dcf6d 100644 --- a/frontend/src/views/Glossaries/GlossaryList.js +++ b/frontend/src/modules/Glossaries/views/GlossaryList.js @@ -1,5 +1,3 @@ -import { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink } from 'react-router-dom'; import { Box, Breadcrumbs, @@ -10,18 +8,21 @@ import { Typography } from '@mui/material'; import CircularProgress from '@mui/material/CircularProgress'; +import { 
useCallback, useEffect, useState } from 'react'; import { Helmet } from 'react-helmet-async'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import PlusIcon from '../../icons/Plus'; -import useSettings from '../../hooks/useSettings'; -import SearchInput from '../../components/SearchInput'; -import Pager from '../../components/Pager'; -import GlossaryListItem from './GlossaryListItem'; -import { useDispatch } from '../../store'; -import { SET_ERROR } from '../../store/errorReducer'; -import listGlossaries from '../../api/Glossary/listGlossaries'; +import { Link as RouterLink } from 'react-router-dom'; +import { + ChevronRightIcon, + Defaults, + Pager, + PlusIcon, + SearchInput, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { listGlossaries } from '../services'; +import { GlossaryListItem } from '../components'; function GlossariesPageHeader() { return ( @@ -74,8 +75,8 @@ function GlossariesPageHeader() { const GlossaryList = () => { const dispatch = useDispatch(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); + const [items, setItems] = useState(Defaults.pagedResponse); + const [filter, setFilter] = useState(Defaults.filter); const { settings } = useSettings(); const [inputValue, setInputValue] = useState(''); const [loading, setLoading] = useState(true); diff --git a/frontend/src/views/Glossaries/GlossaryView.js b/frontend/src/modules/Glossaries/views/GlossaryView.js similarity index 89% rename from frontend/src/views/Glossaries/GlossaryView.js rename to frontend/src/modules/Glossaries/views/GlossaryView.js index f033fcc2a..fc68ed332 100644 --- a/frontend/src/views/Glossaries/GlossaryView.js +++ b/frontend/src/modules/Glossaries/views/GlossaryView.js @@ -1,6 +1,4 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useParams } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; +import { Info } from '@mui/icons-material'; import { Box, Breadcrumbs, @@ -14,22 +12,23 @@ import { Tabs, Typography } from '@mui/material'; +import { useSnackbar } from 'notistack'; +import * as PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; import { FaTrash } from 'react-icons/fa'; import { useNavigate } from 'react-router'; -import * as PropTypes from 'prop-types'; -import { useSnackbar } from 'notistack'; -import { Info } from '@mui/icons-material'; -import useSettings from '../../hooks/useSettings'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import DeleteObjectWithFrictionModal from '../../components/DeleteObjectWithFrictionModal'; -import getGlossary from '../../api/Glossary/getGlossary'; -import GlossaryAssociations from './GlossaryAssociations'; -import GlossaryManagement from './GlossaryManagement'; -import useAuth from '../../hooks/useAuth'; -import deleteGlossary from '../../api/Glossary/deleteGlossary'; +import { Link as RouterLink, useParams } from 'react-router-dom'; +import { useAuth } from 'authentication'; +import { + ChevronRightIcon, + DeleteObjectWithFrictionModal, + useSettings +} from 
'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { deleteGlossary, getGlossary } from '../services'; +import { GlossaryAssociations, GlossaryManagement } from '../components'; const tabs = [ { label: 'Overview', value: 'overview', icon: }, @@ -97,6 +96,7 @@ GlossaryViewPageHeader.propTypes = { deleteFunction: PropTypes.func.isRequired, isAdmin: PropTypes.bool.isRequired }; + const GlossaryView = () => { const dispatch = useDispatch(); const { settings } = useSettings(); @@ -123,9 +123,7 @@ const GlossaryView = () => { const response = await client.query(getGlossary(params.uri)); if (!response.errors && response.data.getGlossary !== null) { setIsAdmin( - ['Admin'].indexOf( - response.data.getGlossary.userRoleForGlossary - ) !== -1 + ['Admin'].indexOf(response.data.getGlossary.userRoleForGlossary) !== -1 ); setGlossary(response.data.getGlossary); } else { diff --git a/frontend/src/modules/MLStudio/components/MLStudioListItem.js b/frontend/src/modules/MLStudio/components/MLStudioListItem.js new file mode 100644 index 000000000..9456ae9d6 --- /dev/null +++ b/frontend/src/modules/MLStudio/components/MLStudioListItem.js @@ -0,0 +1,265 @@ +import { + Box, + Button, + Card, + Divider, + Grid, + Link, + Tooltip, + Typography +} from '@mui/material'; +import PropTypes from 'prop-types'; +import React from 'react'; +import * as FaIcons from 'react-icons/fa'; +import * as FiIcons from 'react-icons/fi'; +import { FiCodesandbox } from 'react-icons/fi'; +import { useNavigate } from 'react-router'; +import { Link as RouterLink } from 'react-router-dom'; +import { IconAvatar, Label, StackStatus, useCardStyle } from 'design'; + +export const MLStudioListItem = (props) => { + const { mlstudiouser } = props; + const classes = useCardStyle(); + const navigate = useNavigate(); + return ( + + + + + } /> + + { + navigate( + `/console/mlstudio/${mlstudiouser.sagemakerStudioUserUri}` + ); + }} + sx={{ + width: '99%', + whiteSpace: 'nowrap', + alignItems: 'left', + overflow: 'hidden', + textOverflow: 'ellipsis', + WebkitBoxOrient: 'vertical', + WebkitLineClamp: 2 + }} + > + + {mlstudiouser.label} + + + + by{' '} + + {mlstudiouser.owner} + + + + + + + + + + {mlstudiouser.description || 'No description provided'} + + + + + + + + + Role + + + + + + + + + + + + Team + + + + + + {mlstudiouser.environment?.SamlGroupName || '-'} + + + + + + + + + + Account + + + + + {mlstudiouser.environment.AwsAccountId} + + + + + + + + + Region + + + + + {mlstudiouser.environment.region} + + + + + + + + + Status + + + + + + + + + + + + + + + + + + + + + ); +}; +MLStudioListItem.propTypes = { + mlstudiouser: PropTypes.object.isRequired +}; diff --git a/frontend/src/modules/MLStudio/components/MLStudioOverview.js b/frontend/src/modules/MLStudio/components/MLStudioOverview.js new file mode 100644 index 000000000..d66dfbe7f --- /dev/null +++ b/frontend/src/modules/MLStudio/components/MLStudioOverview.js @@ -0,0 +1,42 @@ +import { Box, Grid } from '@mui/material'; +import PropTypes from 'prop-types'; +import { ObjectBrief, ObjectMetadata } from 'design'; + +export const MLStudioOverview = (props) => { + const { mlstudiouser, ...other } = props; + + return ( + + + + 0 + ? 
mlstudiouser.tags + : ['-'] + } + /> + + + + + + + ); +}; + +MLStudioOverview.propTypes = { + mlstudiouser: PropTypes.object.isRequired +}; diff --git a/frontend/src/modules/MLStudio/components/index.js b/frontend/src/modules/MLStudio/components/index.js new file mode 100644 index 000000000..8ec16245b --- /dev/null +++ b/frontend/src/modules/MLStudio/components/index.js @@ -0,0 +1,2 @@ +export * from './MLStudioListItem'; +export * from './MLStudioOverview'; diff --git a/frontend/src/modules/MLStudio/services/createSagemakerStudioUser.js b/frontend/src/modules/MLStudio/services/createSagemakerStudioUser.js new file mode 100644 index 000000000..26ca3ceab --- /dev/null +++ b/frontend/src/modules/MLStudio/services/createSagemakerStudioUser.js @@ -0,0 +1,19 @@ +import { gql } from 'apollo-boost'; + +export const createSagemakerStudioUser = (input) => ({ + variables: { + input + }, + mutation: gql` + mutation createSagemakerStudioUser($input: NewSagemakerStudioUserInput) { + createSagemakerStudioUser(input: $input) { + sagemakerStudioUserUri + name + label + created + description + tags + } + } + ` +}); diff --git a/frontend/src/modules/MLStudio/services/deleteSagemakerStudioUser.js b/frontend/src/modules/MLStudio/services/deleteSagemakerStudioUser.js new file mode 100644 index 000000000..f28bc5893 --- /dev/null +++ b/frontend/src/modules/MLStudio/services/deleteSagemakerStudioUser.js @@ -0,0 +1,22 @@ +import { gql } from 'apollo-boost'; + +export const deleteSagemakerStudioUser = ( + sagemakerStudioUserUri, + deleteFromAWS +) => ({ + variables: { + sagemakerStudioUserUri, + deleteFromAWS + }, + mutation: gql` + mutation deleteSagemakerStudioUser( + $sagemakerStudioUserUri: String! + $deleteFromAWS: Boolean + ) { + deleteSagemakerStudioUser( + sagemakerStudioUserUri: $sagemakerStudioUserUri + deleteFromAWS: $deleteFromAWS + ) + } + ` +}); diff --git a/frontend/src/modules/MLStudio/services/getSagemakerStudioUser.js b/frontend/src/modules/MLStudio/services/getSagemakerStudioUser.js new file mode 100644 index 000000000..bcea78751 --- /dev/null +++ b/frontend/src/modules/MLStudio/services/getSagemakerStudioUser.js @@ -0,0 +1,55 @@ +import { gql } from 'apollo-boost'; + +export const getSagemakerStudioUser = (sagemakerStudioUserUri) => ({ + variables: { + sagemakerStudioUserUri + }, + query: gql` + query getSagemakerStudioUser($sagemakerStudioUserUri: String!) 
{ + getSagemakerStudioUser(sagemakerStudioUserUri: $sagemakerStudioUserUri) { + sagemakerStudioUserUri + name + owner + description + label + created + tags + userRoleForSagemakerStudioUser + sagemakerStudioUserStatus + SamlAdminGroupName + sagemakerStudioUserApps { + DomainId + UserName + AppType + AppName + Status + } + environment { + label + name + environmentUri + AwsAccountId + region + EnvironmentDefaultIAMRoleArn + } + organization { + label + name + organizationUri + } + stack { + stack + status + stackUri + targetUri + accountid + region + stackid + link + outputs + resources + } + } + } + ` +}); diff --git a/frontend/src/modules/MLStudio/services/getSagemakerStudioUserPresignedUrl.js b/frontend/src/modules/MLStudio/services/getSagemakerStudioUserPresignedUrl.js new file mode 100644 index 000000000..6255c71b9 --- /dev/null +++ b/frontend/src/modules/MLStudio/services/getSagemakerStudioUserPresignedUrl.js @@ -0,0 +1,14 @@ +import { gql } from 'apollo-boost'; + +export const getSagemakerStudioUserPresignedUrl = (sagemakerStudioUserUri) => ({ + variables: { + sagemakerStudioUserUri + }, + query: gql` + query getSagemakerStudioUserPresignedUrl($sagemakerStudioUserUri: String!) { + getSagemakerStudioUserPresignedUrl( + sagemakerStudioUserUri: $sagemakerStudioUserUri + ) + } + ` +}); diff --git a/frontend/src/modules/MLStudio/services/index.js b/frontend/src/modules/MLStudio/services/index.js new file mode 100644 index 000000000..3a3b72cc7 --- /dev/null +++ b/frontend/src/modules/MLStudio/services/index.js @@ -0,0 +1,5 @@ +export * from './createSagemakerStudioUser'; +export * from './deleteSagemakerStudioUser'; +export * from './getSagemakerStudioUser'; +export * from './getSagemakerStudioUserPresignedUrl'; +export * from './listSagemakerStudioUsers'; diff --git a/frontend/src/modules/MLStudio/services/listSagemakerStudioUsers.js b/frontend/src/modules/MLStudio/services/listSagemakerStudioUsers.js new file mode 100644 index 000000000..d32b492a2 --- /dev/null +++ b/frontend/src/modules/MLStudio/services/listSagemakerStudioUsers.js @@ -0,0 +1,46 @@ +import { gql } from 'apollo-boost'; + +export const listSagemakerStudioUsers = (filter) => ({ + variables: { + filter + }, + query: gql` + query listSagemakerStudioUsers($filter: SagemakerStudioUserFilter) { + listSagemakerStudioUsers(filter: $filter) { + count + page + pages + hasNext + hasPrevious + nodes { + sagemakerStudioUserUri + name + owner + description + label + created + tags + sagemakerStudioUserStatus + userRoleForSagemakerStudioUser + environment { + label + name + environmentUri + AwsAccountId + region + SamlGroupName + } + organization { + label + name + organizationUri + } + stack { + stack + status + } + } + } + } + ` +}); diff --git a/frontend/src/modules/MLStudio/views/MLStudioCreateForm.js b/frontend/src/modules/MLStudio/views/MLStudioCreateForm.js new file mode 100644 index 000000000..62b59ec0e --- /dev/null +++ b/frontend/src/modules/MLStudio/views/MLStudioCreateForm.js @@ -0,0 +1,422 @@ +import { LoadingButton } from '@mui/lab'; +import { + Box, + Breadcrumbs, + Button, + Card, + CardContent, + CardHeader, + CircularProgress, + Container, + FormHelperText, + Grid, + Link, + MenuItem, + TextField, + Typography +} from '@mui/material'; +import { Formik } from 'formik'; +import { useSnackbar } from 'notistack'; +import { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; +import { Link as RouterLink, useNavigate } from 'react-router-dom'; +import * as Yup from 'yup'; +import { 
+ ArrowLeftIcon, + ChevronRightIcon, + ChipInput, + Defaults, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { listEnvironmentGroups, listEnvironments, useClient } from 'services'; + +import { createSagemakerStudioUser } from '../services'; + +const MLStudioCreateForm = (props) => { + const navigate = useNavigate(); + const { enqueueSnackbar } = useSnackbar(); + const dispatch = useDispatch(); + const client = useClient(); + const { settings } = useSettings(); + const [loading, setLoading] = useState(true); + const [groupOptions, setGroupOptions] = useState([]); + const [environmentOptions, setEnvironmentOptions] = useState([]); + const fetchEnvironments = useCallback(async () => { + setLoading(true); + const response = await client.query( + listEnvironments({ filter: Defaults.selectListFilter }) + ); + if (!response.errors) { + setEnvironmentOptions( + response.data.listEnvironments.nodes.map((e) => ({ + ...e, + value: e.environmentUri, + label: e.label + })) + ); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + setLoading(false); + }, [client, dispatch]); + const fetchGroups = async (environmentUri) => { + try { + const response = await client.query( + listEnvironmentGroups({ + filter: Defaults.selectListFilter, + environmentUri + }) + ); + if (!response.errors) { + setGroupOptions( + response.data.listEnvironmentGroups.nodes.map((g) => ({ + value: g.groupUri, + label: g.groupUri + })) + ); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (e) { + dispatch({ type: SET_ERROR, error: e.message }); + } + }; + useEffect(() => { + if (client) { + fetchEnvironments().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + } + }, [client, dispatch, fetchEnvironments]); + + async function submit(values, setStatus, setSubmitting, setErrors) { + try { + const response = await client.mutate( + createSagemakerStudioUser({ + label: values.label, + environmentUri: values.environment.environmentUri, + description: values.description, + SamlAdminGroupName: values.SamlAdminGroupName, + tags: values.tags + }) + ); + setStatus({ success: true }); + setSubmitting(false); + if (!response.errors) { + setStatus({ success: true }); + setSubmitting(false); + enqueueSnackbar('ML Studio user creation started', { + anchorOrigin: { + horizontal: 'right', + vertical: 'top' + }, + variant: 'success' + }); + navigate( + `/console/mlstudio/${response.data.createSagemakerStudioUser.sagemakerStudioUserUri}` + ); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (err) { + console.error(err); + setStatus({ success: false }); + setErrors({ submit: err.message }); + setSubmitting(false); + } + } + if (loading) { + return ; + } + + return ( + <> + + ML Studio: Notebook Create | data.all + + + + + + + Create a new ML Studio + + } + sx={{ mt: 1 }} + > + + Play + + + ML Studio + + + Create + + + + + + + + + + + { + await submit(values, setStatus, setSubmitting, setErrors); + }} + > + {({ + errors, + handleBlur, + handleChange, + handleSubmit, + isSubmitting, + setFieldValue, + touched, + values + }) => ( +
+ + + + + + + + + + {touched.description && errors.description && ( + + + {errors.description} + + + )} + + + + + + + {groupOptions.map((group) => ( + + {group.label} + + ))} + + + + + { + setFieldValue('tags', [...chip]); + }} + /> + + + + + + + + + { + setFieldValue('SamlGroupName', ''); + fetchGroups( + event.target.value.environmentUri + ).catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + setFieldValue('environment', event.target.value); + }} + select + value={values.environment} + variant="outlined" + > + {environmentOptions.map((environment) => ( + + {environment.label} + + ))} + + + + + + + + + + {errors.submit && ( + + {errors.submit} + + )} + + + Create ML Studio user + + + + +
+ )} +
+
+
+
+ + ); +}; + +export default MLStudioCreateForm; diff --git a/frontend/src/modules/MLStudio/views/MLStudioList.js b/frontend/src/modules/MLStudio/views/MLStudioList.js new file mode 100644 index 000000000..af85d45fc --- /dev/null +++ b/frontend/src/modules/MLStudio/views/MLStudioList.js @@ -0,0 +1,172 @@ +import { + Box, + Breadcrumbs, + Button, + Container, + Grid, + Link, + Typography +} from '@mui/material'; +import CircularProgress from '@mui/material/CircularProgress'; +import { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; +import { Link as RouterLink } from 'react-router-dom'; +import { + ChevronRightIcon, + Defaults, + Pager, + PlusIcon, + SearchInput, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { listSagemakerStudioUsers } from '../services'; +import { MLStudioListItem } from '../components/'; + +function MLStudioPageHeader() { + return ( + + + + ML Studio + + } + sx={{ mt: 1 }} + > + + Play + + + ML Studio + + + + + + + + + + ); +} + +const MLStudioList = () => { + const dispatch = useDispatch(); + const [items, setItems] = useState(Defaults.pagedResponse); + const [filter, setFilter] = useState(Defaults.filter); + const { settings } = useSettings(); + const [inputValue, setInputValue] = useState(''); + const [loading, setLoading] = useState(true); + const client = useClient(); + + const fetchItems = useCallback(async () => { + setLoading(true); + const response = await client.query(listSagemakerStudioUsers(filter)); + if (!response.errors) { + setItems(response.data.listSagemakerStudioUsers); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + setLoading(false); + }, [client, dispatch, filter]); + + const handleInputChange = (event) => { + setInputValue(event.target.value); + setFilter({ ...filter, term: event.target.value }); + }; + + const handleInputKeyup = (event) => { + if (event.code === 'Enter') { + setFilter({ page: 1, term: event.target.value }); + fetchItems().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + } + }; + + const handlePageChange = async (event, value) => { + if (value <= items.pages && value !== items.page) { + await setFilter({ ...filter, page: value }); + } + }; + + useEffect(() => { + if (client) { + fetchItems().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + } + }, [client, filter.page, dispatch, fetchItems]); + + return ( + <> + + ML Studio | data.all + + + + + + + + + + {loading ? 
( + + ) : ( + + + {items.nodes.map((node) => ( + + ))} + + + + + )} + + + + + ); +}; + +export default MLStudioList; diff --git a/frontend/src/modules/MLStudio/views/MLStudioView.js b/frontend/src/modules/MLStudio/views/MLStudioView.js new file mode 100644 index 000000000..424059b76 --- /dev/null +++ b/frontend/src/modules/MLStudio/views/MLStudioView.js @@ -0,0 +1,256 @@ +import { Info } from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; +import { + Box, + Breadcrumbs, + Button, + CircularProgress, + Container, + Divider, + Grid, + Link, + Tab, + Tabs, + Typography +} from '@mui/material'; +import { useSnackbar } from 'notistack'; +import React, { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; +import { FaAws, FaTrash } from 'react-icons/fa'; +import { SiJupyter } from 'react-icons/si'; +import { useNavigate } from 'react-router'; +import { Link as RouterLink, useParams } from 'react-router-dom'; +import { + ChevronRightIcon, + DeleteObjectWithFrictionModal, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { + deleteSagemakerStudioUser, + getSagemakerStudioUser, + getSagemakerStudioUserPresignedUrl +} from '../services'; +import { useClient } from 'services'; +import { StackStatus, Stack } from 'modules/Shared'; +import { MLStudioOverview } from '../components'; + +const tabs = [ + { label: 'Overview', value: 'overview', icon: }, + { label: 'Stack', value: 'stack', icon: } +]; + +const MLStudioView = () => { + const dispatch = useDispatch(); + const { settings } = useSettings(); + const { enqueueSnackbar } = useSnackbar(); + const params = useParams(); + const client = useClient(); + const navigate = useNavigate(); + const [currentTab, setCurrentTab] = useState('overview'); + const [loading, setLoading] = useState(true); + const [isDeleteObjectModalOpen, setIsDeleteObjectModalOpen] = useState(false); + const [mlstudio, setMLStudio] = useState(null); + const [stack, setStack] = useState(null); + const [isOpeningSagemakerStudio, setIsOpeningSagemakerStudio] = + useState(false); + + const handleDeleteObjectModalOpen = () => { + setIsDeleteObjectModalOpen(true); + }; + + const handleDeleteObjectModalClose = () => { + setIsDeleteObjectModalOpen(false); + }; + + const fetchItem = useCallback(async () => { + setLoading(true); + const response = await client.query(getSagemakerStudioUser(params.uri)); + if (!response.errors) { + setMLStudio(response.data.getSagemakerStudioUser); + if (stack) { + setStack(response.data.getSagemakerStudioUser.stack); + } + } else { + const error = response.errors + ? 
response.errors[0].message + : 'ML Studio User not found'; + dispatch({ type: SET_ERROR, error }); + } + setLoading(false); + }, [client, dispatch, params.uri, stack]); + + const getMLStudioPresignedUrl = async () => { + setIsOpeningSagemakerStudio(true); + const response = await client.query( + getSagemakerStudioUserPresignedUrl(mlstudio.sagemakerStudioUserUri) + ); + if (!response.errors) { + window.open(response.data.getSagemakerStudioUserPresignedUrl); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + setIsOpeningSagemakerStudio(false); + }; + + useEffect(() => { + if (client) { + fetchItem().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); + } + }, [client, dispatch, fetchItem]); + + const handleTabsChange = (event, value) => { + setCurrentTab(value); + }; + const removeMLStudio = async (deleteFromAWS = false) => { + const response = await client.mutate( + deleteSagemakerStudioUser(mlstudio.sagemakerStudioUserUri, deleteFromAWS) + ); + if (!response.errors) { + handleDeleteObjectModalClose(); + enqueueSnackbar('ML Studio User deleted', { + anchorOrigin: { + horizontal: 'right', + vertical: 'top' + }, + variant: 'success' + }); + navigate('/console/mlstudio'); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + }; + + if (loading) { + return ; + } + if (!mlstudio) { + return null; + } + + return ( + <> + + ML Studio: User Details | DataStudio + + + + + + + + MLStudio User {mlstudio.label} + + } + sx={{ mt: 1 }} + > + + Play + + + ML Studio + + + {mlstudio.label} + + + + + + } + sx={{ m: 1 }} + onClick={getMLStudioPresignedUrl} + type="button" + variant="outlined" + > + Open JupyterLab + + + + + + + + {tabs.map((tab) => ( + + ))} + + + + + {currentTab === 'overview' && ( + + )} + {currentTab === 'stack' && ( + + )} + + + + + + ); +}; + +export default MLStudioView; diff --git a/frontend/src/views/NotFound.js b/frontend/src/modules/NotFound/views/NotFound.js similarity index 100% rename from frontend/src/views/NotFound.js rename to frontend/src/modules/NotFound/views/NotFound.js diff --git a/frontend/src/views/Notebooks/NotebookInstanceProperties.js b/frontend/src/modules/Notebooks/components/NotebookInstanceProperties.js similarity index 94% rename from frontend/src/views/Notebooks/NotebookInstanceProperties.js rename to frontend/src/modules/Notebooks/components/NotebookInstanceProperties.js index e2ae76a0a..0306f2f69 100644 --- a/frontend/src/views/Notebooks/NotebookInstanceProperties.js +++ b/frontend/src/modules/Notebooks/components/NotebookInstanceProperties.js @@ -1,5 +1,3 @@ -import React from 'react'; -import PropTypes from 'prop-types'; import { Card, CardContent, @@ -7,8 +5,10 @@ import { Divider, Typography } from '@mui/material'; +import PropTypes from 'prop-types'; +import React from 'react'; -const NotebookInstanceProperties = ({ notebook }) => ( +export const NotebookInstanceProperties = ({ notebook }) => ( @@ -57,5 +57,3 @@ const NotebookInstanceProperties = ({ notebook }) => ( NotebookInstanceProperties.propTypes = { notebook: PropTypes.object.isRequired }; - -export default NotebookInstanceProperties; diff --git a/frontend/src/modules/Notebooks/components/NotebookListItem.js b/frontend/src/modules/Notebooks/components/NotebookListItem.js new file mode 100644 index 000000000..d0f29d59b --- /dev/null +++ b/frontend/src/modules/Notebooks/components/NotebookListItem.js @@ -0,0 +1,237 @@ +import { + Box, + Button, + Card, + Divider, + Grid, + Link, + Tooltip, + Typography +} from 
'@mui/material'; +import PropTypes from 'prop-types'; +import React from 'react'; +import * as FaIcons from 'react-icons/fa'; +import * as FiIcons from 'react-icons/fi'; +import { SiJupyter } from 'react-icons/si'; +import { useNavigate } from 'react-router'; +import { Link as RouterLink } from 'react-router-dom'; +import { IconAvatar, Label, StackStatus, useCardStyle } from 'design'; + +/** + * @description NotebookListItem view. + * @param {Object} props + * @return {JSX.Element} + */ +export const NotebookListItem = (props) => { + const { notebook } = props; + const classes = useCardStyle(); + const navigate = useNavigate(); + + return ( + + + + + } /> + + { + navigate(`/console/notebooks/${notebook.notebookUri}`); + }} + sx={{ + width: '99%', + whiteSpace: 'nowrap', + alignItems: 'left', + overflow: 'hidden', + textOverflow: 'ellipsis', + WebkitBoxOrient: 'vertical', + WebkitLineClamp: 2 + }} + > + + {notebook.label} + + + + by{' '} + + {notebook.owner} + + + + + + + + + + + Role + + + + + + + + + + + + + + Team + + + + + + {notebook.SamlAdminGroupName || '-'} + + + + + + + + + + Account + + + + + {notebook.environment.AwsAccountId} + + + + + + + + + Region + + + + + {notebook.environment.region} + + + + + + + + + Status + + + + + + + + + + + + + + + + + + + + + + ); +}; + +NotebookListItem.propTypes = { + notebook: PropTypes.object.isRequired +}; diff --git a/frontend/src/modules/Notebooks/components/NotebookOverview.js b/frontend/src/modules/Notebooks/components/NotebookOverview.js new file mode 100644 index 000000000..149c25e94 --- /dev/null +++ b/frontend/src/modules/Notebooks/components/NotebookOverview.js @@ -0,0 +1,49 @@ +import { Box, Grid } from '@mui/material'; +import PropTypes from 'prop-types'; +import { ObjectBrief, ObjectMetadata } from 'design'; +import { NotebookInstanceProperties } from './NotebookInstanceProperties'; + +/** + * @description NotebookOverview view. + * @param {NotebookOverview.propTypes} props + * @return {JSX.Element} + */ +export const NotebookOverview = (props) => { + const { notebook, ...other } = props; + + return ( + + + + 0 ? 
notebook.tags : ['-'] + } + /> + + + + + + + + + + ); +}; + +NotebookOverview.propTypes = { + notebook: PropTypes.object.isRequired +}; diff --git a/frontend/src/modules/Notebooks/components/index.js b/frontend/src/modules/Notebooks/components/index.js new file mode 100644 index 000000000..6fbfb2873 --- /dev/null +++ b/frontend/src/modules/Notebooks/components/index.js @@ -0,0 +1,3 @@ +export * from './NotebookInstanceProperties'; +export * from './NotebookListItem'; +export * from './NotebookOverview'; diff --git a/frontend/src/api/SagemakerNotebook/createSagemakerNotebook.js b/frontend/src/modules/Notebooks/services/createSagemakerNotebook.js similarity index 78% rename from frontend/src/api/SagemakerNotebook/createSagemakerNotebook.js rename to frontend/src/modules/Notebooks/services/createSagemakerNotebook.js index f15e4eda7..c555b8bd3 100644 --- a/frontend/src/api/SagemakerNotebook/createSagemakerNotebook.js +++ b/frontend/src/modules/Notebooks/services/createSagemakerNotebook.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const createSagemakerNotebook = (input) => ({ +export const createSagemakerNotebook = (input) => ({ variables: { input }, @@ -17,5 +17,3 @@ const createSagemakerNotebook = (input) => ({ } ` }); - -export default createSagemakerNotebook; diff --git a/frontend/src/api/SagemakerNotebook/deleteSagemakerNotebook.js b/frontend/src/modules/Notebooks/services/deleteSagemakerNotebook.js similarity index 75% rename from frontend/src/api/SagemakerNotebook/deleteSagemakerNotebook.js rename to frontend/src/modules/Notebooks/services/deleteSagemakerNotebook.js index 26720dba8..6f6b6138d 100644 --- a/frontend/src/api/SagemakerNotebook/deleteSagemakerNotebook.js +++ b/frontend/src/modules/Notebooks/services/deleteSagemakerNotebook.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const deleteSagemakerNotebook = (notebookUri, deleteFromAWS) => ({ +export const deleteSagemakerNotebook = (notebookUri, deleteFromAWS) => ({ variables: { notebookUri, deleteFromAWS @@ -17,5 +17,3 @@ const deleteSagemakerNotebook = (notebookUri, deleteFromAWS) => ({ } ` }); - -export default deleteSagemakerNotebook; diff --git a/frontend/src/api/SagemakerNotebook/getSagemakerNotebook.js b/frontend/src/modules/Notebooks/services/getSagemakerNotebook.js similarity index 92% rename from frontend/src/api/SagemakerNotebook/getSagemakerNotebook.js rename to frontend/src/modules/Notebooks/services/getSagemakerNotebook.js index 8e7f6da2a..46f7c129b 100644 --- a/frontend/src/api/SagemakerNotebook/getSagemakerNotebook.js +++ b/frontend/src/modules/Notebooks/services/getSagemakerNotebook.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getNotebook = (notebookUri) => ({ +export const getSagemakerNotebook = (notebookUri) => ({ variables: { notebookUri }, @@ -49,5 +49,3 @@ const getNotebook = (notebookUri) => ({ } ` }); - -export default getNotebook; diff --git a/frontend/src/modules/Notebooks/services/getSagemakerNotebookPresignedUrl.js b/frontend/src/modules/Notebooks/services/getSagemakerNotebookPresignedUrl.js new file mode 100644 index 000000000..52923fa66 --- /dev/null +++ b/frontend/src/modules/Notebooks/services/getSagemakerNotebookPresignedUrl.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const getSagemakerNotebookPresignedUrl = (notebookUri) => ({ + variables: { + notebookUri + }, + query: gql` + query getSagemakerNotebookPresignedUrl($notebookUri: String!) 
{ + getSagemakerNotebookPresignedUrl(notebookUri: $notebookUri) + } + ` +}); diff --git a/frontend/src/modules/Notebooks/services/index.js b/frontend/src/modules/Notebooks/services/index.js new file mode 100644 index 000000000..8dbc155f9 --- /dev/null +++ b/frontend/src/modules/Notebooks/services/index.js @@ -0,0 +1,7 @@ +export * from './createSagemakerNotebook'; +export * from './deleteSagemakerNotebook'; +export * from './getSagemakerNotebook'; +export * from './getSagemakerNotebookPresignedUrl'; +export * from './listSagemakerNotebooks'; +export * from './startNotebookInstance'; +export * from './stopNotebookInstance'; diff --git a/frontend/src/api/SagemakerNotebook/listSagemakerNotebooks.js b/frontend/src/modules/Notebooks/services/listSagemakerNotebooks.js similarity index 90% rename from frontend/src/api/SagemakerNotebook/listSagemakerNotebooks.js rename to frontend/src/modules/Notebooks/services/listSagemakerNotebooks.js index cc42b9c3e..e73057fc3 100644 --- a/frontend/src/api/SagemakerNotebook/listSagemakerNotebooks.js +++ b/frontend/src/modules/Notebooks/services/listSagemakerNotebooks.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listSagemakerNotebooks = (filter) => ({ +export const listSagemakerNotebooks = (filter) => ({ variables: { filter }, @@ -44,5 +44,3 @@ const listSagemakerNotebooks = (filter) => ({ } ` }); - -export default listSagemakerNotebooks; diff --git a/frontend/src/modules/Notebooks/services/startNotebookInstance.js b/frontend/src/modules/Notebooks/services/startNotebookInstance.js new file mode 100644 index 000000000..f17155cc8 --- /dev/null +++ b/frontend/src/modules/Notebooks/services/startNotebookInstance.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const startSagemakerNotebook = (notebookUri) => ({ + variables: { + notebookUri + }, + mutation: gql` + mutation StartSagemakerNotebook($notebookUri: String!) { + startSagemakerNotebook(notebookUri: $notebookUri) + } + ` +}); diff --git a/frontend/src/modules/Notebooks/services/stopNotebookInstance.js b/frontend/src/modules/Notebooks/services/stopNotebookInstance.js new file mode 100644 index 000000000..28910d907 --- /dev/null +++ b/frontend/src/modules/Notebooks/services/stopNotebookInstance.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const stopSagemakerNotebook = (notebookUri) => ({ + variables: { + notebookUri + }, + mutation: gql` + mutation StopSagemakerNotebook($notebookUri: String!) 
{ + stopSagemakerNotebook(notebookUri: $notebookUri) + } + ` +}); diff --git a/frontend/src/modules/Notebooks/views/NotebookCreateForm.js b/frontend/src/modules/Notebooks/views/NotebookCreateForm.js new file mode 100644 index 000000000..fac8ae4b2 --- /dev/null +++ b/frontend/src/modules/Notebooks/views/NotebookCreateForm.js @@ -0,0 +1,591 @@ +import { Autocomplete, LoadingButton } from '@mui/lab'; +import { + Box, + Breadcrumbs, + Button, + Card, + CardContent, + CardHeader, + CircularProgress, + Container, + FormHelperText, + Grid, + Link, + MenuItem, + Slider, + TextField, + Typography +} from '@mui/material'; +import { Formik } from 'formik'; +import { useSnackbar } from 'notistack'; +import { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; +import { Link as RouterLink, useNavigate } from 'react-router-dom'; +import * as Yup from 'yup'; +import { + ArrowLeftIcon, + ChevronRightIcon, + ChipInput, + Defaults, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { listEnvironmentGroups, listEnvironments, useClient } from 'services'; +import { createSagemakerNotebook } from '../services'; + +const NotebookCreateForm = (props) => { + const navigate = useNavigate(); + const { enqueueSnackbar } = useSnackbar(); + const dispatch = useDispatch(); + const client = useClient(); + const { settings } = useSettings(); + const [loading, setLoading] = useState(true); + const [groupOptions, setGroupOptions] = useState([]); + const [environmentOptions, setEnvironmentOptions] = useState([]); + const [vpcOptions, setVpcOptions] = useState([]); + const [subnetOptions, setSubnetOptions] = useState([]); + const marks = [ + { + value: 32, + label: '32' + }, + { + value: 64, + label: '64' + }, + { + value: 128, + label: '128' + }, + { + value: 256, + label: '256' + } + ]; + const instanceTypes = [ + { label: 'ml.t3.medium', value: 'ml.t3.medium' }, + { label: 'ml.t3.large', value: 'ml.t3.large' }, + { label: 'ml.m5.xlarge', value: 'ml.m5.xlarge' } + ]; + + const fetchEnvironments = useCallback(async () => { + setLoading(true); + const response = await client.query( + listEnvironments({ filter: Defaults.selectListFilter }) + ); + if (!response.errors) { + setEnvironmentOptions( + response.data.listEnvironments.nodes.map((e) => ({ + ...e, + value: e.environmentUri, + label: e.label + })) + ); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + setLoading(false); + }, [client, dispatch]); + const fetchGroups = async (environmentUri) => { + try { + const response = await client.query( + listEnvironmentGroups({ + filter: Defaults.selectListFilter, + environmentUri + }) + ); + if (!response.errors) { + setGroupOptions( + response.data.listEnvironmentGroups.nodes.map((g) => ({ + value: g.groupUri, + label: g.groupUri + })) + ); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (e) { + dispatch({ type: SET_ERROR, error: e.message }); + } + }; + useEffect(() => { + if (client) { + fetchEnvironments().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + } + }, [client, dispatch, fetchEnvironments]); + + async function submit(values, setStatus, setSubmitting, setErrors) { + try { + const response = await client.mutate( + createSagemakerNotebook({ + label: values.label, + environmentUri: values.environment.environmentUri, + description: values.description, + SamlAdminGroupName: values.SamlAdminGroupName, + tags: values.tags, + VpcId: 
values.VpcId, + SubnetId: values.SubnetId, + VolumeSizeInGB: values.VolumeSizeInGB, + InstanceType: values.InstanceType + }) + ); + if (!response.errors) { + setStatus({ success: true }); + setSubmitting(false); + enqueueSnackbar('Sagemaker instance creation started', { + anchorOrigin: { + horizontal: 'right', + vertical: 'top' + }, + variant: 'success' + }); + navigate( + `/console/notebooks/${response.data.createSagemakerNotebook.notebookUri}` + ); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (err) { + setStatus({ success: false }); + setErrors({ submit: err.message }); + setSubmitting(false); + } + } + if (loading) { + return ; + } + + return ( + <> + + Notebooks: Notebook Create | data.all + + + + + + + Create a new notebook + + } + sx={{ mt: 1 }} + > + + Play + + + Notebooks + + + Create + + + + + + + + + + + { + await submit(values, setStatus, setSubmitting, setErrors); + }} + > + {({ + errors, + handleBlur, + handleChange, + handleSubmit, + isSubmitting, + setFieldValue, + touched, + values + }) => ( +
+ + + + + + + + + + {touched.description && errors.description && ( + + + {errors.description} + + + )} + + + + { + setFieldValue('tags', [...chip]); + }} + /> + + + + + + + + {instanceTypes.map((i) => ( + + {i.label} + + ))} + + + + + + Volume size + + { + setFieldValue('VolumeSizeInGB', value); + }} + /> + {touched.VolumeSizeInGB && + errors.VolumeSizeInGB && ( + + + {errors.VolumeSizeInGB} + + + )} + + + + + + + + + { + setFieldValue('SamlAdminGroupName', ''); + fetchGroups( + event.target.value.environmentUri + ).catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + setFieldValue('environment', event.target.value); + setVpcOptions( + event.target.value.networks.map((v) => ({ + ...v, + value: v, + label: v.VpcId + })) + ); + }} + select + value={values.environment} + variant="outlined" + > + {environmentOptions.map((environment) => ( + + {environment.label} + + ))} + + + + + + + + + + + {groupOptions.map((group) => ( + + {group.label} + + ))} + + + + + + + + option.label)} + onChange={(event, value) => { + setSubnetOptions([]); + const filteredVpc = vpcOptions.filter( + (v) => v.VpcId === value + ); + if ( + value && + vpcOptions && + filteredVpc.length === 1 + ) { + setSubnetOptions( + filteredVpc[0].privateSubnetIds.concat( + filteredVpc[0].publicSubnetIds + ) + ); + setFieldValue('VpcId', value); + } else { + setFieldValue('VpcId', value); + } + }} + renderInput={(params) => ( + + )} + /> + + + option)} + onChange={(event, value) => { + setFieldValue('SubnetId', value); + }} + renderInput={(params) => ( + + )} + /> + + + + + {errors.submit && ( + + {errors.submit} + + )} + + + Create Notebook + + + + +
+ )} +
+
+
+
+ + ); +}; + +export default NotebookCreateForm; diff --git a/frontend/src/modules/Notebooks/views/NotebookList.js b/frontend/src/modules/Notebooks/views/NotebookList.js new file mode 100644 index 000000000..23dd8b457 --- /dev/null +++ b/frontend/src/modules/Notebooks/views/NotebookList.js @@ -0,0 +1,172 @@ +import { + Box, + Breadcrumbs, + Button, + Container, + Grid, + Link, + Typography +} from '@mui/material'; +import CircularProgress from '@mui/material/CircularProgress'; +import { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; +import { Link as RouterLink } from 'react-router-dom'; +import { + ChevronRightIcon, + Defaults, + Pager, + PlusIcon, + SearchInput, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { NotebookListItem } from '../components'; +import { listSagemakerNotebooks } from '../services'; + +function NotebookPageHeader() { + return ( + + + + Notebooks + + } + sx={{ mt: 1 }} + > + + Play + + + Notebooks + + + + + + + + + + ); +} + +const NotebookList = () => { + const dispatch = useDispatch(); + const [items, setItems] = useState(Defaults.pagedResponse); + const [filter, setFilter] = useState(Defaults.filter); + const { settings } = useSettings(); + const [inputValue, setInputValue] = useState(''); + const [loading, setLoading] = useState(true); + const client = useClient(); + + const fetchItems = useCallback(async () => { + setLoading(true); + const response = await client.query(listSagemakerNotebooks(filter)); + if (!response.errors) { + setItems(response.data.listSagemakerNotebooks); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + setLoading(false); + }, [client, dispatch, filter]); + + const handleInputChange = (event) => { + setInputValue(event.target.value); + setFilter({ ...filter, term: event.target.value }); + }; + + const handleInputKeyup = (event) => { + if (event.code === 'Enter') { + setFilter({ page: 1, term: event.target.value }); + fetchItems().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + } + }; + + const handlePageChange = async (event, value) => { + if (value <= items.pages && value !== items.page) { + await setFilter({ ...filter, page: value }); + } + }; + + useEffect(() => { + if (client) { + fetchItems().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + } + }, [client, filter.page, fetchItems, dispatch]); + + return ( + <> + + Notebooks | data.all + + + + + + + + + + {loading ? 
( + + ) : ( + + + {items.nodes.map((node) => ( + + ))} + + + + + )} + + + + + ); +}; + +export default NotebookList; diff --git a/frontend/src/modules/Notebooks/views/NotebookView.js b/frontend/src/modules/Notebooks/views/NotebookView.js new file mode 100644 index 000000000..4681da260 --- /dev/null +++ b/frontend/src/modules/Notebooks/views/NotebookView.js @@ -0,0 +1,401 @@ +import { Info, LocalOffer, RefreshRounded } from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; +import { + Box, + Breadcrumbs, + Button, + CircularProgress, + Container, + Divider, + Grid, + Link, + Tab, + Tabs, + Typography +} from '@mui/material'; +import { useSnackbar } from 'notistack'; +import React, { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; +import { BiStopCircle } from 'react-icons/bi'; +import { FaAws, FaTrash } from 'react-icons/fa'; +import { SiJupyter } from 'react-icons/si'; +import { VscDebugStart } from 'react-icons/vsc'; +import { useNavigate } from 'react-router'; +import { Link as RouterLink, useParams } from 'react-router-dom'; +import { + ChevronRightIcon, + DeleteObjectWithFrictionModal, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { KeyValueTagList, StackStatus, Stack } from 'modules/Shared'; +import { NotebookOverview } from '../components'; +import { + deleteSagemakerNotebook, + getSagemakerNotebook, + getSagemakerNotebookPresignedUrl, + startSagemakerNotebook, + stopSagemakerNotebook +} from '../services'; + +/** + * @description NotebookView component. + * @returns {JSX.Element|null} + */ +const NotebookView = () => { + const getTabs = (isAdvancedMode) => + isAdvancedMode + ? [ + { + label: 'Overview', + value: 'overview', + icon: + }, + { + label: 'Tags', + value: 'tags', + icon: + }, + { label: 'Stack', value: 'stack', icon: } + ] + : []; + const dispatch = useDispatch(); + const { settings } = useSettings(); + const { enqueueSnackbar } = useSnackbar(); + const params = useParams(); + const client = useClient(); + const navigate = useNavigate(); + const [currentTab, setCurrentTab] = useState('overview'); + const [loading, setLoading] = useState(true); + const [isStoppingNotebook, setIsStoppingNotebook] = useState(false); + const [isStartingNotebook, setIsStartingNotebook] = useState(false); + const [isRefreshingNotebook, setIsRefreshingNotebook] = useState(false); + const [notebook, setNotebook] = useState(null); + const [stack, setStack] = useState(null); + const [isOpeningSagemakerNotebook, setIsOpeningSagemakerNotebook] = + useState(false); + const [isStoppedInstance, setIsStoppedInstance] = useState({}); + const [isNotFoundInstance, setNotFoundInstance] = useState({}); + const [isDeleteObjectModalOpen, setIsDeleteObjectModalOpen] = useState(false); + const [tabs, setTabs] = useState(getTabs(settings.isAdvancedMode)); + + useEffect( + () => setTabs(getTabs(settings.isAdvancedMode)), + [settings.isAdvancedMode] + ); + + const handleDeleteObjectModalOpen = () => { + setIsDeleteObjectModalOpen(true); + }; + + const handleDeleteObjectModalClose = () => { + setIsDeleteObjectModalOpen(false); + }; + + const getNotebookInstance = useCallback(async () => { + const response = await client.query(getSagemakerNotebook(params.uri)); + if (!response.errors) { + setNotebook(response.data.getSagemakerNotebook); + if (response.data.getSagemakerNotebook.stack) { + setStack(response.data.getSagemakerNotebook.stack); + } + const status = 
response.data.getSagemakerNotebook.NotebookInstanceStatus; + if (status === 'Stopped' || status === 'Stopping') { + setIsStoppedInstance(true); + } else { + setIsStoppedInstance(false); + } + if (status === 'NotFound' || status === 'Pending') { + setNotFoundInstance(true); + } else { + setNotFoundInstance(false); + } + } else { + const error = response.errors + ? response.errors[0].message + : 'Notebook not found'; + dispatch({ type: SET_ERROR, error }); + } + }, [params.uri, client, dispatch]); + + const refreshInstance = async () => { + setIsRefreshingNotebook(true); + await getNotebookInstance(); + enqueueSnackbar('Notebook instance reloaded', { + anchorOrigin: { + horizontal: 'right', + vertical: 'top' + }, + variant: 'success' + }); + setIsRefreshingNotebook(false); + }; + + const fetchItem = useCallback(async () => { + setLoading(true); + await getNotebookInstance(); + setLoading(false); + }, [getNotebookInstance]); + + const removeNotebook = async (deleteFromAWS = false) => { + const response = await client.mutate( + deleteSagemakerNotebook(notebook.notebookUri, deleteFromAWS) + ); + if (!response.errors) { + handleDeleteObjectModalClose(); + enqueueSnackbar('Notebook deleted', { + anchorOrigin: { + horizontal: 'right', + vertical: 'top' + }, + variant: 'success' + }); + navigate('/console/notebooks'); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + }; + const getNotebookPresignedUrl = async () => { + setIsOpeningSagemakerNotebook(true); + const response = await client.query( + getSagemakerNotebookPresignedUrl(notebook.notebookUri) + ); + if (!response.errors) { + window.open(response.data.getSagemakerNotebookPresignedUrl); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + setIsOpeningSagemakerNotebook(false); + }; + + useEffect(() => { + if (client) { + fetchItem().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); + } + }, [client, fetchItem, dispatch]); + + const handleTabsChange = (event, value) => { + setCurrentTab(value); + }; + const stopNotebook = async () => { + setIsStoppingNotebook(true); + const response = await client.mutate( + stopSagemakerNotebook(notebook.notebookUri) + ); + if (!response.errors) { + enqueueSnackbar('Notebook instance is stopping', { + anchorOrigin: { + horizontal: 'right', + vertical: 'top' + }, + variant: 'success' + }); + setIsStoppingNotebook(false); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + }; + + const startNotebook = async () => { + setIsStartingNotebook(true); + const response = await client.mutate( + startSagemakerNotebook(notebook.notebookUri) + ); + if (!response.errors) { + enqueueSnackbar('Notebook instance starting', { + anchorOrigin: { + horizontal: 'right', + vertical: 'top' + }, + variant: 'success' + }); + setIsStartingNotebook(false); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + }; + + if (loading) { + return ; + } + if (!notebook) { + return null; + } + + /** + * @description Tab header. 
+ * @type {JSX.Element} + */ + const tabHeader = ( + <> + + + {tabs.map((tab) => ( + + ))} + + + + + ); + + return ( + <> + + Notebooks: Notebook Details | data.all + + + + + + + + Notebook {notebook.label} + + } + sx={{ mt: 1 }} + > + + Play + + + Notebooks + + + {notebook.label} + + + + + + } + sx={{ m: 1 }} + onClick={getNotebookPresignedUrl} + type="button" + variant="outlined" + > + Open JupyterLab + + } + sx={{ m: 1 }} + onClick={stopNotebook} + type="button" + variant="outlined" + > + Stop Instance + + } + sx={{ m: 1 }} + onClick={startNotebook} + type="button" + variant="outlined" + > + Start Instance + + } + sx={{ m: 1 }} + variant="outlined" + onClick={refreshInstance} + > + Refresh + + + + + + {settings.isAdvancedMode && tabHeader} + + {currentTab === 'overview' && ( + + )} + {currentTab === 'tags' && ( + + )} + {currentTab === 'stack' && ( + + )} + + + + + + ); +}; + +export default NotebookView; diff --git a/frontend/src/views/Organizations/OrganizationEnvironments.js b/frontend/src/modules/Organizations/components/OrganizationEnvironments.js similarity index 86% rename from frontend/src/views/Organizations/OrganizationEnvironments.js rename to frontend/src/modules/Organizations/components/OrganizationEnvironments.js index f62b0d5f0..341f996aa 100644 --- a/frontend/src/views/Organizations/OrganizationEnvironments.js +++ b/frontend/src/modules/Organizations/components/OrganizationEnvironments.js @@ -1,4 +1,4 @@ -import React, { useCallback, useEffect, useState } from 'react'; +import { Link } from '@mui/icons-material'; import { Box, Button, @@ -15,38 +15,39 @@ import { TableRow, TextField } from '@mui/material'; -import PropTypes from 'prop-types'; import CircularProgress from '@mui/material/CircularProgress'; +import PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import { FaAws } from 'react-icons/fa'; import { useNavigate } from 'react-router'; import { Link as RouterLink } from 'react-router-dom'; -import { Link } from '@mui/icons-material'; -import { FaAws } from 'react-icons/fa'; -import Scrollbar from '../../components/Scrollbar'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import ArrowRightIcon from '../../icons/ArrowRight'; -import StackStatus from '../../components/StackStatus'; -import SearchIcon from '../../icons/Search'; -import listOrganizationEnvrionments from '../../api/Environment/listOrganizationEnvironments'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import Pager from '../../components/Pager'; -import RefreshTableMenu from '../../components/RefreshTableMenu'; +import { + ArrowRightIcon, + Defaults, + Pager, + RefreshTableMenu, + Scrollbar, + SearchIcon, + StackStatus +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { listOrganizationEnvironments } from '../services'; -const OrganizationEnvironments = (props) => { +export const OrganizationEnvironments = (props) => { const { organization, ...other } = props; const client = useClient(); const navigate = useNavigate(); const dispatch = useDispatch(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); + const [items, setItems] = useState(Defaults.pagedResponse); + const [filter, setFilter] = useState(Defaults.filter); const [loading, setLoading] = useState(null); const [inputValue, setInputValue] 
= useState(''); const fetchItems = useCallback(async () => { setLoading(true); const response = await client.query( - listOrganizationEnvrionments({ + listOrganizationEnvironments({ filter, organizationUri: organization.organizationUri }) @@ -202,4 +203,3 @@ const OrganizationEnvironments = (props) => { OrganizationEnvironments.propTypes = { organization: PropTypes.object.isRequired }; -export default OrganizationEnvironments; diff --git a/frontend/src/views/Organizations/OrganizationListItem.js b/frontend/src/modules/Organizations/components/OrganizationListItem.js similarity index 96% rename from frontend/src/views/Organizations/OrganizationListItem.js rename to frontend/src/modules/Organizations/components/OrganizationListItem.js index b3c4b6cd9..80f38d65f 100644 --- a/frontend/src/views/Organizations/OrganizationListItem.js +++ b/frontend/src/modules/Organizations/components/OrganizationListItem.js @@ -8,18 +8,16 @@ import { Tooltip, Typography } from '@mui/material'; -import * as BiIcon from 'react-icons/bi'; -import { Link as RouterLink } from 'react-router-dom'; import PropTypes from 'prop-types'; -import { useNavigate } from 'react-router'; +import React from 'react'; +import * as BiIcon from 'react-icons/bi'; import * as FaIcons from 'react-icons/fa'; import { FaUserPlus } from 'react-icons/fa'; -import React from 'react'; -import IconAvatar from '../../components/IconAvatar'; -import Label from '../../components/Label'; -import useCardStyle from '../../hooks/useCardStyle'; +import { useNavigate } from 'react-router'; +import { Link as RouterLink } from 'react-router-dom'; +import { IconAvatar, Label, useCardStyle } from 'design'; -const OrganizationListItem = (props) => { +export const OrganizationListItem = (props) => { const { organization } = props; const classes = useCardStyle(); const navigate = useNavigate(); @@ -243,4 +241,3 @@ const OrganizationListItem = (props) => { OrganizationListItem.propTypes = { organization: PropTypes.object.isRequired }; -export default OrganizationListItem; diff --git a/frontend/src/views/Organizations/OrganizationOverview.js b/frontend/src/modules/Organizations/components/OrganizationOverview.js similarity index 81% rename from frontend/src/views/Organizations/OrganizationOverview.js rename to frontend/src/modules/Organizations/components/OrganizationOverview.js index bfdf555b9..4cd970e7f 100644 --- a/frontend/src/views/Organizations/OrganizationOverview.js +++ b/frontend/src/modules/Organizations/components/OrganizationOverview.js @@ -1,9 +1,8 @@ -import PropTypes from 'prop-types'; import { Box, Grid } from '@mui/material'; -import ObjectBrief from '../../components/ObjectBrief'; -import ObjectMetadata from '../../components/ObjectMetadata'; +import PropTypes from 'prop-types'; +import { ObjectBrief, ObjectMetadata } from 'design'; -const OrganizationOverview = (props) => { +export const OrganizationOverview = (props) => { const { organization, ...other } = props; return ( @@ -32,5 +31,3 @@ const OrganizationOverview = (props) => { OrganizationOverview.propTypes = { organization: PropTypes.object.isRequired }; - -export default OrganizationOverview; diff --git a/frontend/src/views/Organizations/OrganizationTeamInviteEditForm.js b/frontend/src/modules/Organizations/components/OrganizationTeamInviteEditForm.js similarity index 96% rename from frontend/src/views/Organizations/OrganizationTeamInviteEditForm.js rename to frontend/src/modules/Organizations/components/OrganizationTeamInviteEditForm.js index 2a3bd3dd1..4168f3293 100644 --- 
a/frontend/src/views/Organizations/OrganizationTeamInviteEditForm.js +++ b/frontend/src/modules/Organizations/components/OrganizationTeamInviteEditForm.js @@ -14,7 +14,7 @@ import { Typography } from '@mui/material'; -const OrganizationTeamInviteEditForm = (props) => { +export const OrganizationTeamInviteEditForm = (props) => { const { organization, team, onClose, open, reloadTeams, ...other } = props; const [permissions] = useState([ { @@ -106,5 +106,3 @@ OrganizationTeamInviteEditForm.propTypes = { reloadTeams: PropTypes.func, open: PropTypes.bool.isRequired }; - -export default OrganizationTeamInviteEditForm; diff --git a/frontend/src/views/Organizations/OrganizationTeamInviteForm.js b/frontend/src/modules/Organizations/components/OrganizationTeamInviteForm.js similarity index 95% rename from frontend/src/views/Organizations/OrganizationTeamInviteForm.js rename to frontend/src/modules/Organizations/components/OrganizationTeamInviteForm.js index d0ea4b11b..7dd089b6f 100644 --- a/frontend/src/views/Organizations/OrganizationTeamInviteForm.js +++ b/frontend/src/modules/Organizations/components/OrganizationTeamInviteForm.js @@ -1,6 +1,6 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import PropTypes from 'prop-types'; -import { useSnackbar } from 'notistack'; +import { GroupAddOutlined } from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; +import Autocomplete from '@mui/lab/Autocomplete'; import { Box, Card, @@ -16,18 +16,16 @@ import { TextField, Typography } from '@mui/material'; -import Autocomplete from '@mui/lab/Autocomplete'; import { Formik } from 'formik'; +import { useSnackbar } from 'notistack'; +import PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; import * as Yup from 'yup'; -import { LoadingButton } from '@mui/lab'; -import { GroupAddOutlined } from '@mui/icons-material'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import useClient from '../../hooks/useClient'; -import inviteGroupToOrganization from '../../api/Organization/inviteGroup'; -import listCognitoGroups from '../../api/Groups/listCognitoGroups'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { listCognitoGroups, useClient } from 'services'; +import { inviteGroupToOrganization } from '../services'; -const OrganizationTeamInviteForm = (props) => { +export const OrganizationTeamInviteForm = (props) => { const { organization, onClose, open, reloadTeams, ...other } = props; const { enqueueSnackbar } = useSnackbar(); const dispatch = useDispatch(); @@ -38,9 +36,9 @@ const OrganizationTeamInviteForm = (props) => { const [groupOptions, setGroupOptions] = useState([]); const filter = { - type: "organization", + type: 'organization', uri: organization.organizationUri - } + }; const fetchGroups = useCallback(async () => { try { @@ -280,5 +278,3 @@ OrganizationTeamInviteForm.propTypes = { reloadTeams: PropTypes.func, open: PropTypes.bool.isRequired }; - -export default OrganizationTeamInviteForm; diff --git a/frontend/src/views/Organizations/OrganizationTeams.js b/frontend/src/modules/Organizations/components/OrganizationTeams.js similarity index 89% rename from frontend/src/views/Organizations/OrganizationTeams.js rename to frontend/src/modules/Organizations/components/OrganizationTeams.js index a5fba7cbb..f4288c02f 100644 --- a/frontend/src/views/Organizations/OrganizationTeams.js +++ b/frontend/src/modules/Organizations/components/OrganizationTeams.js @@ -1,6 +1,5 @@ 
-import PropTypes from 'prop-types'; -import React, { useCallback, useEffect, useState } from 'react'; -import * as BsIcons from 'react-icons/bs'; +import { GroupAddOutlined } from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; import { Box, Button, @@ -18,25 +17,31 @@ import { TextField } from '@mui/material'; import CircularProgress from '@mui/material/CircularProgress'; -import { GroupAddOutlined } from '@mui/icons-material'; -import { useSnackbar } from 'notistack'; -import { LoadingButton } from '@mui/lab'; import { useTheme } from '@mui/styles'; +import { useSnackbar } from 'notistack'; +import PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import * as BsIcons from 'react-icons/bs'; import { HiUserRemove } from 'react-icons/hi'; import { VscChecklist } from 'react-icons/vsc'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import SearchIcon from '../../icons/Search'; -import Scrollbar from '../../components/Scrollbar'; -import RefreshTableMenu from '../../components/RefreshTableMenu'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import Pager from '../../components/Pager'; -import Label from '../../components/Label'; -import OrganizationTeamInviteForm from './OrganizationTeamInviteForm'; -import removeGroupFromOrganization from '../../api/Organization/removeGroup'; -import listOrganizationGroups from '../../api/Organization/listOrganizationGroups'; -import OrganizationTeamInviteEditForm from './OrganizationTeamInviteEditForm'; +import { + Defaults, + Label, + Pager, + RefreshTableMenu, + Scrollbar, + SearchIcon +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { + listOrganizationGroups, + removeGroupFromOrganization +} from '../services'; +import { + OrganizationTeamInviteEditForm, + OrganizationTeamInviteForm +} from '../components'; function TeamRow({ team, organization, fetchItems }) { const client = useClient(); @@ -142,11 +147,12 @@ TeamRow.propTypes = { organization: PropTypes.any, fetchItems: PropTypes.any }; -const OrganizationTeams = ({ organization }) => { + +export const OrganizationTeams = ({ organization }) => { const client = useClient(); const dispatch = useDispatch(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); + const [items, setItems] = useState(Defaults.pagedResponse); + const [filter, setFilter] = useState(Defaults.filter); const [loading, setLoading] = useState(true); const [inputValue, setInputValue] = useState(''); const [isTeamInviteModalOpen, setIsTeamInviteModalOpen] = useState(false); @@ -320,5 +326,3 @@ const OrganizationTeams = ({ organization }) => { OrganizationTeams.propTypes = { organization: PropTypes.object.isRequired }; - -export default OrganizationTeams; diff --git a/frontend/src/modules/Organizations/components/index.js b/frontend/src/modules/Organizations/components/index.js new file mode 100644 index 000000000..9b154882d --- /dev/null +++ b/frontend/src/modules/Organizations/components/index.js @@ -0,0 +1,6 @@ +export * from './OrganizationEnvironments'; +export * from './OrganizationListItem'; +export * from './OrganizationOverview'; +export * from './OrganizationTeamInviteEditForm'; +export * from './OrganizationTeamInviteForm'; +export * from './OrganizationTeams'; diff --git 
a/frontend/src/modules/Organizations/services/archiveOrganization.js b/frontend/src/modules/Organizations/services/archiveOrganization.js new file mode 100644 index 000000000..da3af9c77 --- /dev/null +++ b/frontend/src/modules/Organizations/services/archiveOrganization.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const archiveOrganization = (organizationUri) => ({ + variables: { + organizationUri + }, + mutation: gql` + mutation ArchiveOrg($organizationUri: String!) { + archiveOrganization(organizationUri: $organizationUri) + } + ` +}); diff --git a/frontend/src/api/Organization/createOrganization.js b/frontend/src/modules/Organizations/services/createOrganization.js similarity index 76% rename from frontend/src/api/Organization/createOrganization.js rename to frontend/src/modules/Organizations/services/createOrganization.js index 2937c08f2..fc3070266 100644 --- a/frontend/src/api/Organization/createOrganization.js +++ b/frontend/src/modules/Organizations/services/createOrganization.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const createOrganization = (input) => ({ +export const createOrganization = (input) => ({ variables: { input }, @@ -14,5 +14,3 @@ const createOrganization = (input) => ({ } ` }); - -export default createOrganization; diff --git a/frontend/src/modules/Organizations/services/index.js b/frontend/src/modules/Organizations/services/index.js new file mode 100644 index 000000000..4253c2f03 --- /dev/null +++ b/frontend/src/modules/Organizations/services/index.js @@ -0,0 +1,8 @@ +export * from './archiveOrganization'; +export * from './createOrganization'; +export * from './inviteGroup'; +export * from './listOrganizationEnvironments'; +export * from './listOrganizationGroups'; +export * from './listOrganizations'; +export * from './removeGroup'; +export * from './updateOrganization'; diff --git a/frontend/src/modules/Organizations/services/inviteGroup.js b/frontend/src/modules/Organizations/services/inviteGroup.js new file mode 100644 index 000000000..4c9cb4d8b --- /dev/null +++ b/frontend/src/modules/Organizations/services/inviteGroup.js @@ -0,0 +1,16 @@ +import { gql } from 'apollo-boost'; + +export const inviteGroupToOrganization = (input) => ({ + variables: { + input + }, + mutation: gql` + mutation inviteGroupToOrganization( + $input: InviteGroupToOrganizationInput! 
+ ) { + inviteGroupToOrganization(input: $input) { + organizationUri + } + } + ` +}); diff --git a/frontend/src/api/Environment/listOrganizationEnvironments.js b/frontend/src/modules/Organizations/services/listOrganizationEnvironments.js similarity index 80% rename from frontend/src/api/Environment/listOrganizationEnvironments.js rename to frontend/src/modules/Organizations/services/listOrganizationEnvironments.js index 23834051a..8f70ce967 100644 --- a/frontend/src/api/Environment/listOrganizationEnvironments.js +++ b/frontend/src/modules/Organizations/services/listOrganizationEnvironments.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listOrganizationEnvironments = ({ organizationUri, filter }) => ({ +export const listOrganizationEnvironments = ({ organizationUri, filter }) => ({ variables: { organizationUri, filter @@ -32,11 +32,6 @@ const listOrganizationEnvironments = ({ organizationUri, filter }) => ({ tags environmentType AwsAccountId - dashboardsEnabled - notebooksEnabled - mlStudiosEnabled - pipelinesEnabled - warehousesEnabled userRoleInEnvironment stack { stack @@ -50,11 +45,13 @@ const listOrganizationEnvironments = ({ organizationUri, filter }) => ({ outputs resources } + parameters { + key + value + } } } } } ` }); - -export default listOrganizationEnvironments; diff --git a/frontend/src/api/Organization/listOrganizationGroups.js b/frontend/src/modules/Organizations/services/listOrganizationGroups.js similarity index 82% rename from frontend/src/api/Organization/listOrganizationGroups.js rename to frontend/src/modules/Organizations/services/listOrganizationGroups.js index 7370537bc..9bb14500f 100644 --- a/frontend/src/api/Organization/listOrganizationGroups.js +++ b/frontend/src/modules/Organizations/services/listOrganizationGroups.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listOrganizationGroups = ({ filter, organizationUri }) => ({ +export const listOrganizationGroups = ({ filter, organizationUri }) => ({ variables: { organizationUri, filter @@ -28,4 +28,3 @@ const listOrganizationGroups = ({ filter, organizationUri }) => ({ } ` }); -export default listOrganizationGroups; diff --git a/frontend/src/api/Organization/listOrganizations.js b/frontend/src/modules/Organizations/services/listOrganizations.js similarity index 88% rename from frontend/src/api/Organization/listOrganizations.js rename to frontend/src/modules/Organizations/services/listOrganizations.js index c92d25d3b..d2c213285 100644 --- a/frontend/src/api/Organization/listOrganizations.js +++ b/frontend/src/modules/Organizations/services/listOrganizations.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listOrganizations = ({ filter }) => ({ +export const listOrganizations = ({ filter }) => ({ variables: { filter }, query: gql` query ListOrg($filter: OrganizationFilter) { @@ -33,5 +33,3 @@ const listOrganizations = ({ filter }) => ({ } ` }); - -export default listOrganizations; diff --git a/frontend/src/modules/Organizations/services/removeGroup.js b/frontend/src/modules/Organizations/services/removeGroup.js new file mode 100644 index 000000000..05d8dba5a --- /dev/null +++ b/frontend/src/modules/Organizations/services/removeGroup.js @@ -0,0 +1,21 @@ +import { gql } from 'apollo-boost'; + +export const removeGroupFromOrganization = ({ organizationUri, groupUri }) => ({ + variables: { + organizationUri, + groupUri + }, + mutation: gql` + mutation removeGroupFromOrganization( + $organizationUri: String! + $groupUri: String! 
+ ) { + removeGroupFromOrganization( + organizationUri: $organizationUri + groupUri: $groupUri + ) { + organizationUri + } + } + ` +}); diff --git a/frontend/src/api/Organization/updateOrganization.js b/frontend/src/modules/Organizations/services/updateOrganization.js similarity index 78% rename from frontend/src/api/Organization/updateOrganization.js rename to frontend/src/modules/Organizations/services/updateOrganization.js index fb4f8b12f..a42d42004 100644 --- a/frontend/src/api/Organization/updateOrganization.js +++ b/frontend/src/modules/Organizations/services/updateOrganization.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const updateOrganization = ({ organizationUri, input }) => ({ +export const updateOrganization = ({ organizationUri, input }) => ({ variables: { organizationUri, input @@ -18,5 +18,3 @@ const updateOrganization = ({ organizationUri, input }) => ({ } ` }); - -export default updateOrganization; diff --git a/frontend/src/views/Organizations/OrganizationCreateForm.js b/frontend/src/modules/Organizations/views/OrganizationCreateForm.js similarity index 95% rename from frontend/src/views/Organizations/OrganizationCreateForm.js rename to frontend/src/modules/Organizations/views/OrganizationCreateForm.js index ff41fbef2..6936e1948 100644 --- a/frontend/src/views/Organizations/OrganizationCreateForm.js +++ b/frontend/src/modules/Organizations/views/OrganizationCreateForm.js @@ -1,7 +1,4 @@ -import { Link as RouterLink, useNavigate } from 'react-router-dom'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; -import { useSnackbar } from 'notistack'; +import { LoadingButton } from '@mui/lab'; import { Box, Breadcrumbs, @@ -17,17 +14,20 @@ import { TextField, Typography } from '@mui/material'; +import { Formik } from 'formik'; +import { useSnackbar } from 'notistack'; import { Helmet } from 'react-helmet-async'; -import { LoadingButton } from '@mui/lab'; -import useClient from '../../hooks/useClient'; -import useGroups from '../../hooks/useGroups'; -import createOrganization from '../../api/Organization/createOrganization'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import useSettings from '../../hooks/useSettings'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import ChipInput from '../../components/TagsInput'; +import { Link as RouterLink, useNavigate } from 'react-router-dom'; +import * as Yup from 'yup'; +import { + ArrowLeftIcon, + ChevronRightIcon, + ChipInput, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient, useGroups } from 'services'; +import { createOrganization } from '../services'; const OrganizationCreateForm = (props) => { const navigate = useNavigate(); diff --git a/frontend/src/views/Organizations/OrganizationEditForm.js b/frontend/src/modules/Organizations/views/OrganizationEditForm.js similarity index 95% rename from frontend/src/views/Organizations/OrganizationEditForm.js rename to frontend/src/modules/Organizations/views/OrganizationEditForm.js index ec7ad9fd3..82a42e860 100644 --- a/frontend/src/views/Organizations/OrganizationEditForm.js +++ b/frontend/src/modules/Organizations/views/OrganizationEditForm.js @@ -1,6 +1,4 @@ -import { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; +import { LoadingButton } from '@mui/lab'; import { 
Box, Breadcrumbs, @@ -15,20 +13,22 @@ import { TextField, Typography } from '@mui/material'; +import CircularProgress from '@mui/material/CircularProgress'; import { Formik } from 'formik'; -import * as Yup from 'yup'; import { useSnackbar } from 'notistack'; -import CircularProgress from '@mui/material/CircularProgress'; -import { LoadingButton } from '@mui/lab'; -import useSettings from '../../hooks/useSettings'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import useClient from '../../hooks/useClient'; -import getOrganization from '../../api/Organization/getOrganization'; -import updateOrganization from '../../api/Organization/updateOrganization'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import ChipInput from '../../components/TagsInput'; +import { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; +import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; +import * as Yup from 'yup'; +import { + ArrowLeftIcon, + ChevronRightIcon, + ChipInput, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { getOrganization, useClient } from 'services'; +import { updateOrganization } from '../services'; const OrganizationEditForm = (props) => { const navigate = useNavigate(); diff --git a/frontend/src/views/Organizations/OrganizationList.js b/frontend/src/modules/Organizations/views/OrganizationList.js similarity index 87% rename from frontend/src/views/Organizations/OrganizationList.js rename to frontend/src/modules/Organizations/views/OrganizationList.js index 200a04329..f7ee98429 100644 --- a/frontend/src/views/Organizations/OrganizationList.js +++ b/frontend/src/modules/Organizations/views/OrganizationList.js @@ -1,5 +1,3 @@ -import { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink } from 'react-router-dom'; import { Box, Breadcrumbs, @@ -13,21 +11,24 @@ import { Typography } from '@mui/material'; import CircularProgress from '@mui/material/CircularProgress'; +import { useCallback, useEffect, useState } from 'react'; import { Helmet } from 'react-helmet-async'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import listOrganizations from '../../api/Organization/listOrganizations'; -import SearchIcon from '../../icons/Search'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import PlusIcon from '../../icons/Plus'; -import useSettings from '../../hooks/useSettings'; -import OrganizationListItem from './OrganizationListItem'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; +import { Link as RouterLink } from 'react-router-dom'; +import { + ChevronRightIcon, + Defaults, + PlusIcon, + SearchIcon, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { listOrganizations } from '../services'; +import { OrganizationListItem } from '../components'; const OrganizationList = () => { - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); + const [items, setItems] = useState(Defaults.pagedResponse); + const [filter, setFilter] = useState(Defaults.filter); const { settings } = useSettings(); const dispatch = useDispatch(); const [inputValue, setInputValue] = useState(''); @@ 
-35,7 +36,7 @@ const OrganizationList = () => { const client = useClient(); const fetchItems = useCallback(async () => { setLoading(true); - const response = await client.query(listOrganizations({filter})); + const response = await client.query(listOrganizations({ filter })); if (!response.errors) { setItems(response.data.listOrganizations); } else { diff --git a/frontend/src/views/Organizations/OrganizationView.js b/frontend/src/modules/Organizations/views/OrganizationView.js similarity index 90% rename from frontend/src/views/Organizations/OrganizationView.js rename to frontend/src/modules/Organizations/views/OrganizationView.js index 35f424033..5a8095e5b 100644 --- a/frontend/src/views/Organizations/OrganizationView.js +++ b/frontend/src/modules/Organizations/views/OrganizationView.js @@ -1,6 +1,9 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; +import { + ArchiveOutlined, + Info, + SupervisedUserCircleRounded, + Warning +} from '@mui/icons-material'; import { Box, Breadcrumbs, @@ -16,26 +19,25 @@ import { Typography } from '@mui/material'; import CircularProgress from '@mui/material/CircularProgress'; -import { - ArchiveOutlined, - Info, - SupervisedUserCircleRounded, - Warning -} from '@mui/icons-material'; import { useSnackbar } from 'notistack'; +import React, { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; import { FaAws } from 'react-icons/fa'; -import useSettings from '../../hooks/useSettings'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import PencilAltIcon from '../../icons/PencilAlt'; -import useClient from '../../hooks/useClient'; -import getOrganization from '../../api/Organization/getOrganization'; -import OrganizationEnvironments from './OrganizationEnvironments'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import archiveOrganization from '../../api/Organization/archiveOrganization'; -import ArchiveObjectWithFrictionModal from '../../components/ArchiveObjectWithFrictionModal'; -import OrganizationTeams from './OrganizationTeams'; -import OrganizationOverview from './OrganizationOverview'; +import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; +import { + ArchiveObjectWithFrictionModal, + ChevronRightIcon, + PencilAltIcon, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { getOrganization, useClient } from 'services'; +import { archiveOrganization } from '../services'; +import { + OrganizationEnvironments, + OrganizationOverview, + OrganizationTeams +} from '../components'; const tabs = [ { label: 'Overview', value: 'overview', icon: }, diff --git a/frontend/src/views/Pipelines/PipelineCICD.js b/frontend/src/modules/Pipelines/components/PipelineCICD.js similarity index 90% rename from frontend/src/views/Pipelines/PipelineCICD.js rename to frontend/src/modules/Pipelines/components/PipelineCICD.js index 1bc5ff168..65b390f6e 100644 --- a/frontend/src/views/Pipelines/PipelineCICD.js +++ b/frontend/src/modules/Pipelines/components/PipelineCICD.js @@ -1,5 +1,5 @@ -import PropTypes from 'prop-types'; -import { useState } from 'react'; +import { CopyAll } from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; import { Box, Card, @@ -9,22 +9,18 @@ import { IconButton, List, ListItem, - Chip, Typography } from '@mui/material'; -import 
{ LoadingButton } from '@mui/lab'; -import { CopyToClipboard } from 'react-copy-to-clipboard/lib/Component'; -import { CopyAll } from '@mui/icons-material'; import { useTheme } from '@mui/styles'; import { useSnackbar } from 'notistack'; -import useClient from '../../hooks/useClient'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import getDataPipelineCredsLinux from '../../api/DataPipeline/getDataPipelineCredsLinux'; -import ChipInput from "../../components/TagsInput"; -import Label from "../../components/Label"; +import PropTypes from 'prop-types'; +import { useState } from 'react'; +import { CopyToClipboard } from 'react-copy-to-clipboard/lib/Component'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { getDataPipelineCredsLinux } from '../services'; -const PipelineCICD = (props) => { +export const PipelineCICD = (props) => { const { pipeline } = props; const client = useClient(); const theme = useTheme(); @@ -159,7 +155,9 @@ const PipelineCICD = (props) => { copyNotification()} - text={`git clone codecommit::${pipeline.environment.region}:${'//'}${pipeline.repo}`} + text={`git clone codecommit::${ + pipeline.environment.region + }:${'//'}${pipeline.repo}`} > { /> - {`git clone codecommit::${pipeline.environment.region}:${'//'}${pipeline.repo}`} - + {`git clone codecommit::${pipeline.environment.region}:${'//'}${ + pipeline.repo + }`} + @@ -204,5 +204,3 @@ PipelineCICD.propTypes = { // @ts-ignore pipeline: PropTypes.object.isRequired }; - -export default PipelineCICD; diff --git a/frontend/src/modules/Pipelines/components/PipelineEnvironmentCreateForm.js b/frontend/src/modules/Pipelines/components/PipelineEnvironmentCreateForm.js new file mode 100644 index 000000000..b19abab5e --- /dev/null +++ b/frontend/src/modules/Pipelines/components/PipelineEnvironmentCreateForm.js @@ -0,0 +1,293 @@ +import { DeleteOutlined } from '@mui/icons-material'; +import { + Box, + Button, + Card, + CardContent, + CardHeader, + Divider, + Grid, + IconButton, + MenuItem, + Table, + TableBody, + TableCell, + TableHead, + TableRow, + TextField +} from '@mui/material'; +import PropTypes from 'prop-types'; +import React, { useEffect, useState } from 'react'; +import { Defaults } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { listEnvironmentGroups, useClient } from 'services'; +import { createDataPipelineEnvironment } from '../services'; + +export const PipelineEnvironmentCreateForm = (props) => { + const { + environmentOptions, + triggerEnvSubmit, + pipelineUri, + handleCountEnvironmentValid + } = props; + const dispatch = useDispatch(); + const client = useClient(); + const [kvEnvs, setKeyValueEnvs] = useState([]); + const [mapGroups, setMapGroups] = useState(new Map()); + const stageOps = [ + { value: 'dev', label: 'dev' }, + { value: 'test', label: 'test' }, + { value: 'val', label: 'val' }, + { value: 'prod', label: 'prod' }, + { value: 'other', label: 'other' } + ]; + + const environmentOps = + environmentOptions && environmentOptions.length > 0 + ? 
environmentOptions + : [ + { environmentUri: 'someUri', label: 'some' }, + { environmentUri: 'someUri', label: 'some2' } + ]; + + const fetchGroups = async (environment) => { + try { + const response = await client.query( + listEnvironmentGroups({ + filter: Defaults.selectListFilter, + environmentUri: environment.environmentUri + }) + ); + + if (!response.errors) { + setMapGroups( + new Map( + mapGroups.set( + environment.environmentUri, + response.data.listEnvironmentGroups.nodes + ) + ) + ); //Array of groups (Objects) + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (e) { + dispatch({ type: SET_ERROR, error: e.message }); + } + }; + + const handleAddEnvRow = () => { + if (kvEnvs.length <= 40) { + const item = { + stage: '', + env: '', + team: '' + }; + setKeyValueEnvs((prevState) => [...prevState, item]); + } else { + dispatch({ + type: SET_ERROR, + error: 'You cannot add more than 40 development stages' + }); + } + }; + + const handleChange = (idx, field) => (e) => { + const { value } = e.target; + + setKeyValueEnvs((prevstate) => { + const rows = [...prevstate]; + if (field === 'stage') { + rows[idx].stage = value; + } else if (field === 'env') { + rows[idx].environmentLabel = value.label; + rows[idx].environmentUri = value.environmentUri; + } else { + rows[idx].samlGroupName = value; + } + return rows; + }); + }; + + const handleRemoveEnvRow = (idx) => { + setKeyValueEnvs((prevstate) => { + const rows = [...prevstate]; + rows.splice(idx, 1); + return rows; + }); + }; + + async function submit(element, index) { + try { + const response = await client.mutate( + createDataPipelineEnvironment({ + input: { + stage: element.stage, + order: index + 1, + pipelineUri: pipelineUri, + environmentLabel: element.environmentLabel, + environmentUri: element.environmentUri, + samlGroupName: element.samlGroupName + } + }) + ); + if (!response.errors) { + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (err) { + console.error(err); + dispatch({ type: SET_ERROR, error: err.message }); + } + } + + useEffect(() => { + if (client && triggerEnvSubmit && pipelineUri && kvEnvs.length > 0) { + kvEnvs.forEach((element, index) => submit(element, index)); + } + if (client && environmentOptions.length > 0) { + environmentOptions.forEach((element) => fetchGroups(element)); + } + }, [client, dispatch, triggerEnvSubmit, pipelineUri, environmentOptions]); + + useEffect(() => { + if (kvEnvs.length > 0) { + handleCountEnvironmentValid(true); + } else { + handleCountEnvironmentValid(false); + } + }, [kvEnvs.length]); + + return ( + <> + + + + + + + + + + + + + + + + {kvEnvs && kvEnvs.length > 0 && ( + + + Order + Development Stage + Environment + Team + + + )} + + {kvEnvs.map((item, idx) => ( + <> + + + + + + + {stageOps.map((stage) => ( + + {stage.label} + + ))} + + + + + {environmentOps.map((environment) => ( + + {environment.label} + + ))} + + + + + {mapGroups.get(kvEnvs[idx].environmentUri) && + mapGroups + .get(kvEnvs[idx].environmentUri) + .map((g) => ( + + {g.groupUri} + + ))} + + + + + + ))} + +
+ { + handleRemoveEnvRow(idx); + }} + > + + +
+ + + +
+
+
+
+
+
+ + ); +}; +PipelineEnvironmentCreateForm.propTypes = { + environmentOptions: PropTypes.array.isRequired, + triggerEnvSubmit: PropTypes.bool.isRequired, + pipelineUri: PropTypes.string.isRequired, + handleCountEnvironmentValid: PropTypes.func.isRequired +}; diff --git a/frontend/src/modules/Pipelines/components/PipelineEnvironmentEditForm.js b/frontend/src/modules/Pipelines/components/PipelineEnvironmentEditForm.js new file mode 100644 index 000000000..ce4110633 --- /dev/null +++ b/frontend/src/modules/Pipelines/components/PipelineEnvironmentEditForm.js @@ -0,0 +1,482 @@ +import { DeleteOutlined } from '@mui/icons-material'; +import { + Box, + Button, + Card, + CardContent, + CardHeader, + Divider, + Grid, + IconButton, + MenuItem, + Table, + TableBody, + TableCell, + TableHead, + TableRow, + TextField +} from '@mui/material'; +import PropTypes from 'prop-types'; +import React, { useEffect, useState } from 'react'; +import { Defaults } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { listEnvironmentGroups, useClient } from 'services'; +import { + createDataPipelineEnvironment, + deleteDataPipelineEnvironment, + updateDataPipelineEnvironment +} from '../services'; + +export const PipelineEnvironmentEditForm = (props) => { + const { + environmentOptions, + triggerEnvSubmit, + pipelineUri, + pipeline, + handleCountEnvironmentValid + } = props; + const dispatch = useDispatch(); + const client = useClient(); + const [kvEnvs, setKeyValueEnvs] = useState([]); + const [envsToRemove, setEnvsToRemove] = useState([]); + const [environments, setEnvironments] = useState([]); + const [mapGroups, setMapGroups] = useState(new Map()); + const stageOps = [ + { value: 'dev', label: 'dev' }, + { value: 'test', label: 'test' }, + { value: 'val', label: 'val' }, + { value: 'prod', label: 'prod' }, + { value: 'other', label: 'other' } + ]; + + const environmentOps = + environmentOptions && environmentOptions.length > 0 + ? 
environmentOptions + : [ + { environmentUri: 'someUri', label: 'some' }, + { environmentUri: 'someUri', label: 'some2' } + ]; + + useEffect(() => { + if (client && pipeline) { + const environmentsSorted = pipeline.developmentEnvironments.nodes.sort( + (a, b) => { + return a.order - b.order; + } + ); + if (environmentsSorted) { + environmentsSorted.map((e) => handleExistingEnvRow(e)); + } + } + }, [client, pipeline]); + + const fetchGroups = async (environment) => { + try { + const response = await client.query( + listEnvironmentGroups({ + filter: Defaults.selectListFilter, + environmentUri: environment.environmentUri + }) + ); + + if (!response.errors) { + setMapGroups( + new Map( + mapGroups.set( + environment.environmentUri, + response.data.listEnvironmentGroups.nodes + ) + ) + ); //Array of groups (Objects) + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (e) { + dispatch({ type: SET_ERROR, error: e.message }); + } + }; + + const handleExistingEnvRow = (e) => { + if (kvEnvs.length <= 40) { + const item = { + stage: e.stage, + env: e.environmentLabel, + environmentLabel: e.environmentLabel, + environmentUri: e.environmentUri, + envPipelineUri: e.envPipelineUri, + samlGroupName: e.samlGroupName, + team: e.samlGroupName, + AwsAccountId: e.AwsAccountId + }; + setEnvironments((prevState) => [...prevState, item]); + } else { + dispatch({ + type: SET_ERROR, + error: 'You cannot add more than 40 development stages' + }); + } + }; + + const handleAddEnvRow = () => { + if (kvEnvs.length <= 40) { + const item = { + stage: '', + env: '', + team: '' + }; + setKeyValueEnvs((prevState) => [...prevState, item]); + } else { + dispatch({ + type: SET_ERROR, + error: 'You cannot add more than 40 development stages' + }); + } + }; + + const handleChange = (idx, field) => (e) => { + const { value } = e.target; + + setKeyValueEnvs((prevstate) => { + const rows = [...prevstate]; + if (field === 'stage') { + rows[idx].stage = value; + } else if (field === 'env') { + rows[idx].environmentLabel = value.label; + rows[idx].environmentUri = value.environmentUri; + } else { + rows[idx].samlGroupName = value; + } + return rows; + }); + }; + + const handleRemoveEnvRow = (idx) => { + setKeyValueEnvs((prevstate) => { + const rows = [...prevstate]; + rows.splice(idx, 1); + return rows; + }); + }; + + const handleRemoveExistingEnvRow = (idx) => { + setEnvironments((prevstate) => { + const rows = [...prevstate]; + setEnvsToRemove((prevState) => [...prevState, rows[idx]]); + rows.splice(idx, 1); + return rows; + }); + }; + + async function submit(element, index) { + try { + const response = await client.mutate( + createDataPipelineEnvironment({ + input: { + stage: element.stage, + order: index + environments.length + 1, + pipelineUri: pipelineUri, + environmentLabel: element.environmentLabel, + environmentUri: element.environmentUri, + samlGroupName: element.samlGroupName + } + }) + ); + if (!response.errors) { + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (err) { + console.error(err); + dispatch({ type: SET_ERROR, error: err.message }); + } + } + + async function update(element, index) { + try { + const response = await client.mutate( + updateDataPipelineEnvironment({ + input: { + stage: element.stage, + order: index + 1, + pipelineUri: pipelineUri, + environmentLabel: element.environmentLabel, + environmentUri: element.environmentUri, + samlGroupName: element.samlGroupName + } + }) + ); + if (!response.errors) { + } else { + 
dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (err) { + console.error(err); + dispatch({ type: SET_ERROR, error: err.message }); + } + } + + async function deleteEnv(element, index) { + try { + const response = await client.mutate( + deleteDataPipelineEnvironment({ + envPipelineUri: element.envPipelineUri + }) + ); + if (!response.errors) { + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + } catch (err) { + console.error(err); + dispatch({ type: SET_ERROR, error: err.message }); + } + } + + useEffect(() => { + if (client && triggerEnvSubmit && pipelineUri && envsToRemove.length > 0) { + envsToRemove.forEach((element, index) => deleteEnv(element, index)); + } + if (client && triggerEnvSubmit && pipelineUri && environments.length > 0) { + environments.forEach((element, index) => update(element, index)); + } + if (client && triggerEnvSubmit && pipelineUri && kvEnvs.length > 0) { + kvEnvs.forEach((element, index) => submit(element, index)); + } + if (client && environmentOptions.length > 0) { + environmentOptions.forEach((element) => fetchGroups(element)); + } + }, [client, dispatch, triggerEnvSubmit, pipelineUri, environmentOptions]); + + useEffect(() => { + if (kvEnvs.length + environments.length > 0) { + handleCountEnvironmentValid(true); + } else { + handleCountEnvironmentValid(false); + } + }, [kvEnvs.length, environments.length]); + + return ( + <> + + + + + + + + + + + + + + + + + {environments && environments.length > 0 && ( + + + Order + Development Stage + Environment + Team + AWS Account + + + )} + + {environments.map((item, idx) => ( + <> + + + + + + + + + + + + + + + + + + + + ))} + +
+ { + handleRemoveExistingEnvRow(idx); + }} + > + + +
+
+
+
+
+ + + + + + + + + + + + + + {kvEnvs && kvEnvs.length > 0 && ( + + + Order + Development Stage + Environment + Team + + + )} + + {kvEnvs.map((item, idx) => ( + <> + + + + + + + {stageOps.map((stage) => ( + + {stage.label} + + ))} + + + + + {environmentOps.map((environment) => ( + + {environment.label} + + ))} + + + + + {mapGroups.get(kvEnvs[idx].environmentUri) && + mapGroups + .get(kvEnvs[idx].environmentUri) + .map((g) => ( + + {g.groupUri} + + ))} + + + + + + ))} + +
+ { + handleRemoveEnvRow(idx); + }} + > + + +
+ + + +
+
+
+
+
+
+ + ); +}; +PipelineEnvironmentEditForm.propTypes = { + environmentOptions: PropTypes.array.isRequired, + triggerEnvSubmit: PropTypes.bool.isRequired, + pipelineUri: PropTypes.string.isRequired, + pipeline: PropTypes.object.isRequired, + handleCountEnvironmentValid: PropTypes.func.isRequired +}; diff --git a/frontend/src/modules/Pipelines/components/PipelineEnvironments.js b/frontend/src/modules/Pipelines/components/PipelineEnvironments.js new file mode 100644 index 000000000..d98d96dd6 --- /dev/null +++ b/frontend/src/modules/Pipelines/components/PipelineEnvironments.js @@ -0,0 +1,89 @@ +import { + Box, + Card, + CardContent, + CardHeader, + Divider, + Grid, + Table, + TableBody, + TableCell, + TableHead, + TableRow +} from '@mui/material'; +import PropTypes from 'prop-types'; +import React, { useEffect, useState } from 'react'; +import { useClient } from 'services'; + +export const PipelineEnvironments = (props) => { + const { pipeline } = props; + const client = useClient(); + const [environments, setEnvironments] = useState([]); + + useEffect(() => { + if (client && pipeline) { + const environmentsSorted = pipeline.developmentEnvironments.nodes.sort( + (a, b) => { + return a.order - b.order; + } + ); + setEnvironments(environmentsSorted); + } + }, [client, pipeline]); + + return ( + <> + + + + + + + + + + + + + + + + + {environments > 0 && ( + + + Order + Development Stage + Environment + Team + AWS Account + + + )} + + {environments && + environments.map((e) => ( + <> + + {e.order} + {e.stage} + {e.environmentLabel} + {e.samlGroupName} + {e.AwsAccountId} + + + ))} + +
+
+
+
+
+
+
+ + ); +}; +PipelineEnvironments.propTypes = { + pipeline: PropTypes.object.isRequired +}; diff --git a/frontend/src/views/Pipelines/PipelineListItem.js b/frontend/src/modules/Pipelines/components/PipelineListItem.js similarity index 96% rename from frontend/src/views/Pipelines/PipelineListItem.js rename to frontend/src/modules/Pipelines/components/PipelineListItem.js index e6e8f8d89..493c40362 100644 --- a/frontend/src/views/Pipelines/PipelineListItem.js +++ b/frontend/src/modules/Pipelines/components/PipelineListItem.js @@ -8,19 +8,16 @@ import { Tooltip, Typography } from '@mui/material'; -import * as FiIcons from 'react-icons/fi'; -import * as FaIcons from 'react-icons/fa'; -import { Link as RouterLink } from 'react-router-dom'; import PropTypes from 'prop-types'; -import { useNavigate } from 'react-router'; -import * as BsIcons from 'react-icons/bs'; import React from 'react'; -import IconAvatar from '../../components/IconAvatar'; -import StackStatus from '../../components/StackStatus'; -import Label from '../../components/Label'; -import useCardStyle from '../../hooks/useCardStyle'; +import * as BsIcons from 'react-icons/bs'; +import * as FaIcons from 'react-icons/fa'; +import * as FiIcons from 'react-icons/fi'; +import { useNavigate } from 'react-router'; +import { Link as RouterLink } from 'react-router-dom'; +import { IconAvatar, Label, StackStatus, useCardStyle } from 'design'; -const PipelineListItem = (props) => { +export const PipelineListItem = (props) => { const { pipeline } = props; const classes = useCardStyle(); const navigate = useNavigate(); @@ -250,4 +247,3 @@ const PipelineListItem = (props) => { PipelineListItem.propTypes = { pipeline: PropTypes.object.isRequired }; -export default PipelineListItem; diff --git a/frontend/src/modules/Pipelines/components/PipelineOverview.js b/frontend/src/modules/Pipelines/components/PipelineOverview.js new file mode 100644 index 000000000..d06d95451 --- /dev/null +++ b/frontend/src/modules/Pipelines/components/PipelineOverview.js @@ -0,0 +1,56 @@ +import { Box, Grid } from '@mui/material'; +import PropTypes from 'prop-types'; +import { ObjectBrief, ObjectMetadata } from 'design'; +import { PipelineCICD, PipelineEnvironments } from '../components'; + +export const PipelineOverview = (props) => { + const { pipeline, ...other } = props; + + return ( + + + + + + + + + + + + + + + + + + + + + + 0 + ? 
pipeline.tags + : ['-'] + } + /> + + + + + ); +}; + +PipelineOverview.propTypes = { + pipeline: PropTypes.object.isRequired +}; diff --git a/frontend/src/modules/Pipelines/components/index.js b/frontend/src/modules/Pipelines/components/index.js new file mode 100644 index 000000000..c161fac4e --- /dev/null +++ b/frontend/src/modules/Pipelines/components/index.js @@ -0,0 +1,6 @@ +export * from './PipelineCICD'; +export * from './PipelineEnvironmentCreateForm'; +export * from './PipelineEnvironmentEditForm'; +export * from './PipelineEnvironments'; +export * from './PipelineListItem'; +export * from './PipelineOverview'; diff --git a/frontend/src/api/DataPipeline/createDataPipeline.js b/frontend/src/modules/Pipelines/services/createDataPipeline.js similarity index 77% rename from frontend/src/api/DataPipeline/createDataPipeline.js rename to frontend/src/modules/Pipelines/services/createDataPipeline.js index cdff7c2f7..b066a20f9 100644 --- a/frontend/src/api/DataPipeline/createDataPipeline.js +++ b/frontend/src/modules/Pipelines/services/createDataPipeline.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const createDataPipeline = ({ input }) => ({ +export const createDataPipeline = ({ input }) => ({ variables: { input }, @@ -15,5 +15,3 @@ const createDataPipeline = ({ input }) => ({ } ` }); - -export default createDataPipeline; diff --git a/frontend/src/modules/Pipelines/services/createDataPipelineEnvironment.js b/frontend/src/modules/Pipelines/services/createDataPipelineEnvironment.js new file mode 100644 index 000000000..d793d3bb3 --- /dev/null +++ b/frontend/src/modules/Pipelines/services/createDataPipelineEnvironment.js @@ -0,0 +1,24 @@ +import { gql } from 'apollo-boost'; + +export const createDataPipelineEnvironment = ({ input }) => ({ + variables: { + input + }, + mutation: gql` + mutation createDataPipelineEnvironment( + $input: NewDataPipelineEnvironmentInput + ) { + createDataPipelineEnvironment(input: $input) { + envPipelineUri + environmentUri + environmentLabel + pipelineUri + pipelineLabel + stage + region + AwsAccountId + samlGroupName + } + } + ` +}); diff --git a/frontend/src/api/DataPipeline/deleteDataPipeline.js b/frontend/src/modules/Pipelines/services/deleteDataPipeline.js similarity index 76% rename from frontend/src/api/DataPipeline/deleteDataPipeline.js rename to frontend/src/modules/Pipelines/services/deleteDataPipeline.js index 397444670..2f81bbc7c 100644 --- a/frontend/src/api/DataPipeline/deleteDataPipeline.js +++ b/frontend/src/modules/Pipelines/services/deleteDataPipeline.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const deleteDataPipeline = ({ DataPipelineUri, deleteFromAWS }) => ({ +export const deleteDataPipeline = ({ DataPipelineUri, deleteFromAWS }) => ({ variables: { DataPipelineUri, deleteFromAWS @@ -17,5 +17,3 @@ const deleteDataPipeline = ({ DataPipelineUri, deleteFromAWS }) => ({ } ` }); - -export default deleteDataPipeline; diff --git a/frontend/src/modules/Pipelines/services/deleteDataPipelineEnvironment.js b/frontend/src/modules/Pipelines/services/deleteDataPipelineEnvironment.js new file mode 100644 index 000000000..1d011167a --- /dev/null +++ b/frontend/src/modules/Pipelines/services/deleteDataPipelineEnvironment.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const deleteDataPipelineEnvironment = ({ envPipelineUri }) => ({ + variables: { + envPipelineUri + }, + mutation: gql` + mutation deleteDataPipelineEnvironment($envPipelineUri: String!) 
{ + deleteDataPipelineEnvironment(envPipelineUri: $envPipelineUri) + } + ` +}); diff --git a/frontend/src/api/DataPipeline/getDataPipeline.js b/frontend/src/modules/Pipelines/services/getDataPipeline.js similarity index 94% rename from frontend/src/api/DataPipeline/getDataPipeline.js rename to frontend/src/modules/Pipelines/services/getDataPipeline.js index b83d7bf10..a20804e49 100644 --- a/frontend/src/api/DataPipeline/getDataPipeline.js +++ b/frontend/src/modules/Pipelines/services/getDataPipeline.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getDataPipeline = (DataPipelineUri) => ({ +export const getDataPipeline = (DataPipelineUri) => ({ variables: { DataPipelineUri }, @@ -66,5 +66,3 @@ const getDataPipeline = (DataPipelineUri) => ({ } ` }); - -export default getDataPipeline; diff --git a/frontend/src/modules/Pipelines/services/getDataPipelineCredsLinux.js b/frontend/src/modules/Pipelines/services/getDataPipelineCredsLinux.js new file mode 100644 index 000000000..6566a678c --- /dev/null +++ b/frontend/src/modules/Pipelines/services/getDataPipelineCredsLinux.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const getDataPipelineCredsLinux = (DataPipelineUri) => ({ + variables: { + DataPipelineUri + }, + query: gql` + query GetDataPipelineCredsLinux($DataPipelineUri: String!) { + getDataPipelineCredsLinux(DataPipelineUri: $DataPipelineUri) + } + ` +}); diff --git a/frontend/src/modules/Pipelines/services/index.js b/frontend/src/modules/Pipelines/services/index.js new file mode 100644 index 000000000..6d00e073a --- /dev/null +++ b/frontend/src/modules/Pipelines/services/index.js @@ -0,0 +1,9 @@ +export * from './createDataPipeline'; +export * from './createDataPipelineEnvironment'; +export * from './deleteDataPipeline'; +export * from './deleteDataPipelineEnvironment'; +export * from './getDataPipeline'; +export * from './getDataPipelineCredsLinux'; +export * from './listDataPipelines'; +export * from './updateDataPipeline'; +export * from './updateDataPipelineEnvironment'; diff --git a/frontend/src/api/DataPipeline/listDataPipelines.js b/frontend/src/modules/Pipelines/services/listDataPipelines.js similarity index 92% rename from frontend/src/api/DataPipeline/listDataPipelines.js rename to frontend/src/modules/Pipelines/services/listDataPipelines.js index 362618d91..c16e5543f 100644 --- a/frontend/src/api/DataPipeline/listDataPipelines.js +++ b/frontend/src/modules/Pipelines/services/listDataPipelines.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const searchDataPipelines = (filter) => ({ +export const listDataPipelines = (filter) => ({ variables: { filter }, @@ -50,5 +50,3 @@ const searchDataPipelines = (filter) => ({ } ` }); - -export default searchDataPipelines; diff --git a/frontend/src/api/DataPipeline/updateDataPipeline.js b/frontend/src/modules/Pipelines/services/updateDataPipeline.js similarity index 80% rename from frontend/src/api/DataPipeline/updateDataPipeline.js rename to frontend/src/modules/Pipelines/services/updateDataPipeline.js index d805fdc54..f0a6ba554 100644 --- a/frontend/src/api/DataPipeline/updateDataPipeline.js +++ b/frontend/src/modules/Pipelines/services/updateDataPipeline.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const updateDataPipeline = ({ DataPipelineUri, input }) => ({ +export const updateDataPipeline = ({ DataPipelineUri, input }) => ({ variables: { DataPipelineUri, input @@ -20,5 +20,3 @@ const updateDataPipeline = ({ DataPipelineUri, input }) => ({ } ` }); - -export default updateDataPipeline; 
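Note on usage (illustrative sketch, not part of the diff): the relocated Pipelines service helpers above all share one shape, a factory that returns variables plus a gql query or mutation, which components pass straight to the Apollo client wrapper. The sketch below shows that pattern with getDataPipelineCredsLinux; the hook name useFetchPipelineCreds is hypothetical, and only the useClient, useDispatch and SET_ERROR utilities already used throughout these modules are assumed.

import { SET_ERROR, useDispatch } from 'globalErrors';
import { useClient } from 'services';
import { getDataPipelineCredsLinux } from '../services';

// Illustrative hook: fetch the credentials payload for a pipeline through the
// relocated service helper and surface errors via the global error reducer.
export const useFetchPipelineCreds = () => {
  const client = useClient();
  const dispatch = useDispatch();

  return async (dataPipelineUri) => {
    const response = await client.query(
      getDataPipelineCredsLinux(dataPipelineUri)
    );
    if (response.errors) {
      dispatch({ type: SET_ERROR, error: response.errors[0].message });
      return null;
    }
    // The GetDataPipelineCredsLinux query resolves to a single scalar value.
    return response.data.getDataPipelineCredsLinux;
  };
};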
diff --git a/frontend/src/modules/Pipelines/services/updateDataPipelineEnvironment.js b/frontend/src/modules/Pipelines/services/updateDataPipelineEnvironment.js new file mode 100644 index 000000000..4bdb69234 --- /dev/null +++ b/frontend/src/modules/Pipelines/services/updateDataPipelineEnvironment.js @@ -0,0 +1,24 @@ +import { gql } from 'apollo-boost'; + +export const updateDataPipelineEnvironment = ({ input }) => ({ + variables: { + input + }, + mutation: gql` + mutation updateDataPipelineEnvironment( + $input: NewDataPipelineEnvironmentInput + ) { + updateDataPipelineEnvironment(input: $input) { + envPipelineUri + environmentUri + environmentLabel + pipelineUri + pipelineLabel + stage + region + AwsAccountId + samlGroupName + } + } + ` +}); diff --git a/frontend/src/views/Pipelines/PipelineCreateForm.js b/frontend/src/modules/Pipelines/views/PipelineCreateForm.js similarity index 82% rename from frontend/src/views/Pipelines/PipelineCreateForm.js rename to frontend/src/modules/Pipelines/views/PipelineCreateForm.js index 00d195265..fdcc1c286 100644 --- a/frontend/src/views/Pipelines/PipelineCreateForm.js +++ b/frontend/src/modules/Pipelines/views/PipelineCreateForm.js @@ -1,7 +1,4 @@ -import { Link as RouterLink, useNavigate } from 'react-router-dom'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; -import { useSnackbar } from 'notistack'; +import { LoadingButton } from '@mui/lab'; import { Box, Breadcrumbs, @@ -18,24 +15,25 @@ import { TextField, Typography } from '@mui/material'; -import { Helmet } from 'react-helmet-async'; -import { LoadingButton } from '@mui/lab'; +import { Formik } from 'formik'; +import { useSnackbar } from 'notistack'; import { useCallback, useEffect, useState } from 'react'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import useSettings from '../../hooks/useSettings'; -import listEnvironments from '../../api/Environment/listEnvironments'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import ChipInput from '../../components/TagsInput'; -import createDataPipeline from '../../api/DataPipeline/createDataPipeline'; -import listEnvironmentGroups from '../../api/Environment/listEnvironmentGroups'; -import * as Defaults from '../../components/defaults'; -import PipelineEnvironmentCreateForm from "./PipelineEnvironmentCreateForm"; - +import { Helmet } from 'react-helmet-async'; +import { Link as RouterLink, useNavigate } from 'react-router-dom'; +import * as Yup from 'yup'; +import { + ArrowLeftIcon, + ChevronRightIcon, + ChipInput, + Defaults, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { listEnvironmentGroups, listEnvironments, useClient } from 'services'; +import { createDataPipeline } from '../services'; +import { PipelineEnvironmentCreateForm } from '../components'; -const PipelineCrateForm = (props) => { +const PipelineCreateForm = (props) => { const navigate = useNavigate(); const { enqueueSnackbar } = useSnackbar(); const dispatch = useDispatch(); @@ -44,19 +42,23 @@ const PipelineCrateForm = (props) => { const [loading, setLoading] = useState(true); const [groupOptions, setGroupOptions] = useState([]); const [environmentOptions, setEnvironmentOptions] = useState([]); - const devOptions =[{value:"cdk-trunk", label:"CDK Pipelines - Trunk-based"},{value:"trunk", label:"CodePipeline - Trunk-based"},{value:"gitflow", label:"CodePipeline 
- Gitflow"}];/*DBT Pipelines*/ + const devOptions = [ + { value: 'cdk-trunk', label: 'CDK Pipelines - Trunk-based' }, + { value: 'trunk', label: 'CodePipeline - Trunk-based' }, + { value: 'gitflow', label: 'CodePipeline - Gitflow' } + ]; /*DBT Pipelines*/ const [triggerEnvSubmit, setTriggerEnvSubmit] = useState(false); const [countEnvironmentsValid, setCountEnvironmentsValid] = useState(false); const [pipelineUri, setPipelineUri] = useState(''); - const handleCountEnvironmentValid = state => { + const handleCountEnvironmentValid = (state) => { setCountEnvironmentsValid(state); - }; - + }; + const fetchEnvironments = useCallback(async () => { setLoading(true); const response = await client.query( - listEnvironments({ filter: Defaults.SelectListFilter }) + listEnvironments({ filter: Defaults.selectListFilter }) ); if (!response.errors) { setEnvironmentOptions( @@ -76,7 +78,7 @@ const PipelineCrateForm = (props) => { try { const response = await client.query( listEnvironmentGroups({ - filter: Defaults.SelectListFilter, + filter: Defaults.selectListFilter, environmentUri }) ); @@ -102,55 +104,57 @@ const PipelineCrateForm = (props) => { ); } }, [client, dispatch, fetchEnvironments]); - + async function submit(values, setStatus, setSubmitting, setErrors) { - if (!countEnvironmentsValid){ - dispatch({ type: SET_ERROR, error: "At least one deployment environment is required" }) - }else{ - try { - const response = await client.mutate( - createDataPipeline({ - input: { - label: values.label, - environmentUri: values.environment.environmentUri, - description: values.description, - SamlGroupName: values.SamlGroupName, - tags: values.tags, - devStrategy: values.devStrategy - } - }) + if (!countEnvironmentsValid) { + dispatch({ + type: SET_ERROR, + error: 'At least one deployment environment is required' + }); + } else { + try { + const response = await client.mutate( + createDataPipeline({ + input: { + label: values.label, + environmentUri: values.environment.environmentUri, + description: values.description, + SamlGroupName: values.SamlGroupName, + tags: values.tags, + devStrategy: values.devStrategy + } + }) + ); + if (!response.errors) { + setStatus({ success: true }); + setTriggerEnvSubmit(true); + setPipelineUri(response.data.createDataPipeline.DataPipelineUri); + setSubmitting(false); + enqueueSnackbar('Pipeline creation started', { + anchorOrigin: { + horizontal: 'right', + vertical: 'top' + }, + variant: 'success' + }); + navigate( + `/console/pipelines/${response.data.createDataPipeline.DataPipelineUri}` ); - if (!response.errors) { - setStatus({ success: true }); - setTriggerEnvSubmit(true); - setPipelineUri(response.data.createDataPipeline.DataPipelineUri); - setSubmitting(false); - enqueueSnackbar('Pipeline creation started', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - navigate( - `/console/pipelines/${response.data.createDataPipeline.DataPipelineUri}` - ); - } else { - setTriggerEnvSubmit(false); - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - console.error(err); - setStatus({ success: false }); + } else { setTriggerEnvSubmit(false); - setErrors({ submit: err.message }); - setSubmitting(false); - dispatch({ type: SET_ERROR, error: err.message }); + dispatch({ type: SET_ERROR, error: response.errors[0].message }); } + } catch (err) { + console.error(err); + setStatus({ success: false }); + setTriggerEnvSubmit(false); + setErrors({ submit: err.message }); + setSubmitting(false); + dispatch({ type: 
SET_ERROR, error: err.message }); } + } } - if (loading) { return ; } @@ -224,18 +228,19 @@ const PipelineCrateForm = (props) => { SamlGroupName: '', environment: '', tags: [], - devStrategy: 'cdk-trunk', + devStrategy: 'cdk-trunk' }} validationSchema={Yup.object().shape({ label: Yup.string() .max(255) .required('*Pipeline name is required'), description: Yup.string().max(5000), - SamlGroupName: Yup.string() - .max(255), + SamlGroupName: Yup.string().max(255), environment: Yup.object(), - devStrategy: Yup.string().required('*A CICD strategy is required'), - tags: Yup.array().nullable(), + devStrategy: Yup.string().required( + '*A CICD strategy is required' + ), + tags: Yup.array().nullable() })} onSubmit={async ( values, @@ -367,7 +372,10 @@ const PipelineCrateForm = (props) => { label="Team" name="SamlGroupName" onChange={(event) => { - setFieldValue('SamlGroupName', event.target.value); + setFieldValue( + 'SamlGroupName', + event.target.value + ); }} select value={values.SamlGroupName} @@ -439,7 +447,9 @@ const PipelineCrateForm = (props) => { environmentOptions={environmentOptions} triggerEnvSubmit={triggerEnvSubmit} pipelineUri={pipelineUri} - handleCountEnvironmentValid={handleCountEnvironmentValid} + handleCountEnvironmentValid={ + handleCountEnvironmentValid + } /> {errors.submit && ( @@ -463,7 +473,7 @@ const PipelineCrateForm = (props) => { Create Pipeline - + )} @@ -475,4 +485,4 @@ const PipelineCrateForm = (props) => { ); }; -export default PipelineCrateForm; \ No newline at end of file +export default PipelineCreateForm; diff --git a/frontend/src/views/Pipelines/PipelineEditForm.js b/frontend/src/modules/Pipelines/views/PipelineEditForm.js similarity index 84% rename from frontend/src/views/Pipelines/PipelineEditForm.js rename to frontend/src/modules/Pipelines/views/PipelineEditForm.js index a6d1c23a1..8b41b3a26 100644 --- a/frontend/src/views/Pipelines/PipelineEditForm.js +++ b/frontend/src/modules/Pipelines/views/PipelineEditForm.js @@ -1,8 +1,4 @@ -import { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; -import { useSnackbar } from 'notistack'; +import { LoadingButton } from '@mui/lab'; import { Box, Breadcrumbs, @@ -18,21 +14,23 @@ import { TextField, Typography } from '@mui/material'; +import { Formik } from 'formik'; +import { useSnackbar } from 'notistack'; +import { useCallback, useEffect, useState } from 'react'; import { Helmet } from 'react-helmet-async'; -import { LoadingButton } from '@mui/lab'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import useSettings from '../../hooks/useSettings'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import ChipInput from '../../components/TagsInput'; -import getDataPipeline from '../../api/DataPipeline/getDataPipeline'; -import updateDataPipeline from '../../api/DataPipeline/updateDataPipeline'; -import listEnvironments from '../../api/Environment/listEnvironments'; -import PipelineEnvironmentEditForm from "./PipelineEnvironmentEditForm"; -import * as Defaults from '../../components/defaults'; - +import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; +import * as Yup from 'yup'; +import { + ArrowLeftIcon, + ChevronRightIcon, + ChipInput, + Defaults, + useSettings +} from 'design'; +import { 
SET_ERROR, useDispatch } from 'globalErrors'; +import { listEnvironments, useClient } from 'services'; +import { getDataPipeline, updateDataPipeline } from '../services'; +import { PipelineEnvironmentEditForm } from '../components'; const PipelineEditForm = (props) => { const dispatch = useDispatch(); @@ -48,9 +46,9 @@ const PipelineEditForm = (props) => { const [triggerEnvSubmit, setTriggerEnvSubmit] = useState(false); const [countEnvironmentsValid, setCountEnvironmentsValid] = useState(false); - const handleCountEnvironmentValid = state => { + const handleCountEnvironmentValid = (state) => { setCountEnvironmentsValid(state); - }; + }; const fetchItem = useCallback(async () => { setLoadingPipeline(true); @@ -75,7 +73,7 @@ const PipelineEditForm = (props) => { const fetchEnvironments = useCallback(async () => { setLoadingEnvs(true); const response = await client.query( - listEnvironments({ filter: Defaults.SelectListFilter }) + listEnvironments({ filter: Defaults.selectListFilter }) ); if (!response.errors) { setEnvironmentOptions( @@ -100,49 +98,52 @@ const PipelineEditForm = (props) => { }, [client, dispatch, fetchEnvironments]); async function submit(values, setStatus, setSubmitting, setErrors) { - if (!countEnvironmentsValid){ - dispatch({ type: SET_ERROR, error: "At least one deployment environment is required" }) - } else{ - try { - const response = await client.mutate( - updateDataPipeline({ - DataPipelineUri: pipeline.DataPipelineUri, - input: { - description: values.description, - label: values.label, - tags: values.tags - } - }) + if (!countEnvironmentsValid) { + dispatch({ + type: SET_ERROR, + error: 'At least one deployment environment is required' + }); + } else { + try { + const response = await client.mutate( + updateDataPipeline({ + DataPipelineUri: pipeline.DataPipelineUri, + input: { + description: values.description, + label: values.label, + tags: values.tags + } + }) + ); + if (!response.errors) { + setStatus({ success: true }); + setTriggerEnvSubmit(true); + setSubmitting(false); + enqueueSnackbar('Pipeline updated', { + anchorOrigin: { + horizontal: 'right', + vertical: 'top' + }, + variant: 'success' + }); + navigate( + `/console/pipelines/${response.data.updateDataPipeline.DataPipelineUri}` ); - if (!response.errors) { - setStatus({ success: true }); - setTriggerEnvSubmit(true); - setSubmitting(false); - enqueueSnackbar('Pipeline updated', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - navigate( - `/console/pipelines/${response.data.updateDataPipeline.DataPipelineUri}` - ); - } else { - setTriggerEnvSubmit(false); - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - setStatus({ success: false }); + } else { setTriggerEnvSubmit(false); - setErrors({ submit: err.message }); - setSubmitting(false); - dispatch({ type: SET_ERROR, error: err.message }); + dispatch({ type: SET_ERROR, error: response.errors[0].message }); } + } catch (err) { + setStatus({ success: false }); + setTriggerEnvSubmit(false); + setErrors({ submit: err.message }); + setSubmitting(false); + dispatch({ type: SET_ERROR, error: err.message }); } } + } - if ((loadingPipeline || loadingEnvs) || (!pipeline && pipeline.environment)) { + if (loadingPipeline || loadingEnvs || (!pipeline && pipeline.environment)) { return ; } @@ -373,7 +374,9 @@ const PipelineEditForm = (props) => { triggerEnvSubmit={triggerEnvSubmit} pipelineUri={pipeline.DataPipelineUri} pipeline={pipeline} - 
handleCountEnvironmentValid={handleCountEnvironmentValid} + handleCountEnvironmentValid={ + handleCountEnvironmentValid + } /> {errors.submit && ( @@ -397,7 +400,7 @@ const PipelineEditForm = (props) => { Update Pipeline - + )} diff --git a/frontend/src/modules/Pipelines/views/PipelineList.js b/frontend/src/modules/Pipelines/views/PipelineList.js new file mode 100644 index 000000000..3695a3091 --- /dev/null +++ b/frontend/src/modules/Pipelines/views/PipelineList.js @@ -0,0 +1,241 @@ +import { + Autocomplete, + Box, + Breadcrumbs, + Button, + Container, + Divider, + Grid, + Link, + TextField, + Typography +} from '@mui/material'; +import CircularProgress from '@mui/material/CircularProgress'; +import { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; +import { Link as RouterLink } from 'react-router-dom'; +import { + ChevronRightIcon, + ChipInput, + Defaults, + Pager, + PlusIcon, + SearchInput, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { listDataPipelines } from '../services'; +import { AwsRegions } from 'utils'; +import { PipelineListItem } from '../components'; + +function PipelinesPageHeader() { + return ( + + + + Pipelines + + } + sx={{ mt: 1 }} + > + + Play + + + Pipelines + + + + + + + + + + ); +} + +const PipelineList = () => { + const dispatch = useDispatch(); + const [items, setItems] = useState(Defaults.pagedResponse); + const [filter, setFilter] = useState(Defaults.filter); + const { settings } = useSettings(); + const [inputValue, setInputValue] = useState(''); + const [loading, setLoading] = useState(true); + const client = useClient(); + const devOptions = [ + { value: 'cdk-trunk', label: 'CDK Pipelines - Trunk-based' }, + { value: 'trunk', label: 'CodePipeline - Trunk-based' }, + { value: 'gitflow', label: 'CodePipeline - Gitflow' } + ]; /*DBT Pipelines*/ + + const regions = AwsRegions.map((region) => ({ + label: region.name, + value: region.code + })); + + const [filterItems] = useState([ + { title: 'DevStrategy', options: devOptions }, + { title: 'Tags' }, + { title: 'Region', options: regions } + ]); + + const fetchItems = useCallback(async () => { + setLoading(true); + const response = await client.query(listDataPipelines(filter)); + if (!response.errors) { + setItems(response.data.listDataPipelines); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + setLoading(false); + }, [client, dispatch, filter]); + + const handleInputChange = (event) => { + setInputValue(event.target.value); + setFilter({ ...filter, term: event.target.value }); + }; + + const handleInputKeyup = (event) => { + if (event.code === 'Enter') { + setFilter({ ...filter, page: 1, term: event.target.value }); + } + }; + + const handlePageChange = async (event, value) => { + if (value <= items.pages && value !== items.page) { + setFilter({ ...filter, page: value }); + } + }; + + const handleFilterChange = (filterLabel, values) => { + if (filterLabel === 'Region') { + const selectedRegions = values.map((region) => region.value); + setFilter({ ...filter, region: selectedRegions }); + } else if (filterLabel === 'Tags') { + setFilter({ ...filter, tags: values }); + } else if (filterLabel === 'DevStrategy') { + const selectedTypes = values.map((type) => type.value); + setFilter({ ...filter, type: selectedTypes }); + } + }; + + useEffect(() => { + if (client) { + fetchItems().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + 
} + }, [client, filter, dispatch]); + + return ( + <> + + Pipelines | data.all + + + + + + + + + + {filterItems.map((item) => ( + + {item.title !== 'Tags' ? ( + option.label} + onChange={(event, value) => + handleFilterChange(item.title, value) + } + renderInput={(regionParams) => ( + + )} + /> + ) : ( + handleFilterChange(item.title, e)} + /> + )} + + ))} + + + + + {loading ? ( + + ) : ( + + + {items.nodes.map((node) => ( + + ))} + + + + )} + + + + + ); +}; + +export default PipelineList; diff --git a/frontend/src/views/Pipelines/PipelineView.js b/frontend/src/modules/Pipelines/views/PipelineView.js similarity index 87% rename from frontend/src/views/Pipelines/PipelineView.js rename to frontend/src/modules/Pipelines/views/PipelineView.js index 77cf3bc0b..1072d73f8 100644 --- a/frontend/src/views/Pipelines/PipelineView.js +++ b/frontend/src/modules/Pipelines/views/PipelineView.js @@ -1,6 +1,4 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useParams } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; +import { ForumOutlined, Info, LocalOffer } from '@mui/icons-material'; import { Box, Breadcrumbs, @@ -14,31 +12,29 @@ import { Tabs, Typography } from '@mui/material'; +import { useSnackbar } from 'notistack'; +import * as PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; import { FaAws, FaTrash } from 'react-icons/fa'; import { useNavigate } from 'react-router'; -import * as PropTypes from 'prop-types'; -import { useSnackbar } from 'notistack'; +import { Link as RouterLink, useParams } from 'react-router-dom'; import { - ForumOutlined, - Info, - LocalOffer, - PlaylistPlay -} from '@mui/icons-material'; -import useSettings from '../../hooks/useSettings'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import Stack from '../Stack/Stack'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import PipelineOverview from './PipelineOverview'; -import PencilAltIcon from '../../icons/PencilAlt'; -import DeleteObjectWithFrictionModal from '../../components/DeleteObjectWithFrictionModal'; -import deleteDataPipeline from '../../api/DataPipeline/deleteDataPipeline'; -import getDataPipeline from '../../api/DataPipeline/getDataPipeline'; -import StackStatus from '../Stack/StackStatus'; -import KeyValueTagList from '../KeyValueTags/KeyValueTagList'; -import FeedComments from '../Feed/FeedComments'; - + ChevronRightIcon, + DeleteObjectWithFrictionModal, + PencilAltIcon, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { deleteDataPipeline, getDataPipeline } from '../services'; +import { + FeedComments, + KeyValueTagList, + StackStatus, + Stack +} from 'modules/Shared'; +import { PipelineOverview } from '../components'; function PipelineViewPageHeader({ pipeline, deletePipeline }) { const [openFeed, setOpenFeed] = useState(false); @@ -138,13 +134,14 @@ const PipelineView = () => { const [loading, setLoading] = useState(true); const [pipeline, setPipeline] = useState(null); const [stack, setStack] = useState(null); - const [cdkTrunk, setCdkTrunk] = useState(null); const [isDeleteObjectModalOpen, setIsDeleteObjectModalOpen] = useState(false); - const [tabs, setTabs] = useState([ + const tabs = [ { label: 'Overview', value: 'overview', icon: }, { label: 'Tags', 
value: 'tags', icon: }, - { label: 'Stack', value: 'stack', icon: }]); - const handleDeleteObjectModalOpen = () => { + { label: 'Stack', value: 'stack', icon: } + ]; + + const handleDeleteObjectModalOpen = () => { setIsDeleteObjectModalOpen(true); }; @@ -165,7 +162,7 @@ const PipelineView = () => { } setLoading(false); }, [client, dispatch, params.uri, stack]); - + useEffect(() => { if (client) { fetchItem().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); @@ -263,7 +260,11 @@ const PipelineView = () => { environmentUri={pipeline.environment.environmentUri} stackUri={pipeline.stack.stackUri} targetUri={pipeline.DataPipelineUri} - targetType={pipeline.devStrategy == 'cdk-trunk' ? "cdkpipeline" : "pipeline"} + targetType={ + pipeline.devStrategy === 'cdk-trunk' + ? 'cdkpipeline' + : 'pipeline' + } /> )} diff --git a/frontend/src/views/Feed/FeedCommentAdd.js b/frontend/src/modules/Shared/Comments/FeedCommentAdd.js similarity index 89% rename from frontend/src/views/Feed/FeedCommentAdd.js rename to frontend/src/modules/Shared/Comments/FeedCommentAdd.js index f3988d4bd..0ad034f5f 100644 --- a/frontend/src/views/Feed/FeedCommentAdd.js +++ b/frontend/src/modules/Shared/Comments/FeedCommentAdd.js @@ -1,14 +1,12 @@ -import { useState } from 'react'; -import { Box, IconButton, TextField, Tooltip } from '@mui/material'; import SendIcon from '@mui/icons-material/Send'; +import { Box, IconButton, TextField, Tooltip } from '@mui/material'; import { useSnackbar } from 'notistack'; import PropTypes from 'prop-types'; -import useClient from '../../hooks/useClient'; -import { SET_ERROR } from '../../store/errorReducer'; -import { postFeedMessage } from '../../api/Feed'; -import { useDispatch } from '../../store'; +import { useState } from 'react'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { postFeedMessage, useClient } from 'services'; -const FeedCommentAdd = (props) => { +export const FeedCommentAdd = (props) => { const dispatch = useDispatch(); const { targetType, targetUri, reloadMessages } = props; const [value, setValue] = useState(''); @@ -97,5 +95,3 @@ FeedCommentAdd.propTypes = { targetType: PropTypes.string.isRequired, reloadMessages: PropTypes.func.isRequired }; - -export default FeedCommentAdd; diff --git a/frontend/src/views/Feed/FeedComments.js b/frontend/src/modules/Shared/Comments/FeedComments.js similarity index 87% rename from frontend/src/views/Feed/FeedComments.js rename to frontend/src/modules/Shared/Comments/FeedComments.js index c412ace1b..338a3998b 100644 --- a/frontend/src/views/Feed/FeedComments.js +++ b/frontend/src/modules/Shared/Comments/FeedComments.js @@ -1,4 +1,3 @@ -import { Link as RouterLink } from 'react-router-dom'; import { Box, CircularProgress, @@ -7,28 +6,24 @@ import { Link, Typography } from '@mui/material'; -import React, { useCallback, useEffect, useState } from 'react'; -import PropTypes from 'prop-types'; import { useTheme } from '@mui/styles'; -import { useDispatch } from '../../store'; -import useClient from '../../hooks/useClient'; -import { SET_ERROR } from '../../store/errorReducer'; -import * as Defaults from '../../components/defaults'; -import { listFeedMessages } from '../../api/Feed'; -import TextAvatar from '../../components/TextAvatar'; -import FeedCommentAdd from './FeedCommentAdd'; -import Label from '../../components/Label'; -import { dayjs } from '../../utils/dayjs'; -import Scrollbar from '../../components/Scrollbar'; +import PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 
'react'; +import { Link as RouterLink } from 'react-router-dom'; +import { Defaults, Label, Scrollbar, TextAvatar } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { listFeedMessages, useClient } from 'services'; +import { dayjs } from 'utils'; +import { FeedCommentAdd } from './FeedCommentAdd'; -const FeedComments = (props) => { +export const FeedComments = (props) => { const { targetUri, targetType, objectOwner, open, onClose } = props; const dispatch = useDispatch(); const client = useClient(); const theme = useTheme(); const [loading, setLoading] = useState(true); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter] = useState(Defaults.SelectListFilter); + const [items, setItems] = useState(Defaults.pagedResponse); + const [filter] = useState(Defaults.selectListFilter); const fetchItems = useCallback(async () => { setLoading(true); const response = await client.query( @@ -165,4 +160,3 @@ FeedComments.propTypes = { open: PropTypes.bool.isRequired, onClose: PropTypes.func.isRequired }; -export default FeedComments; diff --git a/frontend/src/modules/Shared/Comments/index.js b/frontend/src/modules/Shared/Comments/index.js new file mode 100644 index 000000000..539fc6d85 --- /dev/null +++ b/frontend/src/modules/Shared/Comments/index.js @@ -0,0 +1,2 @@ +export * from './FeedComments'; +export * from './FeedCommentAdd'; diff --git a/frontend/src/modules/Shared/KeyValueTags/KeyValueTagList.js b/frontend/src/modules/Shared/KeyValueTags/KeyValueTagList.js new file mode 100644 index 000000000..415366cb1 --- /dev/null +++ b/frontend/src/modules/Shared/KeyValueTags/KeyValueTagList.js @@ -0,0 +1,142 @@ +import { + Box, + Button, + Card, + CardHeader, + CircularProgress, + Divider, + Switch, + Table, + TableBody, + TableCell, + TableHead, + TableRow +} from '@mui/material'; +import PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import { PencilAltIcon, Scrollbar } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { listKeyValueTags, useClient } from 'services'; +import { KeyValueTagUpdateForm } from './KeyValueTagUpdateForm'; + +export const KeyValueTagList = ({ targetUri, targetType }) => { + const client = useClient(); + const dispatch = useDispatch(); + const [items, setItems] = useState([]); + const [openUpdateForm, setOpenUpdateForm] = useState(false); + const [loading, setLoading] = useState(null); + + const fetchItems = useCallback(async () => { + setLoading(true); + const response = await client.query( + listKeyValueTags(targetUri, targetType) + ); + if (!response.errors) { + setItems(response.data.listKeyValueTags); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + setLoading(false); + }, [client, dispatch, targetType, targetUri]); + + const openUpdate = () => { + setOpenUpdateForm(true); + }; + + const closeUpdate = () => { + fetchItems().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); + setOpenUpdateForm(false); + }; + + useEffect(() => { + if (client) { + fetchItems().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + } + }, [client, dispatch, fetchItems]); + + if (loading) { + return ; + } + + return ( + + {items && ( + + {openUpdateForm ? ( + 0 + ? 
items + : [{ key: '', value: '', cascade: false }] + } + closeUpdate={closeUpdate} + /> + ) : ( + + + + + {items && items.length > 0 && ( + + Key-Value Tags} /> + + + + + + + Key + Value + {targetType === 'environment' && ( + Cascade enabled + )} + + + + {items.map((tag) => ( + + {tag.key || '-'} + {tag.value || '-'} + {targetType === 'environment' && ( + + + + )} + + ))} + +
+
+
+
+ )} + + )} + + )} + + ); +}; + +KeyValueTagList.propTypes = { + targetType: PropTypes.string.isRequired, + targetUri: PropTypes.string.isRequired +}; diff --git a/frontend/src/views/KeyValueTags/KeyValueTagUpdateForm.js b/frontend/src/modules/Shared/KeyValueTags/KeyValueTagUpdateForm.js similarity index 83% rename from frontend/src/views/KeyValueTags/KeyValueTagUpdateForm.js rename to frontend/src/modules/Shared/KeyValueTags/KeyValueTagUpdateForm.js index 688393d0d..455cbc0e8 100644 --- a/frontend/src/views/KeyValueTags/KeyValueTagUpdateForm.js +++ b/frontend/src/modules/Shared/KeyValueTags/KeyValueTagUpdateForm.js @@ -1,5 +1,5 @@ -import React, { useState } from 'react'; -import { useSnackbar } from 'notistack'; +import { DeleteOutlined } from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; import { Box, Button, @@ -9,30 +9,28 @@ import { Divider, Grid, IconButton, + Switch, Table, TableBody, TableCell, TableHead, TableRow, - TextField, - Switch + TextField } from '@mui/material'; -import { DeleteOutlined } from '@mui/icons-material'; +import { useSnackbar } from 'notistack'; import PropTypes from 'prop-types'; -import { LoadingButton } from '@mui/lab'; -import useClient from '../../hooks/useClient'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import updateKeyValueTags from '../../api/KeyValueTags/updateKeyValueTags'; +import React, { useState } from 'react'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { updateKeyValueTags, useClient } from 'services'; -const KeyValueTagUpdateForm = (props) => { +export const KeyValueTagUpdateForm = (props) => { const { targetType, targetUri, tags, closeUpdate } = props; const dispatch = useDispatch(); const { enqueueSnackbar } = useSnackbar(); const client = useClient(); const [isSubmitting, setIsSubmitting] = useState(false); const [kvTags, setKeyValueTags] = useState( - tags && tags.length > 0 ? tags : [{ key: '', value: '', cascade: false}] + tags && tags.length > 0 ? tags : [{ key: '', value: '', cascade: false }] ); const handleAddKeyValueRow = () => { @@ -83,7 +81,11 @@ const KeyValueTagUpdateForm = (props) => { targetType, tags: kvTags.length > 0 - ? kvTags.map((k) => ({ key: k.key, value: k.value, cascade: k.cascade })) + ? 
kvTags.map((k) => ({ + key: k.key, + value: k.value, + cascade: k.cascade + })) : [] }) ); @@ -125,7 +127,9 @@ const KeyValueTagUpdateForm = (props) => { Key Value - {targetType == 'environment' && (Cascade enabled)} + {targetType === 'environment' && ( + Cascade enabled + )} )} @@ -151,16 +155,21 @@ const KeyValueTagUpdateForm = (props) => { variant="outlined" /> - {targetType == 'environment' && ( + {targetType === 'environment' && ( + - )} + color="primary" + edge="start" + name="cascade" + checked={kvTags[idx].cascade} + value={kvTags[idx].cascade} + onChange={handleKeyValueChange( + idx, + 'cascade' + )} + /> + + )} { @@ -215,4 +224,3 @@ KeyValueTagUpdateForm.propTypes = { tags: PropTypes.array.isRequired, closeUpdate: PropTypes.func.isRequired }; -export default KeyValueTagUpdateForm; diff --git a/frontend/src/modules/Shared/KeyValueTags/index.js b/frontend/src/modules/Shared/KeyValueTags/index.js new file mode 100644 index 000000000..2b146bfa1 --- /dev/null +++ b/frontend/src/modules/Shared/KeyValueTags/index.js @@ -0,0 +1,2 @@ +export * from './KeyValueTagList'; +export * from './KeyValueTagUpdateForm'; diff --git a/frontend/src/views/Stack/Stack.js b/frontend/src/modules/Shared/Stack/Stack.js similarity index 95% rename from frontend/src/views/Stack/Stack.js rename to frontend/src/modules/Shared/Stack/Stack.js index ce66dff8e..fc75b6d96 100644 --- a/frontend/src/views/Stack/Stack.js +++ b/frontend/src/modules/Shared/Stack/Stack.js @@ -1,5 +1,5 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import PropTypes from 'prop-types'; +import { Article, RefreshRounded, SystemUpdate } from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; import { Box, Button, @@ -15,19 +15,15 @@ import { TableRow, Typography } from '@mui/material'; -import { Article, RefreshRounded, SystemUpdate } from '@mui/icons-material'; import { useSnackbar } from 'notistack'; -import { LoadingButton } from '@mui/lab'; -import useClient from '../../hooks/useClient'; -import { useDispatch } from '../../store'; -import getStack from '../../api/Stack/getStack'; -import { SET_ERROR } from '../../store/errorReducer'; -import StackStatus from '../../components/StackStatus'; -import Scrollbar from '../../components/Scrollbar'; -import StackLogs from './StackLogs'; -import updateStack from '../../api/Stack/updateStack'; +import PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import { Scrollbar, StackStatus } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { getStack, updateStack, useClient } from 'services'; +import { StackLogs } from './StackLogs'; -const Stack = (props) => { +export const Stack = (props) => { const { environmentUri, stackUri, targetUri, targetType } = props; const client = useClient(); const dispatch = useDispatch(); @@ -276,4 +272,3 @@ Stack.propTypes = { targetUri: PropTypes.string.isRequired, targetType: PropTypes.string.isRequired }; -export default Stack; diff --git a/frontend/src/views/Stack/StackLogs.js b/frontend/src/modules/Shared/Stack/StackLogs.js similarity index 91% rename from frontend/src/views/Stack/StackLogs.js rename to frontend/src/modules/Shared/Stack/StackLogs.js index 193a22a2a..fcd8037b9 100644 --- a/frontend/src/views/Stack/StackLogs.js +++ b/frontend/src/modules/Shared/Stack/StackLogs.js @@ -1,5 +1,5 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import PropTypes from 'prop-types'; +import Editor from '@monaco-editor/react'; +import { 
RefreshRounded } from '@mui/icons-material'; import { Box, Button, @@ -8,16 +8,13 @@ import { Grid, Typography } from '@mui/material'; -import Editor from '@monaco-editor/react'; -import { RefreshRounded } from '@mui/icons-material'; -import useClient from '../../hooks/useClient'; -import { useDispatch } from '../../store'; -import { SET_ERROR } from '../../store/errorReducer'; -import getStackLogs from '../../api/Stack/getStackLogs'; -import { THEMES } from '../../constants'; -import useSettings from '../../hooks/useSettings'; +import PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import { THEMES, useSettings } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { getStackLogs, useClient } from 'services'; -const StackLogs = (props) => { +export const StackLogs = (props) => { const { environmentUri, stack, onClose, open } = props; const { settings } = useSettings(); const client = useClient(); @@ -125,4 +122,3 @@ StackLogs.propTypes = { onClose: PropTypes.func, open: PropTypes.bool.isRequired }; -export default StackLogs; diff --git a/frontend/src/modules/Shared/Stack/StackStatus.js b/frontend/src/modules/Shared/Stack/StackStatus.js new file mode 100644 index 000000000..efb90dd15 --- /dev/null +++ b/frontend/src/modules/Shared/Stack/StackStatus.js @@ -0,0 +1,140 @@ +import { CancelRounded } from '@mui/icons-material'; +import { + Box, + CircularProgress, + Grid, + IconButton, + Typography +} from '@mui/material'; +import { useSnackbar } from 'notistack'; +import PropTypes from 'prop-types'; +import React, { useEffect } from 'react'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { getStack, useClient } from 'services'; + +export const StackStatus = ({ stack, setStack, environmentUri }) => { + const { enqueueSnackbar, closeSnackbar } = useSnackbar(); + const client = useClient(); + const dispatch = useDispatch(); + + useEffect(() => { + closeSnackbar(); + if (stack) { + switch (stack.status) { + case 'CREATE_IN_PROGRESS': + case 'UPDATE_IN_PROGRESS': + case 'REVIEW_IN_PROGRESS': + case 'PENDING': + enqueueSnackbar( + + + + + + + + AWS CloudFormation stack deployment is in progress ! + + + + , + { + key: new Date().getTime() + Math.random(), + anchorOrigin: { + horizontal: 'right', + vertical: 'top' + }, + variant: 'info', + persist: true, + action: (key) => ( + { + closeSnackbar(key); + }} + > + + + ) + } + ); + break; + case 'CREATE_FAILED': + case 'DELETE_COMPLETE': + case 'DELETE_FAILED': + case 'CREATE_ROLLBACK_COMPLETE': + enqueueSnackbar( + + An error occurred during the deployment of the AWS CloudFormation + stack. Stack status is {stack.status}. + , + { + key: new Date().getTime() + Math.random(), + anchorOrigin: { + horizontal: 'right', + vertical: 'top' + }, + variant: 'error', + persist: true, + action: (key) => ( + { + closeSnackbar(key); + }} + > + + + ) + } + ); + break; + default: + closeSnackbar(); + break; + } + } + const fetchItem = async () => { + const response = await client.query( + getStack(environmentUri, stack.stackUri) + ); + if (!response.errors && response.data.getStack !== null) { + setStack(response.data.getStack); + } else { + const error = response.errors + ? 
response.errors[0].message + : 'AWS CloudFormation stack not found'; + dispatch({ type: SET_ERROR, error }); + } + }; + const interval = setInterval(() => { + if (client && stack) { + fetchItem().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + } + }, 10000); + return () => clearInterval(interval); + }, [ + client, + stack, + dispatch, + enqueueSnackbar, + closeSnackbar, + environmentUri, + setStack + ]); + + return <>; +}; +StackStatus.propTypes = { + stack: PropTypes.object.isRequired, + setStack: PropTypes.func.isRequired, + environmentUri: PropTypes.string.isRequired +}; diff --git a/frontend/src/modules/Shared/Stack/index.js b/frontend/src/modules/Shared/Stack/index.js new file mode 100644 index 000000000..e0fed56a2 --- /dev/null +++ b/frontend/src/modules/Shared/Stack/index.js @@ -0,0 +1,3 @@ +export * from './Stack'; +export * from './StackLogs'; +export * from './StackStatus'; diff --git a/frontend/src/modules/Shared/index.js b/frontend/src/modules/Shared/index.js new file mode 100644 index 000000000..ccffdd8d7 --- /dev/null +++ b/frontend/src/modules/Shared/index.js @@ -0,0 +1,3 @@ +export * from './Comments'; +export * from './KeyValueTags'; +export * from './Stack'; diff --git a/frontend/src/views/Shares/AddShareItemModal.js b/frontend/src/modules/Shares/components/AddShareItemModal.js similarity index 87% rename from frontend/src/views/Shares/AddShareItemModal.js rename to frontend/src/modules/Shares/components/AddShareItemModal.js index 43a078925..a21b55d13 100644 --- a/frontend/src/views/Shares/AddShareItemModal.js +++ b/frontend/src/modules/Shares/components/AddShareItemModal.js @@ -1,5 +1,4 @@ -import PropTypes from 'prop-types'; -import { useSnackbar } from 'notistack'; +import { Add } from '@mui/icons-material'; import { Box, Dialog, @@ -13,25 +12,21 @@ import { Typography } from '@mui/material'; import CircularProgress from '@mui/material/CircularProgress'; -import { Add } from '@mui/icons-material'; +import { useSnackbar } from 'notistack'; +import PropTypes from 'prop-types'; import { useCallback, useEffect, useState } from 'react'; import { useParams } from 'react-router-dom'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import useClient from '../../hooks/useClient'; -import Scrollbar from '../../components/Scrollbar'; -import Pager from '../../components/Pager'; -import * as Defaults from '../../components/defaults'; -import { PagedResponseDefault } from '../../components/defaults'; -import getShareObject from '../../api/ShareObject/getShareObject'; -import addSharedItem from '../../api/ShareObject/addSharedItem'; +import { Defaults, Pager, Scrollbar } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { addSharedItem, getShareObject } from '../services'; -const AddShareItemModal = (props) => { +export const AddShareItemModal = (props) => { const client = useClient(); const { share, onApply, onClose, open, reloadSharedItems, ...other } = props; const { enqueueSnackbar } = useSnackbar(); - const [filter, setFilter] = useState(Defaults.DefaultFilter); - const [sharedItems, setSharedItems] = useState(PagedResponseDefault); + const [filter, setFilter] = useState(Defaults.filter); + const [sharedItems, setSharedItems] = useState(Defaults.pagedResponse); const dispatch = useDispatch(); const params = useParams(); const [loading, setLoading] = useState(true); @@ -185,5 +180,3 @@ AddShareItemModal.propTypes = { reloadSharedItems: 
PropTypes.func, open: PropTypes.bool.isRequired }; - -export default AddShareItemModal; diff --git a/frontend/src/modules/Shares/components/RevokeShareItemsModal.js b/frontend/src/modules/Shares/components/RevokeShareItemsModal.js new file mode 100644 index 000000000..2aa066df8 --- /dev/null +++ b/frontend/src/modules/Shares/components/RevokeShareItemsModal.js @@ -0,0 +1,167 @@ +import { SyncAlt } from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; +import { Box, Card, Dialog, Divider, Typography } from '@mui/material'; +import { DataGrid } from '@mui/x-data-grid'; +import { useSnackbar } from 'notistack'; +import PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import { useParams } from 'react-router-dom'; +import { Defaults } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { getShareObject, revokeItemsShareObject } from '../services'; + +export const RevokeShareItemsModal = (props) => { + const client = useClient(); + const { share, onApply, onClose, open, reloadSharedItems, ...other } = props; + const { enqueueSnackbar } = useSnackbar(); + const [rows, setRows] = useState([]); + const dispatch = useDispatch(); + const params = useParams(); + const [loading, setLoading] = useState(true); + const [selectionModel, setSelectionModel] = useState([]); + const [pageSize, setPageSize] = useState(5); + + const fetchShareItems = useCallback(async () => { + setLoading(true); + const response = await client.query( + getShareObject({ + shareUri: params.uri, + filter: { + ...Defaults.filter, + pageSize: 1000, + isShared: true, + isRevokable: true + } + }) + ); + if (!response.errors) { + setRows( + response.data.getShareObject.items.nodes.map((item) => ({ + id: item.shareItemUri, + name: item.itemName, + type: item.itemType === 'StorageLocation' ? 'Folder' : 'Table', + status: item.status + })) + ); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + setLoading(false); + }, [client, dispatch, params.uri, Defaults.filter]); + + const revoke = async () => { + setLoading(true); + const response = await client.mutate( + revokeItemsShareObject({ + input: { + shareUri: share.shareUri, + revokedItemUris: selectionModel + } + }) + ); + if (!response.errors) { + enqueueSnackbar('Items revoked', { + anchorOrigin: { + horizontal: 'right', + vertical: 'top' + }, + variant: 'success' + }); + await fetchShareItems(); + reloadSharedItems(true); + } else { + dispatch({ type: SET_ERROR, error: response.errors[0].message }); + } + setLoading(false); + }; + + useEffect(() => { + if (client) { + fetchShareItems().catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); + } + }, [client, dispatch, fetchShareItems]); + + if (!share) { + return null; + } + if (!rows) { + return null; + } + const header = [ + { field: 'name', headerName: 'Name', width: 200, editable: false }, + { field: 'type', headerName: 'Type', width: 300, editable: false }, + { field: 'status', headerName: 'Status', width: 300, editable: false } + ]; + + return ( + + + + Revoke access to items from share object {share.dataset.datasetName} + + + { + 'After selecting the items that you want to revoke, click on Revoke Selected Items' + } + + + + + {!loading && rows.length > 0 ? 
( + setPageSize(newPageSize)} + checkboxSelection + onSelectionModelChange={(newSelection) => { + setSelectionModel(newSelection); + }} + selectionModel={selectionModel} + /> + ) : ( + + No items to revoke. + + )} + + + } + sx={{ m: 1 }} + variant="outlined" + > + Revoke Selected Items + + + + + ); +}; + +RevokeShareItemsModal.propTypes = { + share: PropTypes.object.isRequired, + onApply: PropTypes.func, + onClose: PropTypes.func, + reloadSharedItems: PropTypes.func, + open: PropTypes.bool.isRequired +}; diff --git a/frontend/src/views/Shares/ShareInboxList.js b/frontend/src/modules/Shares/components/ShareInboxList.js similarity index 79% rename from frontend/src/views/Shares/ShareInboxList.js rename to frontend/src/modules/Shares/components/ShareInboxList.js index d57413ca9..be9e90401 100644 --- a/frontend/src/views/Shares/ShareInboxList.js +++ b/frontend/src/modules/Shares/components/ShareInboxList.js @@ -1,23 +1,23 @@ -import { useCallback, useEffect, useState } from 'react'; import { Box, Container, Typography } from '@mui/material'; import CircularProgress from '@mui/material/CircularProgress'; -import { Helmet } from 'react-helmet-async'; import PropTypes from 'prop-types'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import useSettings from '../../hooks/useSettings'; -import Pager from '../../components/Pager'; -import ShareInboxListItem from './ShareInboxListItem'; -import { useDispatch } from '../../store'; -import { SET_ERROR } from '../../store/errorReducer'; -import getShareRequestsToMe from '../../api/ShareObject/getShareRequestsToMe'; -import listDatasetShareObjects from '../../api/Dataset/listShareObjects'; +import { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; +import { Defaults, Pager, useSettings } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { + listDatasetShareObjects, + getShareRequestsToMe, + useClient +} from 'services'; + +import { ShareInboxListItem } from './ShareInboxListItem'; -const ShareInboxList = (props) => { +export const ShareInboxList = (props) => { const { dataset } = props; const dispatch = useDispatch(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); + const [items, setItems] = useState(Defaults.pagedResponse); + const [filter, setFilter] = useState(Defaults.filter); const { settings } = useSettings(); const [loading, setLoading] = useState(true); const client = useClient(); @@ -112,5 +112,3 @@ const ShareInboxList = (props) => { ShareInboxList.propTypes = { dataset: PropTypes.object }; - -export default ShareInboxList; diff --git a/frontend/src/modules/Shares/components/ShareInboxListItem.js b/frontend/src/modules/Shares/components/ShareInboxListItem.js new file mode 100644 index 000000000..f2d33dadf --- /dev/null +++ b/frontend/src/modules/Shares/components/ShareInboxListItem.js @@ -0,0 +1,162 @@ +import { + Box, + Button, + Card, + CardHeader, + Divider, + Grid, + Link, + Typography +} from '@mui/material'; +import PropTypes from 'prop-types'; +import { Link as RouterLink } from 'react-router-dom'; +import { ShareStatus, TextAvatar, useCardStyle } from 'design'; + +export const ShareInboxListItem = ({ share }) => { + const classes = useCardStyle(); + + return ( + + + + } + disableTypography + subheader={ + + + + + + | For{' '} + + {share.dataset.datasetName} + {' '} + | {share.created} + + + } + title={ + + {share.owner} + 
+ } + /> + + + {`Read access to Dataset: ${share.dataset.datasetName} + for Principal: ${share.principal.principalName} + from Environment: ${share.principal.environmentName}`} + + + + + + + {`Currently shared items: ${share.statistics.sharedItems}`} + + + + + {`Revoked items: ${share.statistics.revokedItems}`} + + + + + {`Failed items: ${share.statistics.failedItems}`} + + + + + {`Pending items: ${share.statistics.pendingItems}`} + + + + + + + + + + + + ); +}; +ShareInboxListItem.propTypes = { + share: PropTypes.object.isRequired, + reload: PropTypes.func.isRequired +}; diff --git a/frontend/src/views/Shares/ShareOutboxList.js b/frontend/src/modules/Shares/components/ShareOutboxList.js similarity index 77% rename from frontend/src/views/Shares/ShareOutboxList.js rename to frontend/src/modules/Shares/components/ShareOutboxList.js index ae78dafd8..b30ca6c11 100644 --- a/frontend/src/views/Shares/ShareOutboxList.js +++ b/frontend/src/modules/Shares/components/ShareOutboxList.js @@ -1,20 +1,17 @@ -import { useCallback, useEffect, useState } from 'react'; import { Box, Container, Typography } from '@mui/material'; import CircularProgress from '@mui/material/CircularProgress'; +import { useCallback, useEffect, useState } from 'react'; import { Helmet } from 'react-helmet-async'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import useSettings from '../../hooks/useSettings'; -import Pager from '../../components/Pager'; -import { useDispatch } from '../../store'; -import { SET_ERROR } from '../../store/errorReducer'; -import getShareRequestsFromMe from '../../api/ShareObject/getShareRequestsFromMe'; -import ShareOutboxListItem from './ShareOutboxListItem'; +import { Defaults, Pager, useSettings } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { getShareRequestsFromMe } from '../services'; +import { ShareOutboxListItem } from '../components'; -const ShareOutboxList = () => { +export const ShareOutboxList = () => { const dispatch = useDispatch(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); + const [items, setItems] = useState(Defaults.pagedResponse); + const [filter, setFilter] = useState(Defaults.filter); const { settings } = useSettings(); const [loading, setLoading] = useState(true); const client = useClient(); @@ -91,5 +88,3 @@ const ShareOutboxList = () => { ); }; - -export default ShareOutboxList; diff --git a/frontend/src/modules/Shares/components/ShareOutboxListItem.js b/frontend/src/modules/Shares/components/ShareOutboxListItem.js new file mode 100644 index 000000000..280bf5f3e --- /dev/null +++ b/frontend/src/modules/Shares/components/ShareOutboxListItem.js @@ -0,0 +1,159 @@ +import { + Box, + Button, + Card, + CardHeader, + Divider, + Grid, + Link, + Typography +} from '@mui/material'; +import PropTypes from 'prop-types'; +import { Link as RouterLink } from 'react-router-dom'; +import { ShareStatus, TextAvatar } from 'design'; + +export const ShareOutboxListItem = ({ share }) => { + return ( + + + + } + disableTypography + subheader={ + + + + + + | For{' '} + + {share.dataset.datasetName} + {' '} + | {share.created} + + + } + title={ + + {share.owner} + + } + /> + + + {`Read access to Dataset: ${share.dataset.datasetName} + for Principal: ${share.principal.principalName} + from Environment: ${share.principal.environmentName}`} + + + + + + + {`Currently shared items: 
${share.statistics.sharedItems}`} + + + + + {`Revoked items: ${share.statistics.revokedItems}`} + + + + + {`Failed items: ${share.statistics.failedItems}`} + + + + + {`Pending items: ${share.statistics.pendingItems}`} + + + + + + + + + + + + ); +}; +ShareOutboxListItem.propTypes = { + share: PropTypes.object.isRequired, + reload: PropTypes.func.isRequired +}; diff --git a/frontend/src/views/Shares/ShareRejectModal.js b/frontend/src/modules/Shares/components/ShareRejectModal.js similarity index 93% rename from frontend/src/views/Shares/ShareRejectModal.js rename to frontend/src/modules/Shares/components/ShareRejectModal.js index 2011ee8c9..22639b0f7 100644 --- a/frontend/src/views/Shares/ShareRejectModal.js +++ b/frontend/src/modules/Shares/components/ShareRejectModal.js @@ -11,9 +11,9 @@ import { } from '@mui/material'; import { LoadingButton } from '@mui/lab'; import SendIcon from '@mui/icons-material/Send'; -import React, { useState } from 'react'; +import React from 'react'; -const ShareRejectModal = (props) => { +export const ShareRejectModal = (props) => { const { share, onApply, onClose, open, rejectFunction, ...other } = props; return ( @@ -36,14 +36,12 @@ const ShareRejectModal = (props) => { { + onSubmit={async (values) => { await rejectFunction(values.comment); }} > @@ -113,7 +111,5 @@ ShareRejectModal.propTypes = { onApply: PropTypes.func, onClose: PropTypes.func, rejectFunction: PropTypes.func.isRequired, - open: PropTypes.bool.isRequired, + open: PropTypes.bool.isRequired }; - -export default ShareRejectModal; diff --git a/frontend/src/views/Shares/ShareUpdateReject.js b/frontend/src/modules/Shares/components/ShareUpdateReject.js similarity index 80% rename from frontend/src/views/Shares/ShareUpdateReject.js rename to frontend/src/modules/Shares/components/ShareUpdateReject.js index d40ceed40..beeff7c87 100644 --- a/frontend/src/views/Shares/ShareUpdateReject.js +++ b/frontend/src/modules/Shares/components/ShareUpdateReject.js @@ -10,30 +10,33 @@ import { Typography } from '@mui/material'; import { LoadingButton } from '@mui/lab'; -import { SET_ERROR } from '../../store/errorReducer'; +import { SET_ERROR } from 'globalErrors'; import SendIcon from '@mui/icons-material/Send'; import React, { useState } from 'react'; -import updateShareRejectReason from '../../api/ShareObject/updateShareRejectReason'; +import { updateShareRejectReason } from '../services'; -const UpdateRejectReason = (props) => { - const { share, client, dispatch, enqueueSnackbar, fetchItem, ...other } = props; +export const UpdateRejectReason = (props) => { + const { share, client, dispatch, enqueueSnackbar, fetchItem, ...other } = + props; const [isUpdateRejectModalOpen, setIsUpdateRejectModalOpen] = useState(false); const [updating, setUpdating] = useState(false); - const handleUpdateRejectModalOpen = () => {setIsUpdateRejectModalOpen(true);}; - const handleUpdateRejectModalClose = () => {setIsUpdateRejectModalOpen(false);}; + const handleUpdateRejectModalOpen = () => { + setIsUpdateRejectModalOpen(true); + }; + const handleUpdateRejectModalClose = () => { + setIsUpdateRejectModalOpen(false); + }; const update = async (comment) => { setUpdating(true); const response = await client.mutate( - updateShareRejectReason( - { - shareUri: share.shareUri, - rejectPurpose: comment - } - ) + updateShareRejectReason({ + shareUri: share.shareUri, + rejectPurpose: comment + }) ); if (!response.errors) { - handleUpdateRejectModalClose() + handleUpdateRejectModalClose(); enqueueSnackbar('Share reject reason updated', { 
anchorOrigin: { horizontal: 'right', @@ -61,7 +64,13 @@ const UpdateRejectReason = (props) => { > Edit - + { Update Share Reject Reason - + Update a reason to reject the share request: { + onSubmit={async (values) => { await update(values.comment); }} > @@ -125,7 +136,9 @@ const UpdateRejectReason = (props) => { /> {touched.comment && errors.comment && ( - {errors.comment} + + {errors.comment} + )} @@ -157,7 +170,5 @@ UpdateRejectReason.propTypes = { client: PropTypes.any, dispatch: PropTypes.any, enqueueSnackbar: PropTypes.any, - fetchItem: PropTypes.func, + fetchItem: PropTypes.func }; - -export default UpdateRejectReason; diff --git a/frontend/src/views/Shares/ShareUpdateRequest.js b/frontend/src/modules/Shares/components/ShareUpdateRequest.js similarity index 79% rename from frontend/src/views/Shares/ShareUpdateRequest.js rename to frontend/src/modules/Shares/components/ShareUpdateRequest.js index bc2c003f1..e6e89749c 100644 --- a/frontend/src/views/Shares/ShareUpdateRequest.js +++ b/frontend/src/modules/Shares/components/ShareUpdateRequest.js @@ -10,31 +10,34 @@ import { Typography } from '@mui/material'; import { LoadingButton } from '@mui/lab'; -import { SET_ERROR } from '../../store/errorReducer'; +import { SET_ERROR } from 'globalErrors'; import SendIcon from '@mui/icons-material/Send'; import React, { useState } from 'react'; -import updateShareRequestReason from '../../api/ShareObject/updateShareRequestReason'; +import { updateShareRequestReason } from '../services'; - -const UpdateRequestReason = (props) => { - const { share, client, dispatch, enqueueSnackbar, fetchItem, ...other } = props; - const [isUpdateRequestModalOpen, setIsUpdateRequestModalOpen] = useState(false); +export const UpdateRequestReason = (props) => { + const { share, client, dispatch, enqueueSnackbar, fetchItem, ...other } = + props; + const [isUpdateRequestModalOpen, setIsUpdateRequestModalOpen] = + useState(false); const [updating, setUpdating] = useState(false); - const handleUpdateRequestModalOpen = () => {setIsUpdateRequestModalOpen(true);}; - const handleUpdateRequestModalClose = () => {setIsUpdateRequestModalOpen(false);}; + const handleUpdateRequestModalOpen = () => { + setIsUpdateRequestModalOpen(true); + }; + const handleUpdateRequestModalClose = () => { + setIsUpdateRequestModalOpen(false); + }; const update = async (comment) => { setUpdating(true); const response = await client.mutate( - updateShareRequestReason( - { - shareUri: share.shareUri, - requestPurpose: comment - } - ) + updateShareRequestReason({ + shareUri: share.shareUri, + requestPurpose: comment + }) ); if (!response.errors) { - handleUpdateRequestModalClose() + handleUpdateRequestModalClose(); enqueueSnackbar('Share request reason updated', { anchorOrigin: { horizontal: 'right', @@ -62,7 +65,13 @@ const UpdateRequestReason = (props) => { > Edit - + { Update Share Request - + Update a reason for your share request: @@ -85,9 +98,7 @@ const UpdateRequestReason = (props) => { validationSchema={Yup.object().shape({ comment: Yup.string().max(200) })} - onSubmit={async ( - values - ) => { + onSubmit={async (values) => { await update(values.comment); }} > @@ -126,7 +137,9 @@ const UpdateRequestReason = (props) => { /> {touched.comment && errors.comment && ( - {errors.comment} + + {errors.comment} + )} @@ -158,7 +171,5 @@ UpdateRequestReason.propTypes = { client: PropTypes.any, dispatch: PropTypes.any, enqueueSnackbar: PropTypes.any, - fetchItem: PropTypes.func, + fetchItem: PropTypes.func }; - -export default UpdateRequestReason; diff 
--git a/frontend/src/modules/Shares/components/index.js b/frontend/src/modules/Shares/components/index.js
new file mode 100644
index 000000000..a1f7281fa
--- /dev/null
+++ b/frontend/src/modules/Shares/components/index.js
@@ -0,0 +1,9 @@
+export * from './AddShareItemModal';
+export * from './RevokeShareItemsModal';
+export * from './ShareInboxList';
+export * from './ShareInboxListItem';
+export * from './ShareOutboxList';
+export * from './ShareOutboxListItem';
+export * from './ShareRejectModal';
+export * from './ShareUpdateReject';
+export * from './ShareUpdateRequest';
diff --git a/frontend/src/modules/Shares/index.js b/frontend/src/modules/Shares/index.js
new file mode 100644
index 000000000..8c76e1369
--- /dev/null
+++ b/frontend/src/modules/Shares/index.js
@@ -0,0 +1 @@
+export { ShareInboxList } from './components';
diff --git a/frontend/src/modules/Shares/services/addSharedItem.js b/frontend/src/modules/Shares/services/addSharedItem.js
new file mode 100644
index 000000000..be215acdd
--- /dev/null
+++ b/frontend/src/modules/Shares/services/addSharedItem.js
@@ -0,0 +1,17 @@
+import { gql } from 'apollo-boost';
+
+export const addSharedItem = ({ shareUri, input }) => {
+  return {
+    variables: {
+      shareUri,
+      input
+    },
+    mutation: gql`
+      mutation AddSharedItem($shareUri: String!, $input: AddSharedItemInput!) {
+        addSharedItem(shareUri: $shareUri, input: $input) {
+          shareItemUri
+        }
+      }
+    `
+  };
+};
diff --git a/frontend/src/modules/Shares/services/approveShareObject.js b/frontend/src/modules/Shares/services/approveShareObject.js
new file mode 100644
index 000000000..b5ddabcb8
--- /dev/null
+++ b/frontend/src/modules/Shares/services/approveShareObject.js
@@ -0,0 +1,15 @@
+import { gql } from 'apollo-boost';
+
+export const approveShareObject = ({ shareUri }) => ({
+  variables: {
+    shareUri
+  },
+  mutation: gql`
+    mutation approveShareObject($shareUri: String!) {
+      approveShareObject(shareUri: $shareUri) {
+        shareUri
+        status
+      }
+    }
+  `
+});
diff --git a/frontend/src/modules/Shares/services/deleteShareObject.js b/frontend/src/modules/Shares/services/deleteShareObject.js
new file mode 100644
index 000000000..63e38f0a3
--- /dev/null
+++ b/frontend/src/modules/Shares/services/deleteShareObject.js
@@ -0,0 +1,12 @@
+import { gql } from 'apollo-boost';
+
+export const deleteShareObject = ({ shareUri }) => ({
+  variables: {
+    shareUri
+  },
+  mutation: gql`
+    mutation DeleteShareObject($shareUri: String!) {
+      deleteShareObject(shareUri: $shareUri)
+    }
+  `
+});
diff --git a/frontend/src/api/ShareObject/getShareObject.js b/frontend/src/modules/Shares/services/getShareObject.js
similarity index 93%
rename from frontend/src/api/ShareObject/getShareObject.js
rename to frontend/src/modules/Shares/services/getShareObject.js
index e7da50f4e..eebf5cdfd 100644
--- a/frontend/src/api/ShareObject/getShareObject.js
+++ b/frontend/src/modules/Shares/services/getShareObject.js
@@ -1,6 +1,6 @@
 import { gql } from 'apollo-boost';
 
-const getShareObject = ({ shareUri, filter }) => ({
+export const getShareObject = ({ shareUri, filter }) => ({
   variables: {
     shareUri,
     filter
@@ -60,5 +60,3 @@
     }
   `
 });
-
-export default getShareObject;
diff --git a/frontend/src/api/ShareObject/getShareRequestsFromMe.js b/frontend/src/modules/Shares/services/getShareRequestsFromMe.js
similarity index 92%
rename from frontend/src/api/ShareObject/getShareRequestsFromMe.js
rename to frontend/src/modules/Shares/services/getShareRequestsFromMe.js
index 2669408c0..17a91784c 100644
--- a/frontend/src/api/ShareObject/getShareRequestsFromMe.js
+++ b/frontend/src/modules/Shares/services/getShareRequestsFromMe.js
@@ -1,6 +1,6 @@
 import { gql } from 'apollo-boost';
 
-const getShareRequestsFromMe = ({ filter }) => ({
+export const getShareRequestsFromMe = ({ filter }) => ({
   variables: { filter },
   query: gql`
     query getShareRequestsFromMe($filter: ShareObjectFilter) {
@@ -48,5 +48,3 @@
     }
   `
 });
-
-export default getShareRequestsFromMe;
diff --git a/frontend/src/modules/Shares/services/index.js b/frontend/src/modules/Shares/services/index.js
new file mode 100644
index 000000000..c7f7e35d0
--- /dev/null
+++ b/frontend/src/modules/Shares/services/index.js
@@ -0,0 +1,11 @@
+export * from './addSharedItem';
+export * from './approveShareObject';
+export * from './deleteShareObject';
+export * from './getShareObject';
+export * from './getShareRequestsFromMe';
+export * from './rejectShareObject';
+export * from './removeSharedItem';
+export * from './revokeItemsShareObject';
+export * from './submitApproval';
+export * from './updateShareRejectReason';
+export * from './updateShareRequestReason';
diff --git a/frontend/src/modules/Shares/services/rejectShareObject.js b/frontend/src/modules/Shares/services/rejectShareObject.js
new file mode 100644
index 000000000..98f95924c
--- /dev/null
+++ b/frontend/src/modules/Shares/services/rejectShareObject.js
@@ -0,0 +1,16 @@
+import { gql } from 'apollo-boost';
+
+export const rejectShareObject = ({ shareUri, rejectPurpose }) => ({
+  variables: {
+    shareUri,
+    rejectPurpose
+  },
+  mutation: gql`
+    mutation RejectShareObject($shareUri: String!, $rejectPurpose: String!) {
+      rejectShareObject(shareUri: $shareUri, rejectPurpose: $rejectPurpose) {
+        shareUri
+        status
+      }
+    }
+  `
+});
diff --git a/frontend/src/modules/Shares/services/removeSharedItem.js b/frontend/src/modules/Shares/services/removeSharedItem.js
new file mode 100644
index 000000000..18c61762b
--- /dev/null
+++ b/frontend/src/modules/Shares/services/removeSharedItem.js
@@ -0,0 +1,12 @@
+import { gql } from 'apollo-boost';
+
+export const removeSharedItem = ({ shareItemUri }) => ({
+  variables: {
+    shareItemUri
+  },
+  mutation: gql`
+    mutation RemoveSharedItem($shareItemUri: String!) {
+      removeSharedItem(shareItemUri: $shareItemUri)
+    }
+  `
+});
diff --git a/frontend/src/modules/Shares/services/revokeItemsShareObject.js b/frontend/src/modules/Shares/services/revokeItemsShareObject.js
new file mode 100644
index 000000000..6b0ecff31
--- /dev/null
+++ b/frontend/src/modules/Shares/services/revokeItemsShareObject.js
@@ -0,0 +1,15 @@
+import { gql } from 'apollo-boost';
+
+export const revokeItemsShareObject = ({ input }) => ({
+  variables: {
+    input
+  },
+  mutation: gql`
+    mutation revokeItemsShareObject($input: RevokeItemsInput) {
+      revokeItemsShareObject(input: $input) {
+        shareUri
+        status
+      }
+    }
+  `
+});
diff --git a/frontend/src/api/ShareObject/submitApproval.js b/frontend/src/modules/Shares/services/submitApproval.js
similarity index 76%
rename from frontend/src/api/ShareObject/submitApproval.js
rename to frontend/src/modules/Shares/services/submitApproval.js
index aacba1580..bb0756d78 100644
--- a/frontend/src/api/ShareObject/submitApproval.js
+++ b/frontend/src/modules/Shares/services/submitApproval.js
@@ -1,6 +1,6 @@
 import { gql } from 'apollo-boost';
 
-const submitApproval = ({ shareUri }) => ({
+export const submitApproval = ({ shareUri }) => ({
   variables: {
     shareUri
   },
@@ -13,5 +13,3 @@
     }
   `
 });
-
-export default submitApproval;
diff --git a/frontend/src/modules/Shares/services/updateShareRejectReason.js b/frontend/src/modules/Shares/services/updateShareRejectReason.js
new file mode 100644
index 000000000..afea8a14e
--- /dev/null
+++ b/frontend/src/modules/Shares/services/updateShareRejectReason.js
@@ -0,0 +1,19 @@
+import { gql } from 'apollo-boost';
+
+export const updateShareRejectReason = ({ shareUri, rejectPurpose }) => ({
+  variables: {
+    shareUri,
+    rejectPurpose
+  },
+  mutation: gql`
+    mutation updateShareRejectReason(
+      $shareUri: String!
+      $rejectPurpose: String!
+    ) {
+      updateShareRejectReason(
+        shareUri: $shareUri
+        rejectPurpose: $rejectPurpose
+      )
+    }
+  `
+});
diff --git a/frontend/src/modules/Shares/services/updateShareRequestReason.js b/frontend/src/modules/Shares/services/updateShareRequestReason.js
new file mode 100644
index 000000000..7ad7fbbb9
--- /dev/null
+++ b/frontend/src/modules/Shares/services/updateShareRequestReason.js
@@ -0,0 +1,19 @@
+import { gql } from 'apollo-boost';
+
+export const updateShareRequestReason = ({ shareUri, requestPurpose }) => ({
+  variables: {
+    shareUri,
+    requestPurpose
+  },
+  mutation: gql`
+    mutation updateShareRequestReason(
+      $shareUri: String!
+      $requestPurpose: String!
+ ) { + updateShareRequestReason( + shareUri: $shareUri + requestPurpose: $requestPurpose + ) + } + ` +}); diff --git a/frontend/src/views/Shares/ShareList.js b/frontend/src/modules/Shares/views/ShareList.js similarity index 93% rename from frontend/src/views/Shares/ShareList.js rename to frontend/src/modules/Shares/views/ShareList.js index c2f8fa888..141efaf63 100644 --- a/frontend/src/views/Shares/ShareList.js +++ b/frontend/src/modules/Shares/views/ShareList.js @@ -1,6 +1,3 @@ -import { useState } from 'react'; -import { Link as RouterLink } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; import { Box, Breadcrumbs, @@ -12,12 +9,13 @@ import { Tabs, Typography } from '@mui/material'; -import { RiInboxArchiveLine } from 'react-icons/ri'; +import { useState } from 'react'; +import { Helmet } from 'react-helmet-async'; import { FiSend } from 'react-icons/fi'; -import useSettings from '../../hooks/useSettings'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ShareInboxList from './ShareInboxList'; -import ShareOutboxList from './ShareOutboxList'; +import { RiInboxArchiveLine } from 'react-icons/ri'; +import { Link as RouterLink } from 'react-router-dom'; +import { ChevronRightIcon, useSettings } from 'design'; +import { ShareInboxList, ShareOutboxList } from '../components'; const tabs = [ { label: 'Received', value: 'inbox', icon: }, diff --git a/frontend/src/views/Shares/ShareView.js b/frontend/src/modules/Shares/views/ShareView.js similarity index 76% rename from frontend/src/views/Shares/ShareView.js rename to frontend/src/modules/Shares/views/ShareView.js index b3aff9afd..308005954 100644 --- a/frontend/src/views/Shares/ShareView.js +++ b/frontend/src/modules/Shares/views/ShareView.js @@ -1,6 +1,12 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useParams } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; +import { + BlockOutlined, + CheckCircleOutlined, + CopyAllOutlined, + DeleteOutlined, + RefreshRounded, + RemoveCircleOutlineOutlined +} from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; import { Box, Breadcrumbs, @@ -24,45 +30,41 @@ import { Typography } from '@mui/material'; import CircularProgress from '@mui/material/CircularProgress'; -import { - BlockOutlined, - CheckCircleOutlined, - CopyAllOutlined, - DeleteOutlined, - RemoveCircleOutlineOutlined, - LockRounded, - RefreshRounded -} from '@mui/icons-material'; -import { LoadingButton } from '@mui/lab'; -import { CopyToClipboard } from 'react-copy-to-clipboard/lib/Component'; import { useTheme } from '@mui/styles'; -import * as PropTypes from 'prop-types'; import { useSnackbar } from 'notistack'; +import * as PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import { CopyToClipboard } from 'react-copy-to-clipboard/lib/Component'; +import { Helmet } from 'react-helmet-async'; import { useNavigate } from 'react-router'; -import useSettings from '../../hooks/useSettings'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import PlusIcon from '../../icons/Plus'; -import useClient from '../../hooks/useClient'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import ShareStatus from '../../components/ShareStatus'; -import TextAvatar from '../../components/TextAvatar'; -import Pager from '../../components/Pager'; -import Scrollbar from '../../components/Scrollbar'; -import * as Defaults from 
'../../components/defaults'; -import { PagedResponseDefault } from '../../components/defaults'; -import AddShareItemModal from './AddShareItemModal'; -import RevokeShareItemsModal from './RevokeShareItemsModal'; -import getShareObject from '../../api/ShareObject/getShareObject'; -import approveShareObject from '../../api/ShareObject/approveShareObject'; -import rejectShareObject from '../../api/ShareObject/rejectShareObject'; -import deleteShareObject from '../../api/ShareObject/deleteShareObject.js'; -import submitApproval from '../../api/ShareObject/submitApproval'; -import removeSharedItem from '../../api/ShareObject/removeSharedItem'; -import ShareRejectModal from './ShareRejectModal'; -import UpdateRejectReason from './ShareUpdateReject'; -import UpdateRequestReason from './ShareUpdateRequest'; - +import { Link as RouterLink, useParams } from 'react-router-dom'; +import { + ChevronRightIcon, + Defaults, + Pager, + PlusIcon, + Scrollbar, + ShareStatus, + TextAvatar, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { + approveShareObject, + deleteShareObject, + getShareObject, + rejectShareObject, + removeSharedItem, + submitApproval +} from '../services'; +import { + AddShareItemModal, + RevokeShareItemsModal, + ShareRejectModal, + UpdateRejectReason, + UpdateRequestReason +} from '../components'; function ShareViewHeader(props) { const { @@ -78,7 +80,6 @@ function ShareViewHeader(props) { const [accepting, setAccepting] = useState(false); const [rejecting, setRejecting] = useState(false); const [submitting, setSubmitting] = useState(false); - const [removing, setRemoving] = useState(false); const [isRejectShareModalOpen, setIsRejectShareModalOpen] = useState(false); const submit = async () => { setSubmitting(true); @@ -87,6 +88,7 @@ function ShareViewHeader(props) { shareUri: share.shareUri }) ); + if (!response.errors) { enqueueSnackbar('Share request submitted', { anchorOrigin: { @@ -101,13 +103,14 @@ function ShareViewHeader(props) { } setSubmitting(false); }; + const remove = async () => { - setRemoving(true); const response = await client.mutate( deleteShareObject({ shareUri: share.shareUri }) ); + if (!response.errors) { enqueueSnackbar('Share request deleted', { anchorOrigin: { @@ -120,7 +123,6 @@ function ShareViewHeader(props) { } else { dispatch({ type: SET_ERROR, error: response.errors[0].message }); } - setRemoving(false); }; const handleRejectShareModalOpen = () => { @@ -138,6 +140,7 @@ function ShareViewHeader(props) { shareUri: share.shareUri }) ); + if (!response.errors) { enqueueSnackbar('Share request approved', { anchorOrigin: { @@ -153,6 +156,7 @@ function ShareViewHeader(props) { } setAccepting(false); }; + const reject = async (rejectPurpose) => { setRejecting(true); const response = await client.mutate( @@ -161,8 +165,9 @@ function ShareViewHeader(props) { rejectPurpose: rejectPurpose }) ); + if (!response.errors) { - handleRejectShareModalClose() + handleRejectShareModalClose(); enqueueSnackbar('Share request rejected', { anchorOrigin: { horizontal: 'right', @@ -177,129 +182,131 @@ function ShareViewHeader(props) { } setRejecting(false); }; + return ( <> - - - - Share object for {share.dataset?.datasetName} - - } - sx={{ mt: 1 }} - > - - Shares - - - Shares - - - {share.dataset?.datasetName} + + + + Share object for {share.dataset?.datasetName} - - - - {!loading && ( - - - {share.userRoleForShareObject === 'Approvers' ? 
( - <> - {share.status === 'Submitted' && ( - <> + {share.dataset?.datasetName} + + + + + {!loading && ( + + + {share.userRoleForShareObject === 'Approvers' ? ( + <> + {share.status === 'Submitted' && ( + <> + } + sx={{ m: 1 }} + onClick={accept} + type="button" + variant="outlined" + > + Approve + + } + onClick={handleRejectShareModalOpen} + type="button" + variant="outlined" + > + Reject + + + )} + + ) : ( + <> + {(share.status === 'Draft' || + share.status === 'Rejected') && ( } sx={{ m: 1 }} - onClick={accept} - type="button" - variant="outlined" - > - Approve - - } - onClick={handleRejectShareModalOpen} + onClick={submit} type="button" - variant="outlined" + variant="contained" > - Reject + Submit - - )} - - ) : ( - <> - {(share.status === 'Draft' || share.status === 'Rejected') && ( - } - sx={{ m: 1 }} - onClick={submit} - type="button" - variant="contained" - > - Submit - - )} - - )} - - - )} + )} + + )} + + + )} + - - {isRejectShareModalOpen && ( - - )} + {isRejectShareModalOpen && ( + + )} ); } @@ -355,38 +362,34 @@ function SharedItem(props) { - {(isRemovingItem) ? ( + {isRemovingItem ? ( ) : ( - <> - { - (item.status === 'Share_Succeeded' || item.status === 'Revoke_Failed') ? ( - - Revoke access to this item before deleting - - ) : (item.status === 'Share_Approved' || item.status === 'Revoke_Approved' || item.status === 'Revoke_In_Progress' || item.status === 'Share_In_Progress') ? ( - - Wait until this item is processed - - ) : ( - - ) - } - + <> + {item.status === 'Share_Succeeded' || + item.status === 'Revoke_Failed' ? ( + + Revoke access to this item before deleting + + ) : item.status === 'Share_Approved' || + item.status === 'Revoke_Approved' || + item.status === 'Revoke_In_Progress' || + item.status === 'Share_In_Progress' ? ( + + Wait until this item is processed + + ) : ( + + )} + )} @@ -406,8 +409,8 @@ const ShareView = () => { const { settings } = useSettings(); const { enqueueSnackbar } = useSnackbar(); const [share, setShare] = useState(null); - const [filter, setFilter] = useState(Defaults.DefaultFilter); - const [sharedItems, setSharedItems] = useState(PagedResponseDefault); + const [filter, setFilter] = useState(Defaults.filter); + const [sharedItems, setSharedItems] = useState(Defaults.pagedResponse); const navigate = useNavigate(); const dispatch = useDispatch(); const params = useParams(); @@ -417,10 +420,18 @@ const ShareView = () => { const [loadingShareItems, setLoadingShareItems] = useState(false); const [isAddItemModalOpen, setIsAddItemModalOpen] = useState(false); const [isRevokeItemsModalOpen, setIsRevokeItemsModalOpen] = useState(false); - const handleAddItemModalOpen = () => {setIsAddItemModalOpen(true);}; - const handleAddItemModalClose = () => {setIsAddItemModalOpen(false);}; - const handleRevokeItemModalOpen = () => {setIsRevokeItemsModalOpen(true);}; - const handleRevokeItemModalClose = () => {setIsRevokeItemsModalOpen(false);}; + const handleAddItemModalOpen = () => { + setIsAddItemModalOpen(true); + }; + const handleAddItemModalClose = () => { + setIsAddItemModalOpen(false); + }; + const handleRevokeItemModalOpen = () => { + setIsRevokeItemsModalOpen(true); + }; + const handleRevokeItemModalClose = () => { + setIsRevokeItemsModalOpen(false); + }; const handlePageChange = async (event, value) => { if (value <= sharedItems.pages && value !== sharedItems.page) { await setFilter({ ...filter, isShared: true, page: value }); @@ -435,7 +446,7 @@ const ShareView = () => { variant: 'success' }); }; - + const fetchItem = useCallback(async () => { 
setLoading(true); const response = await client.query( @@ -472,7 +483,7 @@ const ShareView = () => { }, [client, dispatch, filter, fetchItem, params.uri] ); - + useEffect(() => { if (client) { fetchItem().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); @@ -720,52 +731,46 @@ const ShareView = () => { - + Request Purpose {share.userRoleForShareObject === 'Requesters' && ( - + )} {share.requestPurpose || '-'} - + Reject Purpose {share.userRoleForShareObject === 'Approvers' && ( - + )} {share.rejectPurpose || '-'} @@ -783,10 +788,18 @@ const ShareView = () => { - + S3 Access Point name (Folder sharing): - + {` ${share.consumptionData.s3AccessPointName || '-'}`} @@ -809,11 +822,21 @@ const ShareView = () => { - + Glue database name (Table sharing): - - {` ${share.consumptionData.sharedGlueDatabase || '-'}`} + + {` ${ + share.consumptionData.sharedGlueDatabase || '-' + }`} { onClick={handleRevokeItemModalOpen} type="button" variant="outlined" - > + > Revoke Items diff --git a/frontend/src/views/Tables/TableColumns.js b/frontend/src/modules/Tables/components/TableColumns.js similarity index 88% rename from frontend/src/views/Tables/TableColumns.js rename to frontend/src/modules/Tables/components/TableColumns.js index 346da5914..b3dd475f9 100644 --- a/frontend/src/views/Tables/TableColumns.js +++ b/frontend/src/modules/Tables/components/TableColumns.js @@ -1,19 +1,16 @@ -import React, { useEffect, useState } from 'react'; -import { DataGrid } from '@mui/x-data-grid'; -import { Box, Card, CircularProgress } from '@mui/material'; -import { useSnackbar } from 'notistack'; import { SyncAlt } from '@mui/icons-material'; import { LoadingButton } from '@mui/lab'; +import { Box, Card, CircularProgress } from '@mui/material'; +import { DataGrid } from '@mui/x-data-grid'; +import { useSnackbar } from 'notistack'; import * as PropTypes from 'prop-types'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import useClient from '../../hooks/useClient'; -import listDatasetTableColumns from '../../api/DatasetTable/listDatasetTableColumns'; -import updateColumnDescription from '../../api/DatasetTable/updateDatasetTableColumn'; -import syncDatasetTableColumns from '../../api/DatasetTable/syncDatasetTableColumns'; -import * as Defaults from '../../components/defaults'; +import React, { useEffect, useState } from 'react'; +import { Defaults } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { listDatasetTableColumns, useClient } from 'services'; +import { updateColumnDescription, syncDatasetTableColumns } from '../services'; -const TableColumns = (props) => { +export const TableColumns = (props) => { const { table, isAdmin } = props; const dispatch = useDispatch(); const client = useClient(); @@ -43,7 +40,7 @@ const TableColumns = (props) => { } }; - const handleEditCellChangeCommitted = (e:GridCellEditCommitParams) => { + const handleEditCellChangeCommitted = (e) => { const data = e.value; if (e.field === 'description') { columns.map((c) => { @@ -99,7 +96,7 @@ const TableColumns = (props) => { const response = await client.query( listDatasetTableColumns({ tableUri: table.tableUri, - filter: Defaults.SelectListFilter + filter: Defaults.selectListFilter }) ); if (!response.errors) { @@ -178,8 +175,8 @@ const TableColumns = (props) => { ); }; + TableColumns.propTypes = { table: PropTypes.object.isRequired, isAdmin: PropTypes.bool.isRequired }; -export default TableColumns; diff --git 
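The TableColumns hunk above swaps Defaults.SelectListFilter for the renamed Defaults.selectListFilter from the 'design' package and pulls listDatasetTableColumns from 'services' and updateColumnDescription from '../services'. A hedged sketch of the two calls it wires together; the paged nodes result shape and the { description } input field are assumptions, since neither query document appears in this part of the diff:

import { Defaults } from 'design';
import { listDatasetTableColumns } from 'services';
import { updateColumnDescription } from '../services';

// Sketch: fetch a table's columns with the shared select-list filter,
// then persist an edited description for one column (input shape assumed).
async function fetchColumns(client, tableUri) {
  const response = await client.query(
    listDatasetTableColumns({ tableUri, filter: Defaults.selectListFilter })
  );
  if (response.errors) {
    throw new Error(response.errors[0].message);
  }
  return response.data.listDatasetTableColumns.nodes;
}

async function saveDescription(client, columnUri, description) {
  const response = await client.mutate(
    updateColumnDescription({ columnUri, input: { description } })
  );
  if (response.errors) {
    throw new Error(response.errors[0].message);
  }
}
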
a/frontend/src/views/Tables/TableMetrics.js b/frontend/src/modules/Tables/components/TableMetrics.js similarity index 96% rename from frontend/src/views/Tables/TableMetrics.js rename to frontend/src/modules/Tables/components/TableMetrics.js index c6ecfa2df..8f17e2c46 100644 --- a/frontend/src/views/Tables/TableMetrics.js +++ b/frontend/src/modules/Tables/components/TableMetrics.js @@ -1,4 +1,6 @@ -import React, { useCallback, useEffect, useState } from 'react'; +import { useTheme } from '@emotion/react'; +import { PlayArrowOutlined, RefreshRounded } from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; import { Box, Button, @@ -16,23 +18,21 @@ import { Tooltip, Typography } from '@mui/material'; -import { PlayArrowOutlined, RefreshRounded } from '@mui/icons-material'; +import * as PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import Chart from 'react-apexcharts'; import { CgHashtag } from 'react-icons/cg'; import { VscSymbolString } from 'react-icons/vsc'; -import Chart from 'react-apexcharts'; -import { useTheme } from '@emotion/react'; -import { LoadingButton } from '@mui/lab'; -import * as PropTypes from 'prop-types'; -import useClient from '../../hooks/useClient'; -import getDatasetTableProfilingRun from '../../api/DatasetTable/getDatasetTableProfilingRun'; -import startDatasetProfilingRun from '../../api/DatasetTable/startProfilingRun'; -import listDatasetTableProfilingRuns from '../../api/DatasetTable/listDatasetTableProfilingRuns'; -import Label from '../../components/Label'; -import Scrollbar from '../../components/Scrollbar'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; +import { Label, Scrollbar } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { + getDatasetTableProfilingRun, + listDatasetTableProfilingRuns, + startDatasetProfilingRun +} from '../services'; -const TableMetrics = ({ table, isAdmin }) => { +export const TableMetrics = ({ table, isAdmin }) => { const client = useClient(); const dispatch = useDispatch(); const theme = useTheme(); @@ -569,8 +569,8 @@ const TableMetrics = ({ table, isAdmin }) => { ); }; + TableMetrics.propTypes = { table: PropTypes.object.isRequired, isAdmin: PropTypes.bool.isRequired }; -export default TableMetrics; diff --git a/frontend/src/views/Tables/TableOverview.js b/frontend/src/modules/Tables/components/TableOverview.js similarity index 86% rename from frontend/src/views/Tables/TableOverview.js rename to frontend/src/modules/Tables/components/TableOverview.js index ca3e5360d..8d6d9e5b4 100644 --- a/frontend/src/views/Tables/TableOverview.js +++ b/frontend/src/modules/Tables/components/TableOverview.js @@ -1,9 +1,8 @@ import { Box, Grid } from '@mui/material'; import PropTypes from 'prop-types'; -import ObjectBrief from '../../components/ObjectBrief'; -import ObjectMetadata from '../../components/ObjectMetadata'; +import { ObjectBrief, ObjectMetadata } from 'design'; -const TableOverview = (props) => { +export const TableOverview = (props) => { const { table, ...other } = props; return ( @@ -42,5 +41,3 @@ const TableOverview = (props) => { TableOverview.propTypes = { table: PropTypes.object.isRequired }; - -export default TableOverview; diff --git a/frontend/src/views/Tables/TablePreview.js b/frontend/src/modules/Tables/components/TablePreview.js similarity index 86% rename from frontend/src/views/Tables/TablePreview.js rename to 
frontend/src/modules/Tables/components/TablePreview.js index 73878f678..9ed8ed162 100644 --- a/frontend/src/views/Tables/TablePreview.js +++ b/frontend/src/modules/Tables/components/TablePreview.js @@ -1,13 +1,12 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import * as ReactIf from 'react-if'; import { Card, CircularProgress } from '@mui/material'; -import * as PropTypes from 'prop-types'; -import { DataGrid } from '@mui/x-data-grid'; import { styled } from '@mui/styles'; -import previewTable2 from '../../api/DatasetTable/previewTable2'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import useClient from '../../hooks/useClient'; +import { DataGrid } from '@mui/x-data-grid'; +import * as PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import * as ReactIf from 'react-if'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { previewTable } from '../services'; const StyledDataGrid = styled(DataGrid)(({ theme }) => ({ '& .MuiDataGrid-columnsContainer': { @@ -17,7 +16,7 @@ const StyledDataGrid = styled(DataGrid)(({ theme }) => ({ : 'rgba(255,255,255,0.38)' } })); -const TablePreview = (props) => { +export const TablePreview = (props) => { const { table } = props; const dispatch = useDispatch(); const client = useClient(); @@ -25,9 +24,9 @@ const TablePreview = (props) => { const [result, setResult] = useState({ rows: [], fields: [] }); const fetchData = useCallback(async () => { setRunning(true); - const response = await client.query(previewTable2(table.tableUri)); + const response = await client.query(previewTable(table.tableUri)); if (!response.errors) { - setResult(response.data.previewTable2); + setResult(response.data.previewTable); } else { dispatch({ type: SET_ERROR, error: response.errors[0].message }); } @@ -85,7 +84,7 @@ const TablePreview = (props) => { ); }; + TablePreview.propTypes = { table: PropTypes.object.isRequired }; -export default TablePreview; diff --git a/frontend/src/modules/Tables/components/index.js b/frontend/src/modules/Tables/components/index.js new file mode 100644 index 000000000..5dda7b857 --- /dev/null +++ b/frontend/src/modules/Tables/components/index.js @@ -0,0 +1,4 @@ +export * from './TableColumns'; +export * from './TableMetrics'; +export * from './TableOverview'; +export * from './TablePreview'; diff --git a/frontend/src/api/DatasetTable/getDatasetTable.js b/frontend/src/modules/Tables/services/getDatasetTable.js similarity index 93% rename from frontend/src/api/DatasetTable/getDatasetTable.js rename to frontend/src/modules/Tables/services/getDatasetTable.js index d26320c9b..68cf347a1 100644 --- a/frontend/src/api/DatasetTable/getDatasetTable.js +++ b/frontend/src/modules/Tables/services/getDatasetTable.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getDatasetTable = (tableUri) => ({ +export const getDatasetTable = (tableUri) => ({ variables: { tableUri }, @@ -53,5 +53,3 @@ const getDatasetTable = (tableUri) => ({ } ` }); - -export default getDatasetTable; diff --git a/frontend/src/api/DatasetTable/getDatasetTableProfilingRun.js b/frontend/src/modules/Tables/services/getDatasetTableProfilingRun.js similarity index 82% rename from frontend/src/api/DatasetTable/getDatasetTableProfilingRun.js rename to frontend/src/modules/Tables/services/getDatasetTableProfilingRun.js index 6013b9eb0..f76f1e7e3 100644 --- a/frontend/src/api/DatasetTable/getDatasetTableProfilingRun.js 
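In the TablePreview hunk above, previewTable2 is replaced by the new previewTable builder (defined in the Tables services hunks that follow), and the component now reads response.data.previewTable. A minimal sketch of that fetch, with the helper name being illustrative rather than taken from the diff:

import { previewTable } from '../services';

// Sketch: query the renamed preview operation and return its { rows, fields } payload,
// as TablePreview above does before feeding the data grid.
async function fetchPreview(client, tableUri) {
  const response = await client.query(previewTable(tableUri));
  if (response.errors) {
    throw new Error(response.errors[0].message);
  }
  return response.data.previewTable;
}
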
+++ b/frontend/src/modules/Tables/services/getDatasetTableProfilingRun.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getDatasetTableProfilingRun = (tableUri) => ({ +export const getDatasetTableProfilingRun = (tableUri) => ({ variables: { tableUri }, @@ -23,5 +23,3 @@ const getDatasetTableProfilingRun = (tableUri) => ({ } ` }); - -export default getDatasetTableProfilingRun; diff --git a/frontend/src/modules/Tables/services/index.js b/frontend/src/modules/Tables/services/index.js new file mode 100644 index 000000000..8edb0b861 --- /dev/null +++ b/frontend/src/modules/Tables/services/index.js @@ -0,0 +1,8 @@ +export * from './getDatasetTable'; +export * from './getDatasetTableProfilingRun'; +export * from './listDatasetTableProfilingRuns'; +export * from './previewTable'; +export * from './startDatasetProfilingRun'; +export * from './syncDatasetTableColumns'; +export * from './updateDatasetTable'; +export * from './updateDatasetTableColumn'; diff --git a/frontend/src/api/DatasetTable/listDatasetTableProfilingRuns.js b/frontend/src/modules/Tables/services/listDatasetTableProfilingRuns.js similarity index 81% rename from frontend/src/api/DatasetTable/listDatasetTableProfilingRuns.js rename to frontend/src/modules/Tables/services/listDatasetTableProfilingRuns.js index 3daf73b39..a50f8e535 100644 --- a/frontend/src/api/DatasetTable/listDatasetTableProfilingRuns.js +++ b/frontend/src/modules/Tables/services/listDatasetTableProfilingRuns.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listDatasetTableProfilingRuns = (tableUri) => ({ +export const listDatasetTableProfilingRuns = (tableUri) => ({ variables: { tableUri }, @@ -24,5 +24,3 @@ const listDatasetTableProfilingRuns = (tableUri) => ({ } ` }); - -export default listDatasetTableProfilingRuns; diff --git a/frontend/src/modules/Tables/services/previewTable.js b/frontend/src/modules/Tables/services/previewTable.js new file mode 100644 index 000000000..0e5a0d88c --- /dev/null +++ b/frontend/src/modules/Tables/services/previewTable.js @@ -0,0 +1,15 @@ +import { gql } from 'apollo-boost'; + +export const previewTable = (tableUri) => ({ + variables: { + tableUri + }, + query: gql` + query PreviewTable($tableUri: String!) { + previewTable(tableUri: $tableUri) { + rows + fields + } + } + ` +}); diff --git a/frontend/src/modules/Tables/services/startDatasetProfilingRun.js b/frontend/src/modules/Tables/services/startDatasetProfilingRun.js new file mode 100644 index 000000000..5b65c31ca --- /dev/null +++ b/frontend/src/modules/Tables/services/startDatasetProfilingRun.js @@ -0,0 +1,14 @@ +import { gql } from 'apollo-boost'; + +export const startDatasetProfilingRun = ({ input }) => ({ + variables: { + input + }, + mutation: gql` + mutation startDatasetProfilingRun($input: StartDatasetProfilingRunInput!) 
{ + startDatasetProfilingRun(input: $input) { + profilingRunUri + } + } + ` +}); diff --git a/frontend/src/api/DatasetTable/syncDatasetTableColumns.js b/frontend/src/modules/Tables/services/syncDatasetTableColumns.js similarity index 81% rename from frontend/src/api/DatasetTable/syncDatasetTableColumns.js rename to frontend/src/modules/Tables/services/syncDatasetTableColumns.js index c14e4f210..c91b33b7c 100644 --- a/frontend/src/api/DatasetTable/syncDatasetTableColumns.js +++ b/frontend/src/modules/Tables/services/syncDatasetTableColumns.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const syncDatasetTableColumns = (tableUri) => ({ +export const syncDatasetTableColumns = (tableUri) => ({ variables: { tableUri }, @@ -22,5 +22,3 @@ const syncDatasetTableColumns = (tableUri) => ({ } ` }); - -export default syncDatasetTableColumns; diff --git a/frontend/src/api/DatasetTable/updateDatasetTable.js b/frontend/src/modules/Tables/services/updateDatasetTable.js similarity index 77% rename from frontend/src/api/DatasetTable/updateDatasetTable.js rename to frontend/src/modules/Tables/services/updateDatasetTable.js index 81c1572b8..7aef384c7 100644 --- a/frontend/src/api/DatasetTable/updateDatasetTable.js +++ b/frontend/src/modules/Tables/services/updateDatasetTable.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const updateDatasetTable = ({ tableUri, input }) => ({ +export const updateDatasetTable = ({ tableUri, input }) => ({ variables: { tableUri, input @@ -16,5 +16,3 @@ const updateDatasetTable = ({ tableUri, input }) => ({ } ` }); - -export default updateDatasetTable; diff --git a/frontend/src/api/DatasetTable/updateDatasetTableColumn.js b/frontend/src/modules/Tables/services/updateDatasetTableColumn.js similarity index 76% rename from frontend/src/api/DatasetTable/updateDatasetTableColumn.js rename to frontend/src/modules/Tables/services/updateDatasetTableColumn.js index 73bb06a12..8f94ae3b7 100644 --- a/frontend/src/api/DatasetTable/updateDatasetTableColumn.js +++ b/frontend/src/modules/Tables/services/updateDatasetTableColumn.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const updateColumnDescription = ({ columnUri, input }) => ({ +export const updateColumnDescription = ({ columnUri, input }) => ({ variables: { columnUri, input @@ -17,5 +17,3 @@ const updateColumnDescription = ({ columnUri, input }) => ({ } ` }); - -export default updateColumnDescription; diff --git a/frontend/src/views/Tables/TableEditForm.js b/frontend/src/modules/Tables/views/TableEditForm.js similarity index 94% rename from frontend/src/views/Tables/TableEditForm.js rename to frontend/src/modules/Tables/views/TableEditForm.js index ed944e5a1..2fc566a2b 100644 --- a/frontend/src/views/Tables/TableEditForm.js +++ b/frontend/src/modules/Tables/views/TableEditForm.js @@ -1,6 +1,4 @@ -import { useEffect, useState } from 'react'; -import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; +import { LoadingButton } from '@mui/lab'; import { Autocomplete, Box, @@ -17,22 +15,23 @@ import { TextField, Typography } from '@mui/material'; -import { Formik } from 'formik'; import CircularProgress from '@mui/material/CircularProgress'; -import { LoadingButton } from '@mui/lab'; -import * as PropTypes from 'prop-types'; +import { Formik } from 'formik'; import { useSnackbar } from 'notistack'; -import useSettings from '../../hooks/useSettings'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import useClient from 
'../../hooks/useClient'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import getDatasetTable from '../../api/DatasetTable/getDatasetTable'; -import searchGlossary from '../../api/Glossary/searchGlossary'; -import ChipInput from '../../components/TagsInput'; -import updateDatasetTable from '../../api/DatasetTable/updateDatasetTable'; -import * as Defaults from '../../components/defaults'; +import * as PropTypes from 'prop-types'; +import { useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; +import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; +import { + ArrowLeftIcon, + ChevronRightIcon, + ChipInput, + Defaults, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { searchGlossary, useClient } from 'services'; +import { getDatasetTable, updateDatasetTable } from '../services'; function TableEditHeader(props) { const { table } = props; @@ -172,7 +171,7 @@ const TableEditForm = () => { ); } setTableTerms(fetchedTerms); - response = client.query(searchGlossary(Defaults.SelectListFilter)); + response = client.query(searchGlossary(Defaults.selectListFilter)); response.then((result) => { if ( result.data.searchGlossary && diff --git a/frontend/src/views/Tables/TableView.js b/frontend/src/modules/Tables/views/TableView.js similarity index 91% rename from frontend/src/views/Tables/TableView.js rename to frontend/src/modules/Tables/views/TableView.js index 11ab7b6d1..36b95c30d 100644 --- a/frontend/src/views/Tables/TableView.js +++ b/frontend/src/modules/Tables/views/TableView.js @@ -1,6 +1,4 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useParams } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; +import { ForumOutlined, Warning } from '@mui/icons-material'; import { Box, Breadcrumbs, @@ -17,23 +15,27 @@ import { Typography } from '@mui/material'; import * as PropTypes from 'prop-types'; +import React, { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; import { FaTrash } from 'react-icons/fa'; import { useNavigate } from 'react-router'; -import { ForumOutlined, Warning } from '@mui/icons-material'; -import useSettings from '../../hooks/useSettings'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import getDatasetTable from '../../api/DatasetTable/getDatasetTable'; -import deleteDatasetTable from '../../api/DatasetTable/deleteDatasetTable'; -import TablePreview from './TablePreview'; -import TableMetrics from './TableMetrics'; -import TableColumns from './TableColumns'; -import TableOverview from './TableOverview'; -import PencilAltIcon from '../../icons/PencilAlt'; -import DeleteObjectModal from '../../components/DeleteObjectModal'; -import FeedComments from '../Feed/FeedComments'; +import { Link as RouterLink, useParams } from 'react-router-dom'; +import { + ChevronRightIcon, + DeleteObjectModal, + PencilAltIcon, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { deleteDatasetTable, useClient } from 'services'; +import { FeedComments } from 'modules/Shared'; +import { getDatasetTable } from '../services'; +import { + TableColumns, + TableMetrics, + TableOverview, + 
TablePreview +} from '../components'; const tabs = [ { label: 'Preview', value: 'preview' }, diff --git a/frontend/src/views/Worksheets/SQLQueryEditor.js b/frontend/src/modules/Worksheets/components/SQLQueryEditor.js similarity index 87% rename from frontend/src/views/Worksheets/SQLQueryEditor.js rename to frontend/src/modules/Worksheets/components/SQLQueryEditor.js index 2fb3ab7d8..dba02596f 100644 --- a/frontend/src/views/Worksheets/SQLQueryEditor.js +++ b/frontend/src/modules/Worksheets/components/SQLQueryEditor.js @@ -1,10 +1,9 @@ +import Editor from '@monaco-editor/react'; import PropTypes from 'prop-types'; import { useRef } from 'react'; -import Editor from '@monaco-editor/react'; -import useSettings from '../../hooks/useSettings'; -import { THEMES } from '../../constants'; +import { THEMES, useSettings } from 'design'; -const SQLQueryEditor = ({ sql, setSqlBody }) => { +export const SQLQueryEditor = ({ sql, setSqlBody }) => { const { settings } = useSettings(); const valueGetter = useRef(); function handleEditorDidMount(_valueGetter) { @@ -47,4 +46,3 @@ SQLQueryEditor.propTypes = { sql: PropTypes.any.isRequired, setSqlBody: PropTypes.func.isRequired }; -export default SQLQueryEditor; diff --git a/frontend/src/views/Worksheets/WorksheetEditFormModal.js b/frontend/src/modules/Worksheets/components/WorksheetEditFormModal.js similarity index 95% rename from frontend/src/views/Worksheets/WorksheetEditFormModal.js rename to frontend/src/modules/Worksheets/components/WorksheetEditFormModal.js index fae1e27dc..5fbf13423 100644 --- a/frontend/src/views/Worksheets/WorksheetEditFormModal.js +++ b/frontend/src/modules/Worksheets/components/WorksheetEditFormModal.js @@ -1,5 +1,4 @@ -import PropTypes from 'prop-types'; -import { useSnackbar } from 'notistack'; +import { LoadingButton } from '@mui/lab'; import { Box, CardContent, @@ -11,15 +10,15 @@ import { Typography } from '@mui/material'; import { Formik } from 'formik'; +import { useSnackbar } from 'notistack'; +import PropTypes from 'prop-types'; import * as Yup from 'yup'; -import { LoadingButton } from '@mui/lab'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import useClient from '../../hooks/useClient'; -import ChipInput from '../../components/TagsInput'; -import { updateWorksheet } from '../../api/Worksheet'; +import { ChipInput } from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { updateWorksheet } from '../services'; -const WorksheetEditFormModal = (props) => { +export const WorksheetEditFormModal = (props) => { const { worksheet, onApply, onClose, open, reload, ...other } = props; const { enqueueSnackbar } = useSnackbar(); const dispatch = useDispatch(); @@ -210,5 +209,3 @@ WorksheetEditFormModal.propTypes = { reload: PropTypes.func, open: PropTypes.bool.isRequired }; - -export default WorksheetEditFormModal; diff --git a/frontend/src/views/Worksheets/WorksheetListItem.js b/frontend/src/modules/Worksheets/components/WorksheetListItem.js similarity index 95% rename from frontend/src/views/Worksheets/WorksheetListItem.js rename to frontend/src/modules/Worksheets/components/WorksheetListItem.js index 4230ee33f..2373e8af4 100644 --- a/frontend/src/views/Worksheets/WorksheetListItem.js +++ b/frontend/src/modules/Worksheets/components/WorksheetListItem.js @@ -8,17 +8,15 @@ import { Tooltip, Typography } from '@mui/material'; -import * as FaIcons from 'react-icons/fa'; -import { Link as RouterLink } from 
'react-router-dom'; import PropTypes from 'prop-types'; -import { useNavigate } from 'react-router'; -import { AiOutlineExperiment } from 'react-icons/ai'; import React from 'react'; -import IconAvatar from '../../components/IconAvatar'; -import Label from '../../components/Label'; -import useCardStyle from '../../hooks/useCardStyle'; +import { AiOutlineExperiment } from 'react-icons/ai'; +import * as FaIcons from 'react-icons/fa'; +import { useNavigate } from 'react-router'; +import { Link as RouterLink } from 'react-router-dom'; +import { IconAvatar, Label, useCardStyle } from 'design'; -const WorksheetListItem = (props) => { +export const WorksheetListItem = (props) => { const { worksheet } = props; const classes = useCardStyle(); const navigate = useNavigate(); @@ -197,4 +195,3 @@ const WorksheetListItem = (props) => { WorksheetListItem.propTypes = { worksheet: PropTypes.object.isRequired }; -export default WorksheetListItem; diff --git a/frontend/src/views/Worksheets/WorksheetResult.js b/frontend/src/modules/Worksheets/components/WorksheetResult.js similarity index 93% rename from frontend/src/views/Worksheets/WorksheetResult.js rename to frontend/src/modules/Worksheets/components/WorksheetResult.js index 6f5261fac..e8e74e557 100644 --- a/frontend/src/views/Worksheets/WorksheetResult.js +++ b/frontend/src/modules/Worksheets/components/WorksheetResult.js @@ -1,4 +1,3 @@ -import * as ReactIf from 'react-if'; import { Box, Card, @@ -12,12 +11,13 @@ import { TableHead, TableRow } from '@mui/material'; -import { FaBars } from 'react-icons/fa'; -import React from 'react'; import PropTypes from 'prop-types'; -import Scrollbar from '../../components/Scrollbar'; +import React from 'react'; +import { FaBars } from 'react-icons/fa'; +import * as ReactIf from 'react-if'; +import { Scrollbar } from 'design'; -const WorksheetResult = ({ results, loading }) => { +export const WorksheetResult = ({ results, loading }) => { if (loading) { return ; } @@ -72,4 +72,3 @@ WorksheetResult.propTypes = { results: PropTypes.object.isRequired, loading: PropTypes.bool.isRequired }; -export default WorksheetResult; diff --git a/frontend/src/modules/Worksheets/components/index.js b/frontend/src/modules/Worksheets/components/index.js new file mode 100644 index 000000000..54d2e1f8c --- /dev/null +++ b/frontend/src/modules/Worksheets/components/index.js @@ -0,0 +1,4 @@ +export * from './SQLQueryEditor'; +export * from './WorksheetEditFormModal'; +export * from './WorksheetListItem'; +export * from './WorksheetResult'; diff --git a/frontend/src/api/Worksheet/createWorksheet.js b/frontend/src/modules/Worksheets/services/createWorksheet.js similarity index 78% rename from frontend/src/api/Worksheet/createWorksheet.js rename to frontend/src/modules/Worksheets/services/createWorksheet.js index 0579d423f..28883b9f2 100644 --- a/frontend/src/api/Worksheet/createWorksheet.js +++ b/frontend/src/modules/Worksheets/services/createWorksheet.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const createWorksheet = (input) => ({ +export const createWorksheet = (input) => ({ variables: { input }, @@ -14,5 +14,3 @@ const createWorksheet = (input) => ({ } ` }); - -export default createWorksheet; diff --git a/frontend/src/modules/Worksheets/services/deleteWorksheet.js b/frontend/src/modules/Worksheets/services/deleteWorksheet.js new file mode 100644 index 000000000..826e36eee --- /dev/null +++ b/frontend/src/modules/Worksheets/services/deleteWorksheet.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const 
deleteWorksheet = (worksheetUri) => ({ + variables: { + worksheetUri + }, + mutation: gql` + mutation deleteWorksheet($worksheetUri: String!) { + deleteWorksheet(worksheetUri: $worksheetUri) + } + ` +}); diff --git a/frontend/src/api/Worksheet/getWorksheet.js b/frontend/src/modules/Worksheets/services/getWorksheet.js similarity index 92% rename from frontend/src/api/Worksheet/getWorksheet.js rename to frontend/src/modules/Worksheets/services/getWorksheet.js index 99e1e9f91..3abafd491 100644 --- a/frontend/src/api/Worksheet/getWorksheet.js +++ b/frontend/src/modules/Worksheets/services/getWorksheet.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getWorksheet = (worksheetUri) => ({ +export const getWorksheet = (worksheetUri) => ({ variables: { worksheetUri }, @@ -47,5 +47,3 @@ const getWorksheet = (worksheetUri) => ({ } ` }); - -export default getWorksheet; diff --git a/frontend/src/modules/Worksheets/services/index.js b/frontend/src/modules/Worksheets/services/index.js new file mode 100644 index 000000000..874f93676 --- /dev/null +++ b/frontend/src/modules/Worksheets/services/index.js @@ -0,0 +1,6 @@ +export * from './createWorksheet'; +export * from './deleteWorksheet'; +export * from './getWorksheet'; +export * from './listWorksheets'; +export * from './runAthenaSqlQuery'; +export * from './updateWorksheet'; diff --git a/frontend/src/api/Worksheet/listWorksheets.js b/frontend/src/modules/Worksheets/services/listWorksheets.js similarity index 86% rename from frontend/src/api/Worksheet/listWorksheets.js rename to frontend/src/modules/Worksheets/services/listWorksheets.js index 86704d242..4d42e0d59 100644 --- a/frontend/src/api/Worksheet/listWorksheets.js +++ b/frontend/src/modules/Worksheets/services/listWorksheets.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listWorksheets = ({ filter }) => ({ +export const listWorksheets = ({ filter }) => ({ variables: { filter }, @@ -26,5 +26,3 @@ const listWorksheets = ({ filter }) => ({ } ` }); - -export default listWorksheets; diff --git a/frontend/src/modules/Worksheets/services/runAthenaSqlQuery.js b/frontend/src/modules/Worksheets/services/runAthenaSqlQuery.js new file mode 100644 index 000000000..8d5666cf4 --- /dev/null +++ b/frontend/src/modules/Worksheets/services/runAthenaSqlQuery.js @@ -0,0 +1,38 @@ +import { gql } from 'apollo-boost'; + +export const runAthenaSqlQuery = ({ + sqlQuery, + environmentUri, + worksheetUri +}) => ({ + variables: { + sqlQuery, + environmentUri, + worksheetUri + }, + query: gql` + query runAthenaSqlQuery( + $environmentUri: String! + $worksheetUri: String! + $sqlQuery: String! 
+ ) { + runAthenaSqlQuery( + environmentUri: $environmentUri + worksheetUri: $worksheetUri + sqlQuery: $sqlQuery + ) { + rows { + cells { + columnName + typeName + value + } + } + columns { + columnName + typeName + } + } + } + ` +}); diff --git a/frontend/src/api/Worksheet/updateWorksheet.js b/frontend/src/modules/Worksheets/services/updateWorksheet.js similarity index 79% rename from frontend/src/api/Worksheet/updateWorksheet.js rename to frontend/src/modules/Worksheets/services/updateWorksheet.js index bb65681a2..eaa8b116a 100644 --- a/frontend/src/api/Worksheet/updateWorksheet.js +++ b/frontend/src/modules/Worksheets/services/updateWorksheet.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const updateWorksheet = ({ worksheetUri, input }) => ({ +export const updateWorksheet = ({ worksheetUri, input }) => ({ variables: { worksheetUri, input @@ -18,5 +18,3 @@ const updateWorksheet = ({ worksheetUri, input }) => ({ } ` }); - -export default updateWorksheet; diff --git a/frontend/src/views/Worksheets/WorksheetCreateForm.js b/frontend/src/modules/Worksheets/views/WorksheetCreateForm.js similarity index 95% rename from frontend/src/views/Worksheets/WorksheetCreateForm.js rename to frontend/src/modules/Worksheets/views/WorksheetCreateForm.js index d70e4cba9..0a307559f 100644 --- a/frontend/src/views/Worksheets/WorksheetCreateForm.js +++ b/frontend/src/modules/Worksheets/views/WorksheetCreateForm.js @@ -1,7 +1,4 @@ -import { Link as RouterLink, useNavigate } from 'react-router-dom'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; -import { useSnackbar } from 'notistack'; +import { LoadingButton } from '@mui/lab'; import { Box, Breadcrumbs, @@ -17,17 +14,20 @@ import { TextField, Typography } from '@mui/material'; +import { Formik } from 'formik'; +import { useSnackbar } from 'notistack'; import { Helmet } from 'react-helmet-async'; -import { LoadingButton } from '@mui/lab'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import useSettings from '../../hooks/useSettings'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import ChipInput from '../../components/TagsInput'; -import useGroups from '../../hooks/useGroups'; -import { createWorksheet } from '../../api/Worksheet'; +import { Link as RouterLink, useNavigate } from 'react-router-dom'; +import * as Yup from 'yup'; +import { + ArrowLeftIcon, + ChevronRightIcon, + ChipInput, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient, useGroups } from 'services'; +import { createWorksheet } from '../services'; const WorksheetCreateForm = (props) => { const navigate = useNavigate(); diff --git a/frontend/src/views/Worksheets/WorksheetList.js b/frontend/src/modules/Worksheets/views/WorksheetList.js similarity index 83% rename from frontend/src/views/Worksheets/WorksheetList.js rename to frontend/src/modules/Worksheets/views/WorksheetList.js index 9d6a7b55a..6fc20eba8 100644 --- a/frontend/src/views/Worksheets/WorksheetList.js +++ b/frontend/src/modules/Worksheets/views/WorksheetList.js @@ -1,5 +1,4 @@ -import { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useNavigate } from 'react-router-dom'; +import { LoadingButton } from '@mui/lab'; import { Box, Breadcrumbs, @@ -9,20 +8,22 @@ import { Typography } from '@mui/material'; import CircularProgress from 
'@mui/material/CircularProgress'; -import { Helmet } from 'react-helmet-async'; -import { LoadingButton } from '@mui/lab'; import PropTypes from 'prop-types'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import useSettings from '../../hooks/useSettings'; -import SearchInput from '../../components/SearchInput'; -import Pager from '../../components/Pager'; -import { useDispatch } from '../../store'; -import { SET_ERROR } from '../../store/errorReducer'; -import WorksheetListItem from './WorksheetListItem'; -import * as WorksheetApi from '../../api/Worksheet'; -import PlusIcon from '../../icons/Plus'; +import { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; +import { Link as RouterLink, useNavigate } from 'react-router-dom'; +import { + ChevronRightIcon, + Defaults, + Pager, + PlusIcon, + SearchInput, + useSettings +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { useClient } from 'services'; +import { listWorksheets } from '../services'; +import { WorksheetListItem } from '../components'; function WorksheetsPageHeader({ navigate }) { const startWorksheetSession = () => { @@ -81,8 +82,8 @@ WorksheetsPageHeader.propTypes = { const WorksheetList = () => { const dispatch = useDispatch(); const navigate = useNavigate(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); + const [items, setItems] = useState(Defaults.pagedResponse); + const [filter, setFilter] = useState(Defaults.filter); const { settings } = useSettings(); const [inputValue, setInputValue] = useState(''); const [loading, setLoading] = useState(true); @@ -90,9 +91,7 @@ const WorksheetList = () => { const fetchItems = useCallback(async () => { setLoading(true); - const response = await client.query( - WorksheetApi.listWorksheets({ filter }) - ); + const response = await client.query(listWorksheets({ filter })); if (!response.errors) { setItems(response.data.listWorksheets); } else { @@ -108,7 +107,7 @@ const WorksheetList = () => { const handleInputKeyup = (event) => { if (event.code === 'Enter') { - setFilter({page: 1, term: event.target.value}); + setFilter({ page: 1, term: event.target.value }); fetchItems().catch((e) => dispatch({ type: SET_ERROR, error: e.message }) ); diff --git a/frontend/src/views/Worksheets/WorksheetView.js b/frontend/src/modules/Worksheets/views/WorksheetView.js similarity index 87% rename from frontend/src/views/Worksheets/WorksheetView.js rename to frontend/src/modules/Worksheets/views/WorksheetView.js index c5e2a2ab8..98912bccd 100644 --- a/frontend/src/views/Worksheets/WorksheetView.js +++ b/frontend/src/modules/Worksheets/views/WorksheetView.js @@ -1,5 +1,5 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import { Helmet } from 'react-helmet-async'; +import { PlayArrowOutlined, SaveOutlined } from '@mui/icons-material'; +import { LoadingButton } from '@mui/lab'; import { Box, Card, @@ -14,38 +14,43 @@ import { Tooltip, Typography } from '@mui/material'; -import { useNavigate, useParams } from 'react-router-dom'; +import { useSnackbar } from 'notistack'; +import React, { useCallback, useEffect, useState } from 'react'; +import { Helmet } from 'react-helmet-async'; import { CgHashtag } from 'react-icons/cg'; import { FaTrash } from 'react-icons/fa'; import { VscSymbolString } from 'react-icons/vsc'; 
-import { PlayArrowOutlined, SaveOutlined } from '@mui/icons-material'; -import { LoadingButton } from '@mui/lab'; -import { useSnackbar } from 'notistack'; -import { useDispatch } from '../../store'; -import getWorksheet from '../../api/Worksheet/getWorksheet'; -import updateWorksheet from '../../api/Worksheet/updateWorksheet'; -import runAthenaSqlQuery from '../../api/Worksheet/runAthenaSqlQuery'; -import deleteWorksheet from '../../api/Worksheet/deleteWorksheet'; -import useClient from '../../hooks/useClient'; -import listEnvironments from '../../api/Environment/listEnvironments'; -import listEnvironmentGroups from '../../api/Environment/listEnvironmentGroups'; -import { SET_ERROR } from '../../store/errorReducer'; -import listDatasetsOwnedByEnvGroup from '../../api/Environment/listDatasetsOwnedByEnvGroup'; -import listDatasetTables from '../../api/Dataset/listDatasetTables'; -import getSharedDatasetTables from '../../api/DatasetTable/getSharedDatasetTables'; -import listDatasetTableColumns from '../../api/DatasetTable/listDatasetTableColumns'; -import searchEnvironmentDataItems from '../../api/Environment/listDatasetsPublishedInEnvironment'; -import PencilAltIcon from '../../icons/PencilAlt'; -import Scrollbar from '../../components/Scrollbar'; -import SQLQueryEditor from './SQLQueryEditor'; -import WorksheetResult from './WorksheetResult'; -import WorksheetEditFormModal from './WorksheetEditFormModal'; -import DeleteObjectWithFrictionModal from '../../components/DeleteObjectWithFrictionModal'; -import * as Defaults from '../../components/defaults'; - - +import { useNavigate, useParams } from 'react-router-dom'; +import { + Defaults, + DeleteObjectWithFrictionModal, + PencilAltIcon, + Scrollbar +} from 'design'; +import { SET_ERROR, useDispatch } from 'globalErrors'; +import { + listDatasetTables, + getSharedDatasetTables, + listDatasetTableColumns, + listDatasetsOwnedByEnvGroup, + listEnvironmentGroups, + listEnvironments, + searchEnvironmentDataItems, + useClient +} from 'services'; +import { + deleteWorksheet, + getWorksheet, + runAthenaSqlQuery, + updateWorksheet +} from '../services'; +import { + SQLQueryEditor, + WorksheetEditFormModal, + WorksheetResult +} from '../components'; -const WorksheetView = () => { +export const WorksheetView = () => { const navigate = useNavigate(); const params = useParams(); const dispatch = useDispatch(); @@ -89,7 +94,7 @@ const WorksheetView = () => { const fetchEnvironments = useCallback(async () => { setLoadingEnvs(true); const response = await client.query( - listEnvironments({ filter: Defaults.DefaultFilter }) + listEnvironments({ filter: Defaults.filter }) ); if (!response.errors) { setEnvironmentOptions( @@ -109,7 +114,7 @@ const WorksheetView = () => { try { const response = await client.query( listEnvironmentGroups({ - filter: Defaults.SelectListFilter, + filter: Defaults.selectListFilter, environmentUri }) ); @@ -135,24 +140,26 @@ const WorksheetView = () => { let sharedWithDatabases = []; let response = await client.query( listDatasetsOwnedByEnvGroup({ - filter: { - term: '', - page: 1, + filter: { + term: '', + page: 1, pageSize: 10000 }, environmentUri: environment.environmentUri, groupUri: team - })); + }) + ); if (response.errors) { dispatch({ type: SET_ERROR, error: response.errors[0].message }); } if (response.data.listDatasetsOwnedByEnvGroup.nodes) { - ownedDatabases = - response.data.listDatasetsOwnedByEnvGroup.nodes?.map((d) => ({ + ownedDatabases = response.data.listDatasetsOwnedByEnvGroup.nodes?.map( + (d) => ({ ...d, value: 
d.datasetUri, label: d.GlueDatabaseName - })); + }) + ); } response = await client.query( searchEnvironmentDataItems({ @@ -175,7 +182,8 @@ const WorksheetView = () => { datasetUri: d.datasetUri, value: d.datasetUri, label: `${d.GlueDatabaseName}_shared_${d.shareUri}`, - GlueDatabaseName: `${d.GlueDatabaseName}_shared_${d.shareUri}`.substring(0,254), + GlueDatabaseName: + `${d.GlueDatabaseName}_shared_${d.shareUri}`.substring(0, 254), environmentUri: d.environmentUri })); } @@ -187,42 +195,41 @@ const WorksheetView = () => { const fetchTables = useCallback( async (environment, dataset) => { setLoadingTables(true); - let response = "" - if (dataset.GlueDatabaseName.includes(dataset.datasetUri+"_shared_")){ + let response = ''; + if (dataset.GlueDatabaseName.includes(dataset.datasetUri + '_shared_')) { response = await client.query( getSharedDatasetTables({ datasetUri: dataset.datasetUri, envUri: environment.environmentUri }) ); - } else{ + } else { response = await client.query( listDatasetTables({ datasetUri: dataset.datasetUri, - filter: Defaults.SelectListFilter + filter: Defaults.selectListFilter }) ); } - if (!response.errors && dataset.GlueDatabaseName.includes(dataset.datasetUri+"_shared_")) { + if ( + !response.errors && + dataset.GlueDatabaseName.includes(dataset.datasetUri + '_shared_') + ) { setTableOptions( - response.data.getSharedDatasetTables.map((t) => ( - { - ...t, - value: t.tableUri, - label: t.GlueTableName - } - )) + response.data.getSharedDatasetTables.map((t) => ({ + ...t, + value: t.tableUri, + label: t.GlueTableName + })) ); - } else if(!response.errors){ + } else if (!response.errors) { setTableOptions( - response.data.getDataset.tables.nodes.map((t) => ( - { - ...t, - value: t.tableUri, - label: t.GlueTableName - } - )) + response.data.getDataset.tables.nodes.map((t) => ({ + ...t, + value: t.tableUri, + label: t.GlueTableName + })) ); } else { dispatch({ type: SET_ERROR, error: response.errors[0].message }); @@ -237,7 +244,7 @@ const WorksheetView = () => { const response = await client.query( listDatasetTableColumns({ tableUri: table.tableUri, - filter: Defaults.SelectListFilter + filter: Defaults.selectListFilter }) ); if (!response.errors) { @@ -285,9 +292,9 @@ const WorksheetView = () => { setRunningQuery(true); const response = await client.query( runAthenaSqlQuery({ - sqlQuery: sqlBody, - environmentUri:currentEnv.environmentUri, - worksheetUri: worksheet.worksheetUri + sqlQuery: sqlBody, + environmentUri: currentEnv.environmentUri, + worksheetUri: worksheet.worksheetUri }) ); if (!response.errors) { @@ -357,11 +364,9 @@ const WorksheetView = () => { setTableOptions([]); setCurrentTeam(''); setCurrentEnv(event.target.value); - fetchGroups( - event.target.value.environmentUri - ).catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); + fetchGroups(event.target.value.environmentUri).catch((e) => + dispatch({ type: SET_ERROR, error: e.message }) + ); } function handleTeamChange(event) { @@ -372,7 +377,7 @@ const WorksheetView = () => { setTableOptions([]); setCurrentTeam(event.target.value); fetchDatabases(currentEnv, event.target.value).catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) + dispatch({ type: SET_ERROR, error: e.message }) ); } @@ -482,9 +487,7 @@ const WorksheetView = () => { }} > {groupOptions.map((group) => ( - + {group.label} ))} @@ -696,5 +699,3 @@ const WorksheetView = () => { ); }; - -export default WorksheetView; diff --git a/frontend/src/modules/constants.js b/frontend/src/modules/constants.js new file mode 100644 
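The `fetchTables` logic above chooses between `getSharedDatasetTables` and `listDatasetTables` by checking whether the Glue database name carries the `<datasetUri>_shared_` suffix that shared databases receive. A condensed restatement of that branching, using the same helpers the hunk imports from 'services' and 'design', is sketched below; it is illustrative, not the module's literal code.

import { getSharedDatasetTables, listDatasetTables } from 'services';
import { Defaults } from 'design';

// A dataset reached through a share exposes a Glue database named
// `${datasetUri}_shared_${shareUri}`, so the suffix check distinguishes
// shared access from direct ownership (mirrors fetchTables above).
const isSharedDatabase = (dataset) =>
  dataset.GlueDatabaseName.includes(`${dataset.datasetUri}_shared_`);

export async function fetchTablesFor(client, environment, dataset) {
  return isSharedDatabase(dataset)
    ? client.query(
        getSharedDatasetTables({
          datasetUri: dataset.datasetUri,
          envUri: environment.environmentUri
        })
      )
    : client.query(
        listDatasetTables({
          datasetUri: dataset.datasetUri,
          filter: Defaults.selectListFilter
        })
      );
}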
index 000000000..c8091f7f2 --- /dev/null +++ b/frontend/src/modules/constants.js @@ -0,0 +1,14 @@ +export const Topics = [ + 'Finances', + 'HumanResources', + 'Products', + 'Services', + 'Operations', + 'Research', + 'Sales', + 'Orders', + 'Sites', + 'Energy', + 'Customers', + 'Misc' +]; diff --git a/frontend/src/reportWebVitals.js b/frontend/src/reportWebVitals.js index 532f29b0b..6d6b07481 100644 --- a/frontend/src/reportWebVitals.js +++ b/frontend/src/reportWebVitals.js @@ -1,4 +1,4 @@ -const reportWebVitals = (onPerfEntry) => { +export const reportWebVitals = (onPerfEntry) => { if (onPerfEntry && onPerfEntry instanceof Function) { import('web-vitals').then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => { getCLS(onPerfEntry); @@ -9,5 +9,3 @@ const reportWebVitals = (onPerfEntry) => { }); } }; - -export default reportWebVitals; diff --git a/frontend/src/routes.js b/frontend/src/routes.js index bee78a593..cc0a50dd9 100644 --- a/frontend/src/routes.js +++ b/frontend/src/routes.js @@ -1,8 +1,7 @@ import { lazy, Suspense } from 'react'; -import AuthGuard from './components/AuthGuard'; -import GuestGuard from './components/GuestGuard'; -import LoadingScreen from './components/LoadingScreen'; -import DefaultLayout from './components/layout/DefaultLayout'; +import { AuthGuard, GuestGuard } from 'authentication'; +import { DefaultLayout, LoadingScreen } from 'design'; +import { ModuleNames, isModuleEnabled } from 'utils'; const Loadable = (Component) => (props) => ( @@ -12,152 +11,149 @@ const Loadable = (Component) => (props) => ); // Authentication pages -const Login = Loadable(lazy(() => import('./views/authentication/Login'))); +const Login = Loadable(lazy(() => import('./authentication/views/Login'))); // Error pages -const NotFound = Loadable(lazy(() => import('./views/NotFound'))); +const NotFound = Loadable( + lazy(() => import('./modules/NotFound/views/NotFound')) +); const OrganizationList = Loadable( - lazy(() => import('./views/Organizations/OrganizationList')) + lazy(() => import('./modules/Organizations/views/OrganizationList')) ); const OrganizationView = Loadable( - lazy(() => import('./views/Organizations/OrganizationView')) + lazy(() => import('./modules/Organizations/views/OrganizationView')) ); const OrganizationCreateForm = Loadable( - lazy(() => import('./views/Organizations/OrganizationCreateForm')) + lazy(() => import('./modules/Organizations/views/OrganizationCreateForm')) ); const OrganizationEditForm = Loadable( - lazy(() => import('./views/Organizations/OrganizationEditForm')) + lazy(() => import('./modules/Organizations/views/OrganizationEditForm')) ); const EnvironmentCreateForm = Loadable( - lazy(() => import('./views/Environments/EnvironmentCreateForm')) + lazy(() => import('./modules/Environments/views/EnvironmentCreateForm')) ); const EnvironmentEditForm = Loadable( - lazy(() => import('./views/Environments/EnvironmentEditForm')) + lazy(() => import('./modules/Environments/views/EnvironmentEditForm')) ); const EnvironmentView = Loadable( - lazy(() => import('./views/Environments/EnvironmentView')) + lazy(() => import('./modules/Environments/views/EnvironmentView')) ); const EnvironmentList = Loadable( - lazy(() => import('./views/Environments/EnvironmentList')) + lazy(() => import('./modules/Environments/views/EnvironmentList')) ); -const Catalog = Loadable(lazy(() => import('./views/Catalog/Catalog'))); +const Catalog = Loadable(lazy(() => import('./modules/Catalog/views/Catalog'))); const DatasetList = Loadable( - lazy(() => 
import('./views/Datasets/DatasetList')) + lazy(() => import('./modules/Datasets/views/DatasetList')) ); const DatasetView = Loadable( - lazy(() => import('./views/Datasets/DatasetView')) + lazy(() => import('./modules/Datasets/views/DatasetView')) ); const DatasetCreateForm = Loadable( - lazy(() => import('./views/Datasets/DatasetCreateForm')) + lazy(() => import('./modules/Datasets/views/DatasetCreateForm')) ); const DatasetImportForm = Loadable( - lazy(() => import('./views/Datasets/DatasetImportForm')) + lazy(() => import('./modules/Datasets/views/DatasetImportForm')) ); const DatasetEditForm = Loadable( - lazy(() => import('./views/Datasets/DatasetEditForm')) + lazy(() => import('./modules/Datasets/views/DatasetEditForm')) +); +const TableView = Loadable( + lazy(() => import('./modules/Tables/views/TableView')) ); -const TableView = Loadable(lazy(() => import('./views/Tables/TableView'))); const TableEditForm = Loadable( - lazy(() => import('./views/Tables/TableEditForm')) + lazy(() => import('./modules/Tables/views/TableEditForm')) ); const FolderCreateForm = Loadable( - lazy(() => import('./views/Folders/FolderCreateForm')) + lazy(() => import('./modules/Folders/views/FolderCreateForm')) +); +const FolderView = Loadable( + lazy(() => import('./modules/Folders/views/FolderView')) ); -const FolderView = Loadable(lazy(() => import('./views/Folders/FolderView'))); const FolderEditForm = Loadable( - lazy(() => import('./views/Folders/FolderEditForm')) + lazy(() => import('./modules/Folders/views/FolderEditForm')) ); const NotebookList = Loadable( - lazy(() => import('./views/Notebooks/NotebookList')) + lazy(() => import('./modules/Notebooks/views/NotebookList')) ); const NotebookView = Loadable( - lazy(() => import('./views/Notebooks/NotebookView')) + lazy(() => import('./modules/Notebooks/views/NotebookView')) ); const NotebookCreateForm = Loadable( - lazy(() => import('./views/Notebooks/NotebookCreateForm')) + lazy(() => import('./modules/Notebooks/views/NotebookCreateForm')) ); const MLStudioList = Loadable( - lazy(() => import('./views/MLStudio/NotebookList')) + lazy(() => import('./modules/MLStudio/views/MLStudioList')) ); const MLStudioView = Loadable( - lazy(() => import('./views/MLStudio/NotebookView')) + lazy(() => import('./modules/MLStudio/views/MLStudioView')) ); const MLStudioCreateForm = Loadable( - lazy(() => import('./views/MLStudio/NotebookCreateForm')) + lazy(() => import('./modules/MLStudio/views/MLStudioCreateForm')) ); const DashboardList = Loadable( - lazy(() => import('./views/Dashboards/DashboardList')) + lazy(() => import('./modules/Dashboards/views/DashboardList')) ); const DashboardImportForm = Loadable( - lazy(() => import('./views/Dashboards/DashboardImportForm')) + lazy(() => import('./modules/Dashboards/views/DashboardImportForm')) ); const DashboardEditForm = Loadable( - lazy(() => import('./views/Dashboards/DashboardEditForm')) + lazy(() => import('./modules/Dashboards/views/DashboardEditForm')) ); const DashboardView = Loadable( - lazy(() => import('./views/Dashboards/DashboardView')) + lazy(() => import('./modules/Dashboards/views/DashboardView')) ); const DashboardSessionStarter = Loadable( - lazy(() => import('./views/Dashboards/DashboardSessionStarter')) + lazy(() => import('./modules/Dashboards/views/DashboardSessionStarter')) ); const PipelineList = Loadable( - lazy(() => import('./views/Pipelines/PipelineList')) + lazy(() => import('./modules/Pipelines/views/PipelineList')) ); const PipelineView = Loadable( - lazy(() => 
import('./views/Pipelines/PipelineView')) + lazy(() => import('./modules/Pipelines/views/PipelineView')) ); const PipelineCreateForm = Loadable( - lazy(() => import('./views/Pipelines/PipelineCreateForm')) + lazy(() => import('./modules/Pipelines/views/PipelineCreateForm')) ); const PipelineEditForm = Loadable( - lazy(() => import('./views/Pipelines/PipelineEditForm')) + lazy(() => import('./modules/Pipelines/views/PipelineEditForm')) ); -const WarehouseCreateForm = Loadable( - lazy(() => import('./views/Warehouses/WarehouseCreateForm')) +const ShareList = Loadable( + lazy(() => import('./modules/Shares/views/ShareList')) ); -const WarehouseView = Loadable( - lazy(() => import('./views/Warehouses/WarehouseView')) +const ShareView = Loadable( + lazy(() => import('./modules/Shares/views/ShareView')) ); -const WarehouseEditForm = Loadable( - lazy(() => import('./views/Warehouses/WarehouseEditForm')) -); -const WarehouseImportForm = Loadable( - lazy(() => import('./views/Warehouses/WarehouseImportForm')) -); - -const ShareList = Loadable(lazy(() => import('./views/Shares/ShareList'))); -const ShareView = Loadable(lazy(() => import('./views/Shares/ShareView'))); const WorksheetList = Loadable( - lazy(() => import('./views/Worksheets/WorksheetList')) + lazy(() => import('./modules/Worksheets/views/WorksheetList')) ); const WorksheetView = Loadable( - lazy(() => import('./views/Worksheets/WorksheetView')) + lazy(() => import('./modules/Worksheets/views/WorksheetView')) ); const WorksheetCreateForm = Loadable( - lazy(() => import('./views/Worksheets/WorksheetCreateForm')) + lazy(() => import('./modules/Worksheets/views/WorksheetCreateForm')) ); const GlossaryList = Loadable( - lazy(() => import('./views/Glossaries/GlossaryList')) + lazy(() => import('./modules/Glossaries/views/GlossaryList')) ); const GlossaryView = Loadable( - lazy(() => import('./views/Glossaries/GlossaryView')) + lazy(() => import('./modules/Glossaries/views/GlossaryView')) ); const GlossaryCreateForm = Loadable( - lazy(() => import('./views/Glossaries/GlossaryCreateForm')) + lazy(() => import('./modules/Glossaries/views/GlossaryCreateForm')) ); const AdministrationView = Loadable( - lazy(() => import('./views/Administration/AdministrationView')) + lazy(() => import('./modules/Administration/views/AdministrationView')) ); const routes = [ @@ -205,14 +201,6 @@ const routes = [ } ] }, - { - path: 'warehouse/:uri', - element: - }, - { - path: 'warehouse/:uri/edit', - element: - }, { children: [ { @@ -226,22 +214,14 @@ const routes = [ { path: 'environments/:uri/edit', element: - }, - { - path: 'environments/:uri/warehouses/new', - element: - }, - { - path: 'environments/:uri/warehouses/import', - element: } ] }, - { + isModuleEnabled(ModuleNames.CATALOG) && { path: 'catalog', element: }, - { + isModuleEnabled(ModuleNames.DATASETS) && { children: [ { path: 'datasets', @@ -289,7 +269,7 @@ const routes = [ } ] }, - { + isModuleEnabled(ModuleNames.MLSTUDIO) && { children: [ { path: 'mlstudio', @@ -305,7 +285,7 @@ const routes = [ } ] }, - { + isModuleEnabled(ModuleNames.NOTEBOOKS) && { children: [ { path: 'notebooks', @@ -321,7 +301,7 @@ const routes = [ } ] }, - { + isModuleEnabled(ModuleNames.DASHBOARDS) && { children: [ { path: 'dashboards', @@ -345,7 +325,7 @@ const routes = [ } ] }, - { + isModuleEnabled(ModuleNames.PIPELINES) && { children: [ { path: 'pipelines', @@ -365,7 +345,7 @@ const routes = [ } ] }, - { + isModuleEnabled(ModuleNames.SHARES) && { children: [ { path: 'shares', @@ -377,7 +357,7 @@ const routes = [ } ] }, - { 
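In the routes.js hunks around this point, each feature block is now guarded with `isModuleEnabled(ModuleNames.X) &&`, so a disabled module leaves a literal `false` entry in the routes array. The filtering of those entries is not visible in this section, so the sketch below is only an assumption about how such placeholders are typically dropped before the array reaches the router.

import { ModuleNames, isModuleEnabled } from 'utils';

// Hypothetical illustration: build the array the way routes.js now does, then
// drop the `false` placeholders left by disabled modules. The real route
// objects carry `element`/`children`; nulls stand in here.
const exampleRoutes = [
  { path: 'organizations', element: null },
  isModuleEnabled(ModuleNames.CATALOG) && { path: 'catalog', element: null },
  isModuleEnabled(ModuleNames.WORKSHEETS) && { path: 'worksheets', element: null }
].filter(Boolean);

export { exampleRoutes };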
+ isModuleEnabled(ModuleNames.WORKSHEETS) && { children: [ { path: 'worksheets', @@ -393,7 +373,7 @@ const routes = [ } ] }, - { + isModuleEnabled(ModuleNames.GLOSSARIES) && { children: [ { path: 'glossaries', diff --git a/frontend/src/serviceWorker.js b/frontend/src/serviceWorker.js index 6a6d3dcb2..f92279ceb 100644 --- a/frontend/src/serviceWorker.js +++ b/frontend/src/serviceWorker.js @@ -44,7 +44,7 @@ export function register(config) { // Add some additional logging to localhost, pointing developers to the // service worker/PWA documentation. navigator.serviceWorker.ready.then(() => { - console.log( + console.info( 'This web app is being served cache-first by a service ' + 'worker. To learn more, visit https://bit.ly/CRA-PWA' ); @@ -72,7 +72,7 @@ function registerValidSW(swUrl, config) { // At this point, the updated precached content has been fetched, // but the previous service worker will still serve the older // content until all client tabs are closed. - console.log( + console.info( 'New content is available and will be used when all ' + 'tabs for this page are closed. See https://bit.ly/CRA-PWA.' ); @@ -85,7 +85,7 @@ function registerValidSW(swUrl, config) { // At this point, everything has been precached. // It's the perfect time to display a // "Content is cached for offline use." message. - console.log('Content is cached for offline use.'); + console.info('Content is cached for offline use.'); // Execute callback if (config && config.onSuccess) { @@ -125,9 +125,7 @@ function checkValidServiceWorker(swUrl, config) { } }) .catch(() => { - console.log( - 'No internet connection found. App is running in offline mode.' - ); + console.error( 'No internet connection found. App is running in offline mode.' ); }); } diff --git a/frontend/src/services/graphql/ApiKeys/createApiKey.js b/frontend/src/services/graphql/ApiKeys/createApiKey.js new file mode 100644 index 000000000..30f48a85e --- /dev/null +++ b/frontend/src/services/graphql/ApiKeys/createApiKey.js @@ -0,0 +1,13 @@ +import { gql } from 'apollo-boost'; + +export const createApiKey = () => ({ + mutation: gql` + mutation CreateApiKey { + createApiKey { + ApiKeyId + ApiKeySecret + expires + } + } + ` +}); diff --git a/frontend/src/services/graphql/ApiKeys/deleteApiKey.js b/frontend/src/services/graphql/ApiKeys/deleteApiKey.js new file mode 100644 index 000000000..a2bad0fe8 --- /dev/null +++ b/frontend/src/services/graphql/ApiKeys/deleteApiKey.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const deleteApiKey = (ApiKeyId) => ({ + variables: { + ApiKeyId + }, + mutation: gql` + mutation DeleteApiKey($ApiKeyId: String!) 
{ + deleteApiKey(ApiKeyId: $ApiKeyId) + } + ` +}); diff --git a/frontend/src/services/graphql/ApiKeys/index.js b/frontend/src/services/graphql/ApiKeys/index.js new file mode 100644 index 000000000..7f9f7415c --- /dev/null +++ b/frontend/src/services/graphql/ApiKeys/index.js @@ -0,0 +1,3 @@ +export * from './createApiKey'; +export * from './deleteApiKey'; +export * from './listApiKeys'; diff --git a/frontend/src/api/ApiKeys/listApiKeys.js b/frontend/src/services/graphql/ApiKeys/listApiKeys.js similarity index 76% rename from frontend/src/api/ApiKeys/listApiKeys.js rename to frontend/src/services/graphql/ApiKeys/listApiKeys.js index 7de87d100..7081bd88f 100644 --- a/frontend/src/api/ApiKeys/listApiKeys.js +++ b/frontend/src/services/graphql/ApiKeys/listApiKeys.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listApiKeys = () => ({ +export const listApiKeys = () => ({ query: gql` query ListApiKeys { listApiKeys { @@ -13,5 +13,3 @@ const listApiKeys = () => ({ } ` }); - -export default listApiKeys; diff --git a/frontend/src/services/graphql/Dashboard/index.js b/frontend/src/services/graphql/Dashboard/index.js new file mode 100644 index 000000000..a9a68b2b7 --- /dev/null +++ b/frontend/src/services/graphql/Dashboard/index.js @@ -0,0 +1 @@ +export * from './requestDashboardShare'; diff --git a/frontend/src/api/Dashboard/requestDashboardShare.js b/frontend/src/services/graphql/Dashboard/requestDashboardShare.js similarity index 78% rename from frontend/src/api/Dashboard/requestDashboardShare.js rename to frontend/src/services/graphql/Dashboard/requestDashboardShare.js index c7feaaafd..5e42326ec 100644 --- a/frontend/src/api/Dashboard/requestDashboardShare.js +++ b/frontend/src/services/graphql/Dashboard/requestDashboardShare.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const requestDashboardShare = (dashboardUri, principalId) => ({ +export const requestDashboardShare = (dashboardUri, principalId) => ({ variables: { dashboardUri, principalId @@ -20,5 +20,3 @@ const requestDashboardShare = (dashboardUri, principalId) => ({ } ` }); - -export default requestDashboardShare; diff --git a/frontend/src/services/graphql/DatasetTable/deleteDatasetTable.js b/frontend/src/services/graphql/DatasetTable/deleteDatasetTable.js new file mode 100644 index 000000000..5f35892f3 --- /dev/null +++ b/frontend/src/services/graphql/DatasetTable/deleteDatasetTable.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const deleteDatasetTable = ({ tableUri }) => ({ + variables: { + tableUri + }, + mutation: gql` + mutation deleteDatasetTable($tableUri: String!) { + deleteDatasetTable(tableUri: $tableUri) + } + ` +}); diff --git a/frontend/src/services/graphql/DatasetTable/getSharedDatasetTables.js b/frontend/src/services/graphql/DatasetTable/getSharedDatasetTables.js new file mode 100644 index 000000000..852c1b582 --- /dev/null +++ b/frontend/src/services/graphql/DatasetTable/getSharedDatasetTables.js @@ -0,0 +1,16 @@ +import { gql } from 'apollo-boost'; + +export const getSharedDatasetTables = ({ datasetUri, envUri }) => ({ + variables: { + datasetUri, + envUri + }, + query: gql` + query GetSharedDatasetTables($datasetUri: String!, $envUri: String!) 
{ + getSharedDatasetTables(datasetUri: $datasetUri, envUri: $envUri) { + tableUri + GlueTableName + } + } + ` +}); diff --git a/frontend/src/services/graphql/DatasetTable/index.js b/frontend/src/services/graphql/DatasetTable/index.js new file mode 100644 index 000000000..7c46f5143 --- /dev/null +++ b/frontend/src/services/graphql/DatasetTable/index.js @@ -0,0 +1,3 @@ +export * from './deleteDatasetTable'; +export * from './getSharedDatasetTables'; +export * from './listDatasetTableColumns'; diff --git a/frontend/src/api/DatasetTable/listDatasetTableColumns.js b/frontend/src/services/graphql/DatasetTable/listDatasetTableColumns.js similarity index 88% rename from frontend/src/api/DatasetTable/listDatasetTableColumns.js rename to frontend/src/services/graphql/DatasetTable/listDatasetTableColumns.js index 187c87d10..9d6d55346 100644 --- a/frontend/src/api/DatasetTable/listDatasetTableColumns.js +++ b/frontend/src/services/graphql/DatasetTable/listDatasetTableColumns.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listDatasetTableColumns = ({ tableUri, filter }) => ({ +export const listDatasetTableColumns = ({ tableUri, filter }) => ({ variables: { tableUri, filter @@ -42,5 +42,3 @@ const listDatasetTableColumns = ({ tableUri, filter }) => ({ } ` }); - -export default listDatasetTableColumns; diff --git a/frontend/src/api/Dataset/addDatasetStorageLocation.js b/frontend/src/services/graphql/Datasets/addDatasetStorageLocation.js similarity index 76% rename from frontend/src/api/Dataset/addDatasetStorageLocation.js rename to frontend/src/services/graphql/Datasets/addDatasetStorageLocation.js index 25971210f..4c9d1abdf 100644 --- a/frontend/src/api/Dataset/addDatasetStorageLocation.js +++ b/frontend/src/services/graphql/Datasets/addDatasetStorageLocation.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const addDatasetStorageLocation = ({ datasetUri, input }) => ({ +export const addDatasetStorageLocation = ({ datasetUri, input }) => ({ variables: { datasetUri, input }, mutation: gql` mutation CreateDatasetStorageLocation( @@ -14,5 +14,3 @@ const addDatasetStorageLocation = ({ datasetUri, input }) => ({ } ` }); - -export default addDatasetStorageLocation; diff --git a/frontend/src/api/Dataset/getDataset.js b/frontend/src/services/graphql/Datasets/getDataset.js similarity index 96% rename from frontend/src/api/Dataset/getDataset.js rename to frontend/src/services/graphql/Datasets/getDataset.js index aeb71fb74..50004adca 100644 --- a/frontend/src/api/Dataset/getDataset.js +++ b/frontend/src/services/graphql/Datasets/getDataset.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getDataset = (datasetUri) => ({ +export const getDataset = (datasetUri) => ({ variables: { datasetUri }, @@ -79,5 +79,3 @@ const getDataset = (datasetUri) => ({ } ` }); - -export default getDataset; diff --git a/frontend/src/services/graphql/Datasets/getDatasetAssumeRoleUrl.js b/frontend/src/services/graphql/Datasets/getDatasetAssumeRoleUrl.js new file mode 100644 index 000000000..809a8d3ef --- /dev/null +++ b/frontend/src/services/graphql/Datasets/getDatasetAssumeRoleUrl.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const getDatasetAssumeRoleUrl = (datasetUri) => ({ + variables: { + datasetUri + }, + query: gql` + query GetDatasetAssumeRoleUrl($datasetUri: String!) 
{ + getDatasetAssumeRoleUrl(datasetUri: $datasetUri) + } + ` +}); diff --git a/frontend/src/services/graphql/Datasets/index.js b/frontend/src/services/graphql/Datasets/index.js new file mode 100644 index 000000000..c9f35114e --- /dev/null +++ b/frontend/src/services/graphql/Datasets/index.js @@ -0,0 +1,6 @@ +export * from './addDatasetStorageLocation'; +export * from './getDataset'; +export * from './getDatasetAssumeRoleUrl'; +export * from './listDatasetTables'; +export * from './listShareObjects'; +export * from './removeDatasetStorageLocation'; diff --git a/frontend/src/api/Dataset/listDatasetTables.js b/frontend/src/services/graphql/Datasets/listDatasetTables.js similarity index 89% rename from frontend/src/api/Dataset/listDatasetTables.js rename to frontend/src/services/graphql/Datasets/listDatasetTables.js index 5dab73bd4..343e80461 100644 --- a/frontend/src/api/Dataset/listDatasetTables.js +++ b/frontend/src/services/graphql/Datasets/listDatasetTables.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listDatasetTables = ({ datasetUri, filter }) => ({ +export const listDatasetTables = ({ datasetUri, filter }) => ({ variables: { datasetUri, filter @@ -38,5 +38,3 @@ const listDatasetTables = ({ datasetUri, filter }) => ({ } ` }); - -export default listDatasetTables; diff --git a/frontend/src/api/Dataset/listShareObjects.js b/frontend/src/services/graphql/Datasets/listShareObjects.js similarity index 92% rename from frontend/src/api/Dataset/listShareObjects.js rename to frontend/src/services/graphql/Datasets/listShareObjects.js index 277cbb7da..00e88d123 100644 --- a/frontend/src/api/Dataset/listShareObjects.js +++ b/frontend/src/services/graphql/Datasets/listShareObjects.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listDatasetShareObjects = ({ datasetUri, filter }) => ({ +export const listDatasetShareObjects = ({ datasetUri, filter }) => ({ variables: { datasetUri, filter @@ -59,5 +59,3 @@ const listDatasetShareObjects = ({ datasetUri, filter }) => ({ } ` }); - -export default listDatasetShareObjects; diff --git a/frontend/src/services/graphql/Datasets/removeDatasetStorageLocation.js b/frontend/src/services/graphql/Datasets/removeDatasetStorageLocation.js new file mode 100644 index 000000000..e67fe5906 --- /dev/null +++ b/frontend/src/services/graphql/Datasets/removeDatasetStorageLocation.js @@ -0,0 +1,10 @@ +import { gql } from 'apollo-boost'; + +export const deleteDatasetStorageLocation = ({ locationUri }) => ({ + variables: { locationUri }, + mutation: gql` + mutation DeleteDatasetStorageLocation($locationUri: String) { + deleteDatasetStorageLocation(locationUri: $locationUri) + } + ` +}); diff --git a/frontend/src/services/graphql/Environment/getTrustAccount.js b/frontend/src/services/graphql/Environment/getTrustAccount.js new file mode 100644 index 000000000..97aba5e40 --- /dev/null +++ b/frontend/src/services/graphql/Environment/getTrustAccount.js @@ -0,0 +1,9 @@ +import { gql } from 'apollo-boost'; + +export const getTrustAccount = () => ({ + query: gql` + query GetTrustAccount { + getTrustAccount + } + ` +}); diff --git a/frontend/src/services/graphql/Environment/index.js b/frontend/src/services/graphql/Environment/index.js new file mode 100644 index 000000000..da40f01ac --- /dev/null +++ b/frontend/src/services/graphql/Environment/index.js @@ -0,0 +1,6 @@ +export * from './getTrustAccount'; +export * from './listDatasetsOwnedByEnvGroup'; +export * from './listEnvironmentConsumptionRoles'; +export * from './listEnvironmentGroups'; +export * from 
'./listEnvironments'; +export * from './searchEnvironmentDataItems'; diff --git a/frontend/src/api/Environment/listDatasetsOwnedByEnvGroup.js b/frontend/src/services/graphql/Environment/listDatasetsOwnedByEnvGroup.js similarity index 85% rename from frontend/src/api/Environment/listDatasetsOwnedByEnvGroup.js rename to frontend/src/services/graphql/Environment/listDatasetsOwnedByEnvGroup.js index 1a77d684a..ce3b36545 100644 --- a/frontend/src/api/Environment/listDatasetsOwnedByEnvGroup.js +++ b/frontend/src/services/graphql/Environment/listDatasetsOwnedByEnvGroup.js @@ -1,6 +1,10 @@ import { gql } from 'apollo-boost'; -const listDatasetsOwnedByEnvGroup = ({ filter, environmentUri, groupUri }) => ({ +export const listDatasetsOwnedByEnvGroup = ({ + filter, + environmentUri, + groupUri +}) => ({ variables: { environmentUri, groupUri, @@ -41,5 +45,3 @@ const listDatasetsOwnedByEnvGroup = ({ filter, environmentUri, groupUri }) => ({ } ` }); - -export default listDatasetsOwnedByEnvGroup; diff --git a/frontend/src/api/Environment/listEnvironmentConsumptionRoles.js b/frontend/src/services/graphql/Environment/listEnvironmentConsumptionRoles.js similarity index 82% rename from frontend/src/api/Environment/listEnvironmentConsumptionRoles.js rename to frontend/src/services/graphql/Environment/listEnvironmentConsumptionRoles.js index de536cb7a..4d876ab88 100644 --- a/frontend/src/api/Environment/listEnvironmentConsumptionRoles.js +++ b/frontend/src/services/graphql/Environment/listEnvironmentConsumptionRoles.js @@ -1,6 +1,9 @@ import { gql } from 'apollo-boost'; -const listEnvironmentConsumptionRoles = ({ filter, environmentUri }) => ({ +export const listEnvironmentConsumptionRoles = ({ + filter, + environmentUri +}) => ({ variables: { environmentUri, filter @@ -30,5 +33,3 @@ const listEnvironmentConsumptionRoles = ({ filter, environmentUri }) => ({ } ` }); - -export default listEnvironmentConsumptionRoles; diff --git a/frontend/src/api/Environment/listEnvironmentGroups.js b/frontend/src/services/graphql/Environment/listEnvironmentGroups.js similarity index 86% rename from frontend/src/api/Environment/listEnvironmentGroups.js rename to frontend/src/services/graphql/Environment/listEnvironmentGroups.js index 31acdf7c0..1d2360a86 100644 --- a/frontend/src/api/Environment/listEnvironmentGroups.js +++ b/frontend/src/services/graphql/Environment/listEnvironmentGroups.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listEnvironmentInvitedGroups = ({ filter, environmentUri }) => ({ +export const listEnvironmentGroups = ({ filter, environmentUri }) => ({ variables: { environmentUri, filter @@ -33,5 +33,3 @@ const listEnvironmentInvitedGroups = ({ filter, environmentUri }) => ({ } ` }); - -export default listEnvironmentInvitedGroups; diff --git a/frontend/src/api/Environment/listEnvironments.js b/frontend/src/services/graphql/Environment/listEnvironments.js similarity index 92% rename from frontend/src/api/Environment/listEnvironments.js rename to frontend/src/services/graphql/Environment/listEnvironments.js index 7c42bbea3..1becf11d7 100644 --- a/frontend/src/api/Environment/listEnvironments.js +++ b/frontend/src/services/graphql/Environment/listEnvironments.js @@ -1,6 +1,6 @@ import gql from 'graphql-tag'; -const listEnvironments = ({ filter }) => ({ +export const listEnvironments = ({ filter }) => ({ variables: { filter }, @@ -52,5 +52,3 @@ const listEnvironments = ({ filter }) => ({ } ` }); - -export default listEnvironments; diff --git 
a/frontend/src/services/graphql/Environment/searchEnvironmentDataItems.js b/frontend/src/services/graphql/Environment/searchEnvironmentDataItems.js new file mode 100644 index 000000000..ac53759ba --- /dev/null +++ b/frontend/src/services/graphql/Environment/searchEnvironmentDataItems.js @@ -0,0 +1,41 @@ +import { gql } from 'apollo-boost'; + +export const searchEnvironmentDataItems = ({ filter, environmentUri }) => ({ + variables: { + environmentUri, + filter + }, + query: gql` + query SearchEnvironmentDataItems( + $filter: EnvironmentDataItemFilter + $environmentUri: String + ) { + searchEnvironmentDataItems( + environmentUri: $environmentUri + filter: $filter + ) { + count + page + pages + hasNext + hasPrevious + nodes { + shareUri + environmentName + environmentUri + organizationName + organizationUri + datasetUri + datasetName + itemType + itemAccess + GlueDatabaseName + GlueTableName + S3AccessPointName + created + principalId + } + } + } + ` +}); diff --git a/frontend/src/services/graphql/Feed/index.js b/frontend/src/services/graphql/Feed/index.js new file mode 100644 index 000000000..82d393d66 --- /dev/null +++ b/frontend/src/services/graphql/Feed/index.js @@ -0,0 +1,2 @@ +export * from './listFeedMessages'; +export * from './postMessage'; diff --git a/frontend/src/api/Feed/listFeedMessages.js b/frontend/src/services/graphql/Feed/listFeedMessages.js similarity index 90% rename from frontend/src/api/Feed/listFeedMessages.js rename to frontend/src/services/graphql/Feed/listFeedMessages.js index 20828acdb..01abcce6a 100644 --- a/frontend/src/api/Feed/listFeedMessages.js +++ b/frontend/src/services/graphql/Feed/listFeedMessages.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listFeedMessages = ({ targetUri, targetType, filter }) => ({ +export const listFeedMessages = ({ targetUri, targetType, filter }) => ({ variables: { targetUri, targetType, @@ -51,5 +51,3 @@ const listFeedMessages = ({ targetUri, targetType, filter }) => ({ } ` }); - -export default listFeedMessages; diff --git a/frontend/src/api/Feed/postMessage.js b/frontend/src/services/graphql/Feed/postMessage.js similarity index 82% rename from frontend/src/api/Feed/postMessage.js rename to frontend/src/services/graphql/Feed/postMessage.js index 62d562263..c8d333565 100644 --- a/frontend/src/api/Feed/postMessage.js +++ b/frontend/src/services/graphql/Feed/postMessage.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const postFeedMessage = ({ targetUri, targetType, input }) => ({ +export const postFeedMessage = ({ targetUri, targetType, input }) => ({ variables: { targetUri, targetType, @@ -25,5 +25,3 @@ const postFeedMessage = ({ targetUri, targetType, input }) => ({ } ` }); - -export default postFeedMessage; diff --git a/frontend/src/services/graphql/Glossary/index.js b/frontend/src/services/graphql/Glossary/index.js new file mode 100644 index 000000000..d92d21185 --- /dev/null +++ b/frontend/src/services/graphql/Glossary/index.js @@ -0,0 +1 @@ +export * from './searchGlossary'; diff --git a/frontend/src/api/Glossary/searchGlossary.js b/frontend/src/services/graphql/Glossary/searchGlossary.js similarity index 92% rename from frontend/src/api/Glossary/searchGlossary.js rename to frontend/src/services/graphql/Glossary/searchGlossary.js index 699861fff..79550fd1f 100644 --- a/frontend/src/api/Glossary/searchGlossary.js +++ b/frontend/src/services/graphql/Glossary/searchGlossary.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const searchGlossary = (filter) => ({ +export const searchGlossary = (filter) 
=> ({ variables: { filter }, @@ -45,5 +45,3 @@ const searchGlossary = (filter) => ({ } ` }); - -export default searchGlossary; diff --git a/frontend/src/services/graphql/Groups/index.js b/frontend/src/services/graphql/Groups/index.js new file mode 100644 index 000000000..e99b1948f --- /dev/null +++ b/frontend/src/services/graphql/Groups/index.js @@ -0,0 +1 @@ +export * from './listCognitoGroups'; diff --git a/frontend/src/services/graphql/Groups/listCognitoGroups.js b/frontend/src/services/graphql/Groups/listCognitoGroups.js new file mode 100644 index 000000000..a634106e4 --- /dev/null +++ b/frontend/src/services/graphql/Groups/listCognitoGroups.js @@ -0,0 +1,14 @@ +import { gql } from 'apollo-boost'; + +export const listCognitoGroups = ({ filter }) => ({ + variables: { + filter + }, + query: gql` + query listCognitoGroups($filter: CognitoGroupFilter) { + listCognitoGroups(filter: $filter) { + groupName + } + } + ` +}); diff --git a/frontend/src/services/graphql/KeyValueTags/index.js b/frontend/src/services/graphql/KeyValueTags/index.js new file mode 100644 index 000000000..57ae1cf83 --- /dev/null +++ b/frontend/src/services/graphql/KeyValueTags/index.js @@ -0,0 +1,2 @@ +export * from './listKeyValueTags'; +export * from './updateKeyValueTags'; diff --git a/frontend/src/api/KeyValueTags/listKeyValueTags.js b/frontend/src/services/graphql/KeyValueTags/listKeyValueTags.js similarity index 80% rename from frontend/src/api/KeyValueTags/listKeyValueTags.js rename to frontend/src/services/graphql/KeyValueTags/listKeyValueTags.js index ef31f29ee..9ca68df24 100644 --- a/frontend/src/api/KeyValueTags/listKeyValueTags.js +++ b/frontend/src/services/graphql/KeyValueTags/listKeyValueTags.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listKeyValueTags = (targetUri, targetType) => ({ +export const listKeyValueTags = (targetUri, targetType) => ({ variables: { targetUri, targetType @@ -18,5 +18,3 @@ const listKeyValueTags = (targetUri, targetType) => ({ } ` }); - -export default listKeyValueTags; diff --git a/frontend/src/api/KeyValueTags/updateKeyValueTags.js b/frontend/src/services/graphql/KeyValueTags/updateKeyValueTags.js similarity index 80% rename from frontend/src/api/KeyValueTags/updateKeyValueTags.js rename to frontend/src/services/graphql/KeyValueTags/updateKeyValueTags.js index a327d78ed..5aeadcba9 100644 --- a/frontend/src/api/KeyValueTags/updateKeyValueTags.js +++ b/frontend/src/services/graphql/KeyValueTags/updateKeyValueTags.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const updateKeyValueTags = (input) => ({ +export const updateKeyValueTags = (input) => ({ variables: { input }, @@ -17,5 +17,3 @@ const updateKeyValueTags = (input) => ({ } ` }); - -export default updateKeyValueTags; diff --git a/frontend/src/api/Metric/getMetrics.js b/frontend/src/services/graphql/Metric/getMetrics.js similarity index 85% rename from frontend/src/api/Metric/getMetrics.js rename to frontend/src/services/graphql/Metric/getMetrics.js index a8058347d..b4a2b6fbb 100644 --- a/frontend/src/api/Metric/getMetrics.js +++ b/frontend/src/services/graphql/Metric/getMetrics.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getMetrics = (filter) => ({ +export const getMetrics = (filter) => ({ variables: { filter }, @@ -22,5 +22,3 @@ const getMetrics = (filter) => ({ } ` }); - -export default getMetrics; diff --git a/frontend/src/services/graphql/Metric/index.js b/frontend/src/services/graphql/Metric/index.js new file mode 100644 index 000000000..943e5e26d --- /dev/null +++ 
b/frontend/src/services/graphql/Metric/index.js @@ -0,0 +1 @@ +export * from './getMetrics'; diff --git a/frontend/src/services/graphql/Notification/archiveNotification.js b/frontend/src/services/graphql/Notification/archiveNotification.js new file mode 100644 index 000000000..e6b719055 --- /dev/null +++ b/frontend/src/services/graphql/Notification/archiveNotification.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const archiveNotification = ({ notificationUri }) => ({ + variables: { + notificationUri + }, + mutation: gql` + mutation deleteNotification($notificationUri: String!) { + deleteNotification(notificationUri: $notificationUri) + } + ` +}); diff --git a/frontend/src/services/graphql/Notification/countDeletedNotifications.js b/frontend/src/services/graphql/Notification/countDeletedNotifications.js new file mode 100644 index 000000000..42ea4f4e9 --- /dev/null +++ b/frontend/src/services/graphql/Notification/countDeletedNotifications.js @@ -0,0 +1,10 @@ +import { gql } from 'apollo-boost'; + +export const countDeletedNotifications = () => ({ + variables: {}, + query: gql` + query countDeletedNotifications { + countDeletedNotifications + } + ` +}); diff --git a/frontend/src/services/graphql/Notification/countReadNotifications.js b/frontend/src/services/graphql/Notification/countReadNotifications.js new file mode 100644 index 000000000..dcd855237 --- /dev/null +++ b/frontend/src/services/graphql/Notification/countReadNotifications.js @@ -0,0 +1,10 @@ +import { gql } from 'apollo-boost'; + +export const countReadNotifications = () => ({ + variables: {}, + query: gql` + query countReadNotifications { + countReadNotifications + } + ` +}); diff --git a/frontend/src/services/graphql/Notification/countUnreadNotifications.js b/frontend/src/services/graphql/Notification/countUnreadNotifications.js new file mode 100644 index 000000000..816f2c07e --- /dev/null +++ b/frontend/src/services/graphql/Notification/countUnreadNotifications.js @@ -0,0 +1,10 @@ +import { gql } from 'apollo-boost'; + +export const countUnreadNotifications = () => ({ + variables: {}, + query: gql` + query countUnreadNotifications { + countUnreadNotifications + } + ` +}); diff --git a/frontend/src/services/graphql/Notification/index.js b/frontend/src/services/graphql/Notification/index.js new file mode 100644 index 000000000..b050f4df8 --- /dev/null +++ b/frontend/src/services/graphql/Notification/index.js @@ -0,0 +1,6 @@ +export * from './archiveNotification'; +export * from './countDeletedNotifications'; +export * from './countReadNotifications'; +export * from './countUnreadNotifications'; +export * from './listNotifications'; +export * from './markAsRead'; diff --git a/frontend/src/api/Notification/listNotifications.js b/frontend/src/services/graphql/Notification/listNotifications.js similarity index 83% rename from frontend/src/api/Notification/listNotifications.js rename to frontend/src/services/graphql/Notification/listNotifications.js index 3b50b58dd..616482568 100644 --- a/frontend/src/api/Notification/listNotifications.js +++ b/frontend/src/services/graphql/Notification/listNotifications.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listNotifications = (filter) => ({ +export const listNotifications = (filter) => ({ variables: { filter }, @@ -22,5 +22,3 @@ const listNotifications = (filter) => ({ } ` }); - -export default listNotifications; diff --git a/frontend/src/services/graphql/Notification/markAsRead.js b/frontend/src/services/graphql/Notification/markAsRead.js new file mode 
100644 index 000000000..5399b5a99 --- /dev/null +++ b/frontend/src/services/graphql/Notification/markAsRead.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const markNotificationAsRead = (notificationUri) => ({ + variables: { + notificationUri + }, + mutation: gql` + mutation markNotificationAsRead($notificationUri: String!) { + markNotificationAsRead(notificationUri: $notificationUri) + } + ` +}); diff --git a/frontend/src/api/Organization/getOrganization.js b/frontend/src/services/graphql/Organization/getOrganization.js similarity index 84% rename from frontend/src/api/Organization/getOrganization.js rename to frontend/src/services/graphql/Organization/getOrganization.js index dc5dc9934..1c4b00088 100644 --- a/frontend/src/api/Organization/getOrganization.js +++ b/frontend/src/services/graphql/Organization/getOrganization.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getOrganization = (organizationUri) => ({ +export const getOrganization = (organizationUri) => ({ variables: { organizationUri }, query: gql` query GetOrganization($organizationUri: String!) { @@ -21,5 +21,3 @@ const getOrganization = (organizationUri) => ({ } ` }); - -export default getOrganization; diff --git a/frontend/src/services/graphql/Organization/index.js b/frontend/src/services/graphql/Organization/index.js new file mode 100644 index 000000000..8ae19d1b5 --- /dev/null +++ b/frontend/src/services/graphql/Organization/index.js @@ -0,0 +1 @@ +export * from './getOrganization'; diff --git a/frontend/src/services/graphql/Principal/index.js b/frontend/src/services/graphql/Principal/index.js new file mode 100644 index 000000000..1945f5588 --- /dev/null +++ b/frontend/src/services/graphql/Principal/index.js @@ -0,0 +1 @@ +export * from './searchPrincipal'; diff --git a/frontend/src/api/Principal/searchPrincipal.js b/frontend/src/services/graphql/Principal/searchPrincipal.js similarity index 87% rename from frontend/src/api/Principal/searchPrincipal.js rename to frontend/src/services/graphql/Principal/searchPrincipal.js index d6e475027..ca0d8cb55 100644 --- a/frontend/src/api/Principal/searchPrincipal.js +++ b/frontend/src/services/graphql/Principal/searchPrincipal.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const searchPrincipal = ({ filter }) => ({ +export const searchPrincipal = ({ filter }) => ({ variables: { filter }, @@ -28,5 +28,3 @@ const searchPrincipal = ({ filter }) => ({ } ` }); - -export default searchPrincipal; diff --git a/frontend/src/api/SavedQuery/createSavedQuery.js b/frontend/src/services/graphql/SavedQuery/createSavedQuery.js similarity index 81% rename from frontend/src/api/SavedQuery/createSavedQuery.js rename to frontend/src/services/graphql/SavedQuery/createSavedQuery.js index 6529d0bb5..53c41273f 100644 --- a/frontend/src/api/SavedQuery/createSavedQuery.js +++ b/frontend/src/services/graphql/SavedQuery/createSavedQuery.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const createSavedQuery = ({ scheduledQueryUri, input }) => ({ +export const createSavedQuery = ({ scheduledQueryUri, input }) => ({ variables: { scheduledQueryUri, input @@ -21,5 +21,3 @@ const createSavedQuery = ({ scheduledQueryUri, input }) => ({ } ` }); - -export default createSavedQuery; diff --git a/frontend/src/api/SavedQuery/createScheduledQuery.js b/frontend/src/services/graphql/SavedQuery/createScheduledQuery.js similarity index 79% rename from frontend/src/api/SavedQuery/createScheduledQuery.js rename to frontend/src/services/graphql/SavedQuery/createScheduledQuery.js 
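The per-folder `index.js` barrels introduced here (and the top-level `services/graphql/index.js` later in this diff) are what let the views above import query factories, such as the SavedQuery ones that follow, from the single 'services' specifier instead of deep relative paths. The usage sketch below assumes 'services' is a path alias configured elsewhere in the frontend build; the returned field name is likewise an assumption.

// Illustrative only; assumes the 'services' specifier is a path alias that
// resolves to frontend/src/services (configuration not shown in this section).
import { getSavedQuery } from 'services';

export async function loadSavedQuery(client, queryUri) {
  const response = await client.query(getSavedQuery(queryUri));
  if (response.errors) {
    throw new Error(response.errors[0].message);
  }
  // Field name mirrors the getSavedQuery factory's query (assumption).
  return response.data.getSavedQuery;
}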
index fdd4e78b1..e8ebcaa88 100644 --- a/frontend/src/api/SavedQuery/createScheduledQuery.js +++ b/frontend/src/services/graphql/SavedQuery/createScheduledQuery.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const createScheduledQuery = (input) => ({ +export const createScheduledQuery = (input) => ({ variables: { input }, @@ -17,5 +17,3 @@ const createScheduledQuery = (input) => ({ } ` }); - -export default createScheduledQuery; diff --git a/frontend/src/services/graphql/SavedQuery/deployScheduledQuery.js b/frontend/src/services/graphql/SavedQuery/deployScheduledQuery.js new file mode 100644 index 000000000..147228418 --- /dev/null +++ b/frontend/src/services/graphql/SavedQuery/deployScheduledQuery.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const deployScheduledQuery = (scheduledQueryUri) => ({ + variables: { + scheduledQueryUri + }, + mutation: gql` + mutation DeployScheduledQuery($scheduledQueryUri: String!) { + deployScheduledQuery(scheduledQueryUri: $scheduledQueryUri) + } + ` +}); diff --git a/frontend/src/api/SavedQuery/getSavedQuery.js b/frontend/src/services/graphql/SavedQuery/getSavedQuery.js similarity index 83% rename from frontend/src/api/SavedQuery/getSavedQuery.js rename to frontend/src/services/graphql/SavedQuery/getSavedQuery.js index 78d5a8c98..4f11e429c 100644 --- a/frontend/src/api/SavedQuery/getSavedQuery.js +++ b/frontend/src/services/graphql/SavedQuery/getSavedQuery.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getSavedQuery = (queryUri) => ({ +export const getSavedQuery = (queryUri) => ({ variables: { queryUri }, @@ -21,5 +21,3 @@ const getSavedQuery = (queryUri) => ({ } ` }); - -export default getSavedQuery; diff --git a/frontend/src/api/SavedQuery/getScheduledQuery.js b/frontend/src/services/graphql/SavedQuery/getScheduledQuery.js similarity index 87% rename from frontend/src/api/SavedQuery/getScheduledQuery.js rename to frontend/src/services/graphql/SavedQuery/getScheduledQuery.js index 11bb4166e..f2d36fac2 100644 --- a/frontend/src/api/SavedQuery/getScheduledQuery.js +++ b/frontend/src/services/graphql/SavedQuery/getScheduledQuery.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getScheduledQuery = (scheduledQueryUri) => ({ +export const getScheduledQuery = (scheduledQueryUri) => ({ variables: { scheduledQueryUri }, @@ -32,5 +32,3 @@ const getScheduledQuery = (scheduledQueryUri) => ({ } ` }); - -export default getScheduledQuery; diff --git a/frontend/src/services/graphql/SavedQuery/index.js b/frontend/src/services/graphql/SavedQuery/index.js new file mode 100644 index 000000000..e9d27077a --- /dev/null +++ b/frontend/src/services/graphql/SavedQuery/index.js @@ -0,0 +1,12 @@ +export * from './createSavedQuery'; +export * from './createScheduledQuery'; +export * from './deployScheduledQuery'; +export * from './getSavedQuery'; +export * from './getScheduledQuery'; +export * from './listSavedQueries'; +export * from './listScheduledQueries'; +export * from './listScheduledQueryExecutions'; +export * from './removeSavedQuery'; +export * from './runSavedQuery'; +export * from './runScheduledQuery'; +export * from './updateSavedQuery'; diff --git a/frontend/src/api/SavedQuery/listSavedQueries.js b/frontend/src/services/graphql/SavedQuery/listSavedQueries.js similarity index 85% rename from frontend/src/api/SavedQuery/listSavedQueries.js rename to frontend/src/services/graphql/SavedQuery/listSavedQueries.js index 5c4c9e29a..14636ef30 100644 --- a/frontend/src/api/SavedQuery/listSavedQueries.js +++ 
b/frontend/src/services/graphql/SavedQuery/listSavedQueries.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listSavedQueries = (filter) => ({ +export const listSavedQueries = (filter) => ({ variables: { filter }, @@ -25,5 +25,3 @@ const listSavedQueries = (filter) => ({ } ` }); - -export default listSavedQueries; diff --git a/frontend/src/api/SavedQuery/listScheduledQueries.js b/frontend/src/services/graphql/SavedQuery/listScheduledQueries.js similarity index 88% rename from frontend/src/api/SavedQuery/listScheduledQueries.js rename to frontend/src/services/graphql/SavedQuery/listScheduledQueries.js index fd8a364a2..2d9caecaf 100644 --- a/frontend/src/api/SavedQuery/listScheduledQueries.js +++ b/frontend/src/services/graphql/SavedQuery/listScheduledQueries.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listScheduledQueries = (filter) => ({ +export const listScheduledQueries = (filter) => ({ variables: { filter }, @@ -35,5 +35,3 @@ const listScheduledQueries = (filter) => ({ } ` }); - -export default listScheduledQueries; diff --git a/frontend/src/api/SavedQuery/listScheduledQueryExecutions.js b/frontend/src/services/graphql/SavedQuery/listScheduledQueryExecutions.js similarity index 75% rename from frontend/src/api/SavedQuery/listScheduledQueryExecutions.js rename to frontend/src/services/graphql/SavedQuery/listScheduledQueryExecutions.js index b98ed6fba..92d37a03a 100644 --- a/frontend/src/api/SavedQuery/listScheduledQueryExecutions.js +++ b/frontend/src/services/graphql/SavedQuery/listScheduledQueryExecutions.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const listScheduledQueryExecutions = (scheduledQueryUri) => ({ +export const listScheduledQueryExecutions = (scheduledQueryUri) => ({ variables: { scheduledQueryUri }, @@ -15,5 +15,3 @@ const listScheduledQueryExecutions = (scheduledQueryUri) => ({ } ` }); - -export default listScheduledQueryExecutions; diff --git a/frontend/src/services/graphql/SavedQuery/removeSavedQuery.js b/frontend/src/services/graphql/SavedQuery/removeSavedQuery.js new file mode 100644 index 000000000..209112f39 --- /dev/null +++ b/frontend/src/services/graphql/SavedQuery/removeSavedQuery.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const removeSavedQuery = (queryUri) => ({ + variables: { + queryUri + }, + mutation: gql` + mutation RemoveSavedQuery($queryUri: String!) 
{ + removeSavedQuery(savedQueryUri: $queryUri) + } + ` +}); diff --git a/frontend/src/api/SavedQuery/runSavedQuery.js b/frontend/src/services/graphql/SavedQuery/runSavedQuery.js similarity index 82% rename from frontend/src/api/SavedQuery/runSavedQuery.js rename to frontend/src/services/graphql/SavedQuery/runSavedQuery.js index df71cf4d6..90eb77ee2 100644 --- a/frontend/src/api/SavedQuery/runSavedQuery.js +++ b/frontend/src/services/graphql/SavedQuery/runSavedQuery.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const runSavedQuery = ({ savedQueryUri, sqlBody }) => ({ +export const runSavedQuery = ({ savedQueryUri, sqlBody }) => ({ variables: { savedQueryUri, // environmentUri: environmentUri, @@ -20,5 +20,3 @@ const runSavedQuery = ({ savedQueryUri, sqlBody }) => ({ } ` }); - -export default runSavedQuery; diff --git a/frontend/src/services/graphql/SavedQuery/runScheduledQuery.js b/frontend/src/services/graphql/SavedQuery/runScheduledQuery.js new file mode 100644 index 000000000..e8306377a --- /dev/null +++ b/frontend/src/services/graphql/SavedQuery/runScheduledQuery.js @@ -0,0 +1,12 @@ +import { gql } from 'apollo-boost'; + +export const runScheduledQuery = (scheduledQueryUri) => ({ + variables: { + scheduledQueryUri + }, + mutation: gql` + mutation RunScheduledQuery($scheduledQueryUri: String!) { + runScheduledQuery(scheduledQueryUri: $scheduledQueryUri) + } + ` +}); diff --git a/frontend/src/api/SavedQuery/updateSavedQuery.js b/frontend/src/services/graphql/SavedQuery/updateSavedQuery.js similarity index 81% rename from frontend/src/api/SavedQuery/updateSavedQuery.js rename to frontend/src/services/graphql/SavedQuery/updateSavedQuery.js index 48cc05e79..ddb0ee3c5 100644 --- a/frontend/src/api/SavedQuery/updateSavedQuery.js +++ b/frontend/src/services/graphql/SavedQuery/updateSavedQuery.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const updateSavedQuery = ({ queryUri, input }) => ({ +export const updateSavedQuery = ({ queryUri, input }) => ({ variables: { queryUri, input @@ -22,5 +22,3 @@ const updateSavedQuery = ({ queryUri, input }) => ({ } ` }); - -export default updateSavedQuery; diff --git a/frontend/src/services/graphql/Search/index.js b/frontend/src/services/graphql/Search/index.js new file mode 100644 index 000000000..09aacdfbc --- /dev/null +++ b/frontend/src/services/graphql/Search/index.js @@ -0,0 +1 @@ +export * from './searchResources'; diff --git a/frontend/src/api/Search/searchResources.js b/frontend/src/services/graphql/Search/searchResources.js similarity index 84% rename from frontend/src/api/Search/searchResources.js rename to frontend/src/services/graphql/Search/searchResources.js index 97be21bbe..de47b378d 100644 --- a/frontend/src/api/Search/searchResources.js +++ b/frontend/src/services/graphql/Search/searchResources.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const SearchResources = (filter) => ({ +export const SearchResources = (filter) => ({ variables: { filter }, @@ -23,5 +23,3 @@ const SearchResources = (filter) => ({ } ` }); - -export default SearchResources; diff --git a/frontend/src/api/ShareObject/createShareObject.js b/frontend/src/services/graphql/ShareObject/createShareObject.js similarity index 79% rename from frontend/src/api/ShareObject/createShareObject.js rename to frontend/src/services/graphql/ShareObject/createShareObject.js index ee2d66352..f82335e56 100644 --- a/frontend/src/api/ShareObject/createShareObject.js +++ b/frontend/src/services/graphql/ShareObject/createShareObject.js @@ -1,7 +1,6 @@ import { gql } 
from 'apollo-boost'; -const createShareObject = ({ datasetUri, itemUri, itemType, input }) => { - console.log('rcv', input); +export const createShareObject = ({ datasetUri, itemUri, itemType, input }) => { return { variables: { datasetUri, @@ -29,5 +28,3 @@ const createShareObject = ({ datasetUri, itemUri, itemType, input }) => { ` }; }; - -export default createShareObject; diff --git a/frontend/src/api/ShareObject/getShareRequestsToMe.js b/frontend/src/services/graphql/ShareObject/getShareRequestsToMe.js similarity index 92% rename from frontend/src/api/ShareObject/getShareRequestsToMe.js rename to frontend/src/services/graphql/ShareObject/getShareRequestsToMe.js index 8e2c7461a..75b2242f4 100644 --- a/frontend/src/api/ShareObject/getShareRequestsToMe.js +++ b/frontend/src/services/graphql/ShareObject/getShareRequestsToMe.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getShareRequestsToMe = ({ filter }) => ({ +export const getShareRequestsToMe = ({ filter }) => ({ variables: { filter }, query: gql` query getShareRequestsToMe($filter: ShareObjectFilter) { @@ -50,5 +50,3 @@ const getShareRequestsToMe = ({ filter }) => ({ } ` }); - -export default getShareRequestsToMe; diff --git a/frontend/src/services/graphql/ShareObject/index.js b/frontend/src/services/graphql/ShareObject/index.js new file mode 100644 index 000000000..033e2b72f --- /dev/null +++ b/frontend/src/services/graphql/ShareObject/index.js @@ -0,0 +1,2 @@ +export * from './createShareObject'; +export * from './getShareRequestsToMe'; diff --git a/frontend/src/api/Stack/getStack.js b/frontend/src/services/graphql/Stack/getStack.js similarity index 85% rename from frontend/src/api/Stack/getStack.js rename to frontend/src/services/graphql/Stack/getStack.js index 2973b875c..e432ec53e 100644 --- a/frontend/src/api/Stack/getStack.js +++ b/frontend/src/services/graphql/Stack/getStack.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getStack = (environmentUri, stackUri) => ({ +export const getStack = (environmentUri, stackUri) => ({ variables: { environmentUri, stackUri @@ -24,5 +24,3 @@ const getStack = (environmentUri, stackUri) => ({ } ` }); - -export default getStack; diff --git a/frontend/src/api/Stack/getStackLogs.js b/frontend/src/services/graphql/Stack/getStackLogs.js similarity index 78% rename from frontend/src/api/Stack/getStackLogs.js rename to frontend/src/services/graphql/Stack/getStackLogs.js index 61a89a924..01ee69dce 100644 --- a/frontend/src/api/Stack/getStackLogs.js +++ b/frontend/src/services/graphql/Stack/getStackLogs.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getStackLogs = (environmentUri, stackUri) => ({ +export const getStackLogs = (environmentUri, stackUri) => ({ variables: { environmentUri, stackUri @@ -14,5 +14,3 @@ const getStackLogs = (environmentUri, stackUri) => ({ } ` }); - -export default getStackLogs; diff --git a/frontend/src/services/graphql/Stack/index.js b/frontend/src/services/graphql/Stack/index.js new file mode 100644 index 000000000..b4a905d1f --- /dev/null +++ b/frontend/src/services/graphql/Stack/index.js @@ -0,0 +1,3 @@ +export * from './getStack'; +export * from './getStackLogs'; +export * from './updateStack'; diff --git a/frontend/src/api/Stack/updateStack.js b/frontend/src/services/graphql/Stack/updateStack.js similarity index 79% rename from frontend/src/api/Stack/updateStack.js rename to frontend/src/services/graphql/Stack/updateStack.js index 889a9676c..cc8b81637 100644 --- a/frontend/src/api/Stack/updateStack.js +++ 
b/frontend/src/services/graphql/Stack/updateStack.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const updateStack = (targetUri, targetType) => ({ +export const updateStack = (targetUri, targetType) => ({ variables: { targetUri, targetType @@ -15,5 +15,3 @@ const updateStack = (targetUri, targetType) => ({ } ` }); - -export default updateStack; diff --git a/frontend/src/services/graphql/Test/index.js b/frontend/src/services/graphql/Test/index.js new file mode 100644 index 000000000..607718c2a --- /dev/null +++ b/frontend/src/services/graphql/Test/index.js @@ -0,0 +1 @@ +export * from './test'; diff --git a/frontend/src/services/graphql/Test/test.js b/frontend/src/services/graphql/Test/test.js new file mode 100644 index 000000000..e02219109 --- /dev/null +++ b/frontend/src/services/graphql/Test/test.js @@ -0,0 +1,9 @@ +import { gql } from 'apollo-boost'; + +export const test = () => ({ + query: gql` + query Test { + test + } + ` +}); diff --git a/frontend/src/api/User/findUser.js b/frontend/src/services/graphql/User/findUser.js similarity index 84% rename from frontend/src/api/User/findUser.js rename to frontend/src/services/graphql/User/findUser.js index 916a57340..a67218ca4 100644 --- a/frontend/src/api/User/findUser.js +++ b/frontend/src/services/graphql/User/findUser.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const findUser = ({ userName, page, pageSize }) => ({ +export const findUser = ({ userName, page, pageSize }) => ({ variables: { userName, pageSize, @@ -23,5 +23,3 @@ const findUser = ({ userName, page, pageSize }) => ({ } ` }); - -export default findUser; diff --git a/frontend/src/services/graphql/User/index.js b/frontend/src/services/graphql/User/index.js new file mode 100644 index 000000000..c94bd998e --- /dev/null +++ b/frontend/src/services/graphql/User/index.js @@ -0,0 +1 @@ +export * from './findUser'; diff --git a/frontend/src/api/Vote/countUpVotes.js b/frontend/src/services/graphql/Vote/countUpVotes.js similarity index 75% rename from frontend/src/api/Vote/countUpVotes.js rename to frontend/src/services/graphql/Vote/countUpVotes.js index 63eb1cd1f..c8fbf05ea 100644 --- a/frontend/src/api/Vote/countUpVotes.js +++ b/frontend/src/services/graphql/Vote/countUpVotes.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const countUpVotes = (targetUri, targetType) => ({ +export const countUpVotes = (targetUri, targetType) => ({ variables: { targetUri, targetType @@ -11,5 +11,3 @@ const countUpVotes = (targetUri, targetType) => ({ } ` }); - -export default countUpVotes; diff --git a/frontend/src/api/Vote/getVote.js b/frontend/src/services/graphql/Vote/getVote.js similarity index 78% rename from frontend/src/api/Vote/getVote.js rename to frontend/src/services/graphql/Vote/getVote.js index 5e7c39e67..0d2873d7d 100644 --- a/frontend/src/api/Vote/getVote.js +++ b/frontend/src/services/graphql/Vote/getVote.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const getVote = (targetUri, targetType) => ({ +export const getVote = (targetUri, targetType) => ({ variables: { targetUri, targetType @@ -13,5 +13,3 @@ const getVote = (targetUri, targetType) => ({ } ` }); - -export default getVote; diff --git a/frontend/src/services/graphql/Vote/index.js b/frontend/src/services/graphql/Vote/index.js new file mode 100644 index 000000000..6e252918c --- /dev/null +++ b/frontend/src/services/graphql/Vote/index.js @@ -0,0 +1,3 @@ +export * from './countUpVotes'; +export * from './getVote'; +export * from './upVote'; diff --git a/frontend/src/api/Vote/upVote.js 
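For context (not part of the patch): the Stack helpers keep their call signatures through this move; only the export style and file location change. Below is a minimal consumer sketch under that assumption, with a hypothetical hook name and import paths:

import { useCallback } from 'react';
// The bare specifiers assume the same absolute-import resolution that useClient.js
// itself relies on (e.g. `from 'globalErrors'`); use relative paths otherwise.
import { useClient } from 'services/hooks/useClient';
import { getStack, updateStack } from 'services/graphql/Stack';

export const useStackActions = (environmentUri, stackUri) => {
  const client = useClient(); // null until the auth token has resolved

  // Read the current stack, using the same client.query(helper(...)) pattern the views use.
  const fetchStack = useCallback(async () => {
    if (!client) return null;
    const response = await client.query(getStack(environmentUri, stackUri));
    // Root field assumed to mirror the operation name, as elsewhere in the app.
    return response.errors ? null : response.data.getStack;
  }, [client, environmentUri, stackUri]);

  // Ask the backend to redeploy a stack; mutations go through client.mutate(...).
  const redeploy = useCallback(
    async (targetUri, targetType) => {
      if (!client) return false;
      const response = await client.mutate(updateStack(targetUri, targetType));
      return !response.errors;
    },
    [client]
  );

  return { fetchStack, redeploy };
};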
b/frontend/src/services/graphql/Vote/upVote.js similarity index 82% rename from frontend/src/api/Vote/upVote.js rename to frontend/src/services/graphql/Vote/upVote.js index 5b783b462..579cb30db 100644 --- a/frontend/src/api/Vote/upVote.js +++ b/frontend/src/services/graphql/Vote/upVote.js @@ -1,6 +1,6 @@ import { gql } from 'apollo-boost'; -const upVote = (input) => ({ +export const upVote = (input) => ({ variables: { input }, @@ -15,5 +15,3 @@ const upVote = (input) => ({ } ` }); - -export default upVote; diff --git a/frontend/src/services/graphql/index.js b/frontend/src/services/graphql/index.js new file mode 100644 index 000000000..8d0e00804 --- /dev/null +++ b/frontend/src/services/graphql/index.js @@ -0,0 +1,20 @@ +export * from './ApiKeys'; +export * from './Dashboard'; +export * from './Datasets'; +export * from './DatasetTable'; +export * from './Environment'; +export * from './Feed'; +export * from './Glossary'; +export * from './Groups'; +export * from './KeyValueTags'; +export * from './Metric'; +export * from './Notification'; +export * from './Organization'; +export * from './Principal'; +export * from './SavedQuery'; +export * from './Search'; +export * from './ShareObject'; +export * from './Stack'; +export * from './Test'; +export * from './User'; +export * from './Vote'; diff --git a/frontend/src/services/hooks/index.js b/frontend/src/services/hooks/index.js new file mode 100644 index 000000000..86222cbed --- /dev/null +++ b/frontend/src/services/hooks/index.js @@ -0,0 +1,2 @@ +export * from './useClient'; +export * from './useGroups'; diff --git a/frontend/src/hooks/useClient.js b/frontend/src/services/hooks/useClient.js similarity index 85% rename from frontend/src/hooks/useClient.js rename to frontend/src/services/hooks/useClient.js index ccf49d1bd..a1a9e2f01 100644 --- a/frontend/src/hooks/useClient.js +++ b/frontend/src/services/hooks/useClient.js @@ -1,15 +1,14 @@ -import { useEffect, useState } from 'react'; +import { from } from '@apollo/client'; +import { onError } from '@apollo/client/link/error'; import { ApolloClient, ApolloLink, HttpLink, InMemoryCache } from 'apollo-boost'; -import { onError } from '@apollo/client/link/error'; -import { from } from '@apollo/client'; -import useToken from './useToken'; -import { useDispatch } from '../store'; -import { SET_ERROR } from '../store/errorReducer'; +import { useEffect, useState } from 'react'; +import { useToken } from 'authentication'; +import { SET_ERROR, useDispatch } from 'globalErrors'; const defaultOptions = { watchQuery: { @@ -26,7 +25,7 @@ const defaultOptions = { } }; -const useClient = () => { +export const useClient = () => { const dispatch = useDispatch(); const [client, setClient] = useState(null); const token = useToken(); @@ -53,14 +52,14 @@ const useClient = () => { const errorLink = onError(({ graphQLErrors, networkError }) => { if (graphQLErrors) { graphQLErrors.forEach(({ message, locations, path }) => { - console.log( + console.error( `[GraphQL error]: Message: ${message}, Location: ${locations}, Path: ${path}` ); }); } if (networkError) { - console.log(`[Network error]: ${networkError}`); + console.error(`[Network error]: ${networkError}`); dispatch({ type: SET_ERROR, error: 'Network error occurred' }); } }); @@ -73,10 +72,8 @@ const useClient = () => { setClient(apolloClient); }; if (token) { - initClient().catch((e) => console.log(e)); + initClient().catch((e) => console.error(e)); } }, [token, dispatch]); return client; }; - -export default useClient; diff --git 
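The barrel files above are what let callers migrate away from deep relative default imports. A before/after sketch of the import style (illustrative only), assuming the bare 'services' specifier resolves the same way 'authentication' and 'globalErrors' already do inside useClient.js:

// Before this refactor (default exports, deep relative paths):
//   import useClient from '../../hooks/useClient';
//   import getShareRequestsToMe from '../../api/ShareObject/getShareRequestsToMe';
//   import { SET_ERROR } from '../../store/errorReducer';
//   import { useDispatch } from '../../store';

// After (named exports re-exported through services/index.js and globalErrors):
import { useClient, useGroups, getShareRequestsToMe, SearchResources } from 'services';
import { SET_ERROR, useDispatch } from 'globalErrors';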
a/frontend/src/hooks/useGroups.js b/frontend/src/services/hooks/useGroups.js similarity index 88% rename from frontend/src/hooks/useGroups.js rename to frontend/src/services/hooks/useGroups.js index 7a9dbdf82..43396e4b6 100644 --- a/frontend/src/hooks/useGroups.js +++ b/frontend/src/services/hooks/useGroups.js @@ -1,9 +1,8 @@ -import { useEffect, useState } from 'react'; import { Auth } from 'aws-amplify'; -import { SET_ERROR } from '../store/errorReducer'; -import { useDispatch } from '../store'; +import { useEffect, useState } from 'react'; +import { SET_ERROR, useDispatch } from 'globalErrors'; -const useGroups = () => { +export const useGroups = () => { const dispatch = useDispatch(); const [groups, setGroups] = useState(null); const fetchGroups = async () => { @@ -34,7 +33,6 @@ const useGroups = () => { ); } }); + return groups; }; - -export default useGroups; diff --git a/frontend/src/services/index.js b/frontend/src/services/index.js new file mode 100644 index 000000000..840b9fd01 --- /dev/null +++ b/frontend/src/services/index.js @@ -0,0 +1,2 @@ +export * from './graphql'; +export * from './hooks'; diff --git a/frontend/src/store/index.js b/frontend/src/store/index.js deleted file mode 100644 index 0477ed308..000000000 --- a/frontend/src/store/index.js +++ /dev/null @@ -1,17 +0,0 @@ -import { - useDispatch as useReduxDispatch, - useSelector as useReduxSelector -} from 'react-redux'; -import { configureStore } from '@reduxjs/toolkit'; -import rootReducer from './rootReducer'; - -const store = configureStore({ - reducer: rootReducer, - devTools: process.env.REACT_APP_ENABLE_REDUX_DEV_TOOLS === 'true' -}); - -export const useSelector = useReduxSelector; - -export const useDispatch = () => useReduxDispatch(); - -export default store; diff --git a/frontend/src/store/rootReducer.js b/frontend/src/store/rootReducer.js deleted file mode 100644 index dac2f1f63..000000000 --- a/frontend/src/store/rootReducer.js +++ /dev/null @@ -1,8 +0,0 @@ -import { combineReducers } from '@reduxjs/toolkit'; -import { errorReducer } from './errorReducer'; - -const rootReducer = combineReducers({ - error: errorReducer -}); - -export default rootReducer; diff --git a/frontend/src/theme/index.js b/frontend/src/theme/index.js deleted file mode 100644 index faf244044..000000000 --- a/frontend/src/theme/index.js +++ /dev/null @@ -1,33 +0,0 @@ -import { createTheme, responsiveFontSizes } from '@mui/material/styles'; -import { baseThemeOptions } from './BaseThemeOptions'; -import { darkThemeOptions } from './DarkThemeOptions'; -import { lightThemeOptions } from './LightThemeOptions'; -import { THEMES } from '../constants'; - -export const createMaterialTheme = (config) => { - let theme = createTheme( - baseThemeOptions, - config.theme === THEMES.DARK ? darkThemeOptions : lightThemeOptions, - { - direction: config.direction - }, - { - ...(config.roundedCorners - ? { - shape: { - borderRadius: 16 - } - } - : { - shape: { - borderRadius: 8 - } - }) - } - ); - - if (config.responsiveFontSizes) { - theme = responsiveFontSizes(theme); - } - return theme; -}; diff --git a/frontend/src/utils/constants.js b/frontend/src/utils/constants.js new file mode 100644 index 000000000..a92896faa --- /dev/null +++ b/frontend/src/utils/constants.js @@ -0,0 +1,27 @@ +export const AwsRegions = [ + { name: 'US East (Ohio)', code: 'us-east-2' }, + { name: 'US East (N. Virginia)', code: 'us-east-1' }, + { name: 'US West (N. 
California)', code: 'us-west-1' }, + { name: 'US West (Oregon)', code: 'us-west-2' }, + { name: 'Africa (Cape Town)', code: 'af-south-1' }, + { name: 'Asia Pacific (Hong Kong)', code: 'ap-east-1' }, + { name: 'Asia Pacific (Mumbai)', code: 'ap-south-1' }, + { name: 'Asia Pacific (Osaka-Local)', code: 'ap-northeast-3' }, + { name: 'Asia Pacific (Seoul)', code: 'ap-northeast-2' }, + { name: 'Asia Pacific (Singapore)', code: 'ap-southeast-1' }, + { name: 'Asia Pacific (Sydney)', code: 'ap-southeast-2' }, + { name: 'Asia Pacific (Tokyo)', code: 'ap-northeast-1' }, + { name: 'Canada (Central)', code: 'ca-central-1' }, + { name: 'China (Beijing)', code: 'cn-north-1' }, + { name: 'China (Ningxia)', code: 'cn-northwest-1' }, + { name: 'Europe (Frankfurt)', code: 'eu-central-1' }, + { name: 'Europe (Ireland)', code: 'eu-west-1' }, + { name: 'Europe (London)', code: 'eu-west-2' }, + { name: 'Europe (Milan)', code: 'eu-south-1' }, + { name: 'Europe (Paris)', code: 'eu-west-3' }, + { name: 'Europe (Stockholm)', code: 'eu-north-1' }, + { name: 'Middle East (Bahrain)', code: 'me-south-1' }, + { name: 'South America (São Paulo)', code: 'sa-east-1' }, + { name: 'AWS GovCloud (US-East)', code: 'us-gov-east-1' }, + { name: 'AWS GovCloud (US)', code: 'us-gov-west-1' } +]; diff --git a/frontend/src/utils/bytesToSize.js b/frontend/src/utils/helpers/bytesToSize.js similarity index 82% rename from frontend/src/utils/bytesToSize.js rename to frontend/src/utils/helpers/bytesToSize.js index a51a9bc2a..fa438da53 100644 --- a/frontend/src/utils/bytesToSize.js +++ b/frontend/src/utils/helpers/bytesToSize.js @@ -1,5 +1,5 @@ /* eslint-disable no-restricted-properties */ -const bytesToSize = (bytes, decimals = 2) => { +export const bytesToSize = (bytes, decimals = 2) => { if (bytes === 0) { return '0 Bytes'; } @@ -11,5 +11,3 @@ const bytesToSize = (bytes, decimals = 2) => { return `${parseFloat((bytes / Math.pow(k, i)).toFixed(dm))} ${sizes[i]}`; }; - -export default bytesToSize; diff --git a/frontend/src/utils/dayjs.js b/frontend/src/utils/helpers/dayjs.js similarity index 100% rename from frontend/src/utils/dayjs.js rename to frontend/src/utils/helpers/dayjs.js diff --git a/frontend/src/utils/helpers/index.js b/frontend/src/utils/helpers/index.js new file mode 100644 index 000000000..26cfd9ef2 --- /dev/null +++ b/frontend/src/utils/helpers/index.js @@ -0,0 +1,4 @@ +export * from './bytesToSize'; +export * from './dayjs'; +export * from './listToTree'; +export * from './moduleUtils'; diff --git a/frontend/src/utils/listToTree.js b/frontend/src/utils/helpers/listToTree.js similarity index 92% rename from frontend/src/utils/listToTree.js rename to frontend/src/utils/helpers/listToTree.js index 1a21b1d1f..de0e4224f 100644 --- a/frontend/src/utils/listToTree.js +++ b/frontend/src/utils/helpers/listToTree.js @@ -1,4 +1,4 @@ -const listToTree = (data, options) => { +export const listToTree = (data, options) => { options = options || {}; const ID_KEY = options.idKey || 'id'; const PARENT_KEY = options.parentKey || 'parent'; @@ -30,4 +30,3 @@ const listToTree = (data, options) => { return tree; }; -export default listToTree; diff --git a/frontend/src/utils/helpers/moduleUtils.js b/frontend/src/utils/helpers/moduleUtils.js new file mode 100644 index 000000000..3da56e999 --- /dev/null +++ b/frontend/src/utils/helpers/moduleUtils.js @@ -0,0 +1,41 @@ +/* eslint-disable no-restricted-properties */ +import config from '../../generated/config.json'; + +const ModuleNames = { + CATALOG: 'catalog', + DATASETS: 'datasets', + SHARES: 
'shares', + GLOSSARIES: 'glossaries', + WORKSHEETS: 'worksheets', + NOTEBOOKS: 'notebooks', + MLSTUDIO: 'mlstudio', + PIPELINES: 'datapipelines', + DASHBOARDS: 'dashboards' +}; + +function isModuleEnabled(module) { + if (module === ModuleNames.CATALOG || module === ModuleNames.GLOSSARIES) { + return ( + getModuleActiveStatus(ModuleNames.DATASETS) || + getModuleActiveStatus(ModuleNames.DASHBOARDS) + ); + } + if (module === ModuleNames.SHARES) { + return getModuleActiveStatus(ModuleNames.DATASETS); + } + + return getModuleActiveStatus(module); +} + +function getModuleActiveStatus(moduleKey) { + if ( + config.modules && + config.modules[moduleKey] && + config.modules[moduleKey].active !== undefined + ) { + return config.modules[moduleKey].active; + } + return false; +} + +export { ModuleNames, isModuleEnabled }; diff --git a/frontend/src/utils/index.js b/frontend/src/utils/index.js new file mode 100644 index 000000000..5a9087d1d --- /dev/null +++ b/frontend/src/utils/index.js @@ -0,0 +1,2 @@ +export * from './constants'; +export * from './helpers'; diff --git a/frontend/src/views/Administration/AdministratorDashboardViewer.js b/frontend/src/views/Administration/AdministratorDashboardViewer.js deleted file mode 100644 index 196738482..000000000 --- a/frontend/src/views/Administration/AdministratorDashboardViewer.js +++ /dev/null @@ -1,411 +0,0 @@ -import { createRef, useCallback, useEffect, useState } from 'react'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; -import * as ReactIf from 'react-if'; -import { - Box, - Grid, - Card, - CardContent, - CardHeader, - Container, - Divider, - TextField, - Typography, -} from '@mui/material'; -import { AddOutlined, ArrowRightAlt } from '@mui/icons-material'; -import { LoadingButton } from '@mui/lab'; -import getMonitoringDashboardId from '../../api/Tenant/getMonitoringDashboardId'; -import getMonitoringVPCConnectionId from '../../api/Tenant/getMonitoringVPCConnectionId'; -import updateSSMParameter from "../../api/Tenant/updateSSMParameter"; -import getTrustAccount from '../../api/Environment/getTrustAccount'; -import createQuicksightDataSourceSet from '../../api/Tenant/createQuicksightDataSourceSet'; -import getPlatformAuthorSession from '../../api/Tenant/getPlatformAuthorSession'; -import getPlatformReaderSession from '../../api/Tenant/getPlatformReaderSession'; -import { useDispatch } from '../../store'; -import useClient from '../../hooks/useClient'; -import { SET_ERROR } from '../../store/errorReducer'; -import useSettings from '../../hooks/useSettings'; - -const QuickSightEmbedding = require('amazon-quicksight-embedding-sdk'); - -const DashboardViewer = () => { - const dispatch = useDispatch(); - const client = useClient(); - const { settings } = useSettings(); - const [dashboardId, setDashboardId] = useState(''); - const [vpcConnectionId, setVpcConnectionId] = useState(''); - const [trustedAccount, setTrustedAccount] = useState(null); - const [dashboardRef] = useState(createRef()); - const [sessionUrl, setSessionUrl] = useState(null); - const [isOpeningSession, setIsOpeningSession] = useState(false); - const [isCreatingDataSource, setIsCreatingDataSource] = useState(false); - - const fetchTrustedAccount = useCallback(async () => { - const response = await client.query(getTrustAccount()); - if (!response.errors) { - setTrustedAccount(response.data.getTrustAccount); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }, [client, dispatch]); - - const fetchMonitoringVPCConnectionId = useCallback( 
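The helper above is the consumer-facing side of the module flags in generated/config.json. A small sketch of how it is meant to be read; the config shape is inferred from getModuleActiveStatus, and the navigation entries are hypothetical:

// generated/config.json is assumed to look roughly like:
//   { "modules": { "datasets": { "active": true }, "dashboards": { "active": false } } }
import { ModuleNames, isModuleEnabled } from 'utils'; // 'utils' barrel resolution is an assumption

// Hypothetical navigation filter: catalog and glossaries light up if datasets OR
// dashboards is active, shares follows datasets, every other entry reads its own flag.
const navItems = [
  { title: 'Datasets', path: '/console/datasets', module: ModuleNames.DATASETS },
  { title: 'Catalog', path: '/console/catalog', module: ModuleNames.CATALOG },
  { title: 'Shares', path: '/console/shares', module: ModuleNames.SHARES }
];

export const visibleNavItems = navItems.filter((item) => isModuleEnabled(item.module));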
async () => { - const response = await client.query(getMonitoringVPCConnectionId()); - if (!response.errors) { - setVpcConnectionId(response.data.getMonitoringVPCConnectionId); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }, [client, dispatch]); - - const fetchMonitoringDashboardId = useCallback( async () => { - const response = await client.query(getMonitoringDashboardId()); - if (!response.errors) { - setDashboardId(response.data.getMonitoringDashboardId); - if (response.data.getMonitoringDashboardId !== "updateme"){ - const resp = await client.query(getPlatformReaderSession(response.data.getMonitoringDashboardId)); - if (!resp.errors){ - setSessionUrl(resp.data.getPlatformReaderSession) - const options = { - url: resp.data.getPlatformReaderSession, - scrolling: 'no', - height: '700px', - width: '100%', - locale: 'en-US', - footerPaddingEnabled: true, - sheetTabsDisabled: false, - printEnabled: false, - maximize: true, - container: dashboardRef.current - }; - QuickSightEmbedding.embedDashboard(options); - }else{ - dispatch({ type: SET_ERROR, error: resp.errors[0].message }); - } - } - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }, [client, dispatch, dashboardRef]); - - useEffect(() => { - if (client) { - fetchMonitoringDashboardId().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - fetchMonitoringVPCConnectionId().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - fetchTrustedAccount().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, dispatch,fetchMonitoringDashboardId, fetchMonitoringVPCConnectionId, fetchTrustedAccount]); - - async function submitVpc(values, setStatus, setSubmitting, setErrors){ - try { - setVpcConnectionId(values.vpc) - const response = await client.mutate(updateSSMParameter({name:"VPCConnectionId", value:values.vpc})); - if (!response.errors) { - setStatus({success: true}); - setSubmitting(false); - }else{ - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - console.error(err); - setStatus({ success: false }); - setErrors({ submit: err.message }); - setSubmitting(false); - dispatch({ type: SET_ERROR, error: err.message }); - } - }; - - async function submitDash(values, setStatus, setSubmitting, setErrors){ - try { - setDashboardId(values.dash) - const response = await client.mutate(updateSSMParameter({name:"DashboardId", value:values.dash})); - if (!response.errors) { - setStatus({success: true}); - setSubmitting(false); - const resp = await client.query(getPlatformReaderSession(values.dash)); - if (!resp.errors){ - setSessionUrl(resp.data.getPlatformReaderSession) - const options = { - url: resp.data.getPlatformReaderSession, - scrolling: 'no', - height: '700px', - width: '100%', - locale: 'en-US', - footerPaddingEnabled: true, - sheetTabsDisabled: false, - printEnabled: false, - maximize: true, - container: dashboardRef.current - }; - QuickSightEmbedding.embedDashboard(options); - }else{ - dispatch({ type: SET_ERROR, error: resp.errors[0].message }); - } - }else{ - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - console.error(err); - setStatus({ success: false }); - setErrors({ submit: err.message }); - setSubmitting(false); - dispatch({ type: SET_ERROR, error: err.message }); - } - }; - - async function createQuicksightdata () { - setIsCreatingDataSource(true) - const response = await 
client.mutate(createQuicksightDataSourceSet({vpcConnectionId})); - if (response.errors) { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setIsCreatingDataSource(false) - } - - const startAuthorSession = async () => { - setIsOpeningSession(true); - const response = await client.query(getPlatformAuthorSession(trustedAccount)); - if (!response.errors) { - window.open(response.data.getPlatformAuthorSession, '_blank'); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setIsOpeningSession(false); - }; - - - return ( - - - - - - - - - - 1. Enable Quicksight Enterprise Edition in AWS Account = {trustedAccount}. Check the user guide for more details. - - - - - 2. Create a VPC Connection between Quicksight and RDS VPC. Check the user guide for more details. - - - - - - - - - - - - - - 3. Introduce or Update the VPC Connection ID value in the following box: - - - - - { - await submitVpc(values, setStatus, setSubmitting, setErrors); - }} - > - {({ - errors, - handleBlur, - handleChange, - handleSubmit, - isSubmitting, - setFieldValue, - touched, - values - }) => ( -
- - - - - - - Save - - - -
- )} -
- - - - 4. Click on the button to automatically create the data source connecting our RDS Aurora database with Quicksight - - - - - } - sx={{ mt: 1, mb: 2, ml: 2 }} - variant="outlined" - onClick={() => { - createQuicksightdata().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - }} - > - Create Quicksight data source - - - - -
- - - - - - - - - 5. Go to Quicksight to build your Analysis and publish a Dashboard. Check the user guide for more details. - - - - - } - variant="outlined" - onClick={startAuthorSession} - sx={{ mt: 1, mb: 2, ml: 2 }} - > - Start Quicksight session - - - - - - - - 6. Introduce or update your Dashboard ID - - - - - { - await submitDash(values, setStatus, setSubmitting, setErrors); - }} - > - {({ - errors, - handleBlur, - handleChange, - handleSubmit, - isSubmitting, - setFieldValue, - touched, - values - }) => ( -
- - - - - - - Save - - - -
- )} -
- - - - - - ); -}; - -export default DashboardViewer; diff --git a/frontend/src/views/Catalog/GlossarySearch.js b/frontend/src/views/Catalog/GlossarySearch.js deleted file mode 100644 index d47a322ed..000000000 --- a/frontend/src/views/Catalog/GlossarySearch.js +++ /dev/null @@ -1,324 +0,0 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import { makeStyles } from '@mui/styles'; -import { TreeItem, TreeView } from '@mui/lab'; -import { Box, CircularProgress, Typography } from '@mui/material'; -import PropTypes from 'prop-types'; -import ArrowDropDownIcon from '@mui/icons-material/ArrowDropDown'; -import ArrowRightIcon from '@mui/icons-material/ArrowRight'; -import * as BsIcons from 'react-icons/bs'; -import listToTree from '../../utils/listToTree'; -import searchGlossary from '../../api/Glossary/searchGlossary'; -import useClient from '../../hooks/useClient'; -import Scrollbar from '../../components/Scrollbar'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import * as Defaults from '../../components/defaults'; - -const useTreeItemStyles = makeStyles((theme) => ({ - root: { - color: theme.palette.text.secondary, - '&:focus > $content, &$selected > $content': { - backgroundColor: `var(--tree-view-bg-color, ${theme.palette.grey[400]})`, - color: 'var(--tree-view-color)' - }, - '&:focus > $content $label, &:hover > $content $label, &$selected > $content $label': - { - backgroundColor: 'transparent' - } - }, - content: { - color: theme.palette.text.secondary, - borderTopRightRadius: theme.spacing(2), - borderBottomRightRadius: theme.spacing(2), - paddingRight: theme.spacing(1), - fontWeight: theme.typography.fontWeightMedium, - '$expanded > &': { - fontWeight: theme.typography.fontWeightRegular - } - }, - group: { - marginLeft: 0, - '& $content': { - paddingLeft: theme.spacing(2) - } - }, - expanded: {}, - selected: {}, - label: { - fontWeight: 'inherit', - color: 'inherit' - }, - labelRoot: { - display: 'flex', - alignItems: 'center', - padding: theme.spacing(1, 0.5) - }, - labelIcon: { - marginRight: theme.spacing(1) - }, - labelText: { - fontWeight: 'inherit', - flexGrow: 1 - } -})); -function StyledTreeItem(props) { - const classes = useTreeItemStyles(); - const { - labelText, - labelIcon: LabelIcon, - labelInfo, - color, - bgColor, - ...other - } = props; - - return ( - - - - {labelText} - - - {labelInfo} - -
- } - style={{ - '--tree-view-color': color, - '--tree-view-bg-color': bgColor - }} - classes={{ - root: classes.root, - content: classes.content, - expanded: classes.expanded, - selected: classes.selected, - group: classes.group, - label: classes.label - }} - {...other} - /> - ); -} - -StyledTreeItem.propTypes = { - bgColor: PropTypes.string, - color: PropTypes.string, - labelIcon: PropTypes.elementType.isRequired, - labelInfo: PropTypes.string, - labelText: PropTypes.string.isRequired -}; - -const useStyles = makeStyles({ - root: { - height: 264, - flexGrow: 1, - maxWidth: 400 - } -}); - -const GlossarySearch = ({ matches, setQuery }) => { - const client = useClient(); - const classes = useStyles(); - const dispatch = useDispatch(); - const [tree, setTree] = useState([]); - const [fetchingItems, setFetchingItems] = useState(true); - const [selectedTerms] = useState(matches.map((match) => match.key)); - const getIcon = (nodeItem) => { - if (nodeItem.__typename === 'Glossary') { - return ; - } - if (nodeItem.__typename === 'Category') { - return ; - } - return ; - }; - const select = (node) => { - const terms = [node.nodeUri]; - - setQuery({ - query: { - terms: { - glossary: terms.map((p) => p.toLowerCase()) - } - }, - value: [node.label] - }); - }; - const unselect = (node) => { - const terms = [node.nodeUri]; - - setQuery({ - query: { - terms: { - glossary: terms.map((p) => p.toLowerCase()) - } - }, - value: [node.label] - }); - }; - const isSelected = (node) => selectedTerms.indexOf(node.nodeUri) !== -1; - - const toggle = (node) => { - if (isSelected(node)) { - unselect(node); - } else { - select(node); - } - }; - const fetchItems = useCallback(async () => { - setFetchingItems(true); - const response = await client.query( - searchGlossary(Defaults.SelectListFilter) - ); - if (!response.errors) { - setTree( - listToTree(response.data.searchGlossary.nodes, { - idKey: 'nodeUri', - parentKey: 'parentUri' - }) - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setFetchingItems(false); - }, [client, dispatch]); - useEffect(() => { - if (client) { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, dispatch, fetchItems]); - return ( - - {fetchingItems ? ( - - ) : ( - - {tree && tree.length > 0 ? ( - - - - } - defaultExpandIcon={} - defaultEndIcon={
} - > - {tree.map((node) => ( - toggle(node)} - labelText={ - - - {node.label} - - - } - labelIcon={() => getIcon(node)} - > - {node.children && - node.children.map((category) => ( - toggle(category)} - labelText={ - - - {category.label} - - - } - labelIcon={() => getIcon(category)} - > - {category.children && - category.children.map((term) => ( - - - {term.label} - - - } - labelIcon={() => getIcon(term)} - color="#1a73e8" - bgColor="#e8f0fe" - onClick={() => toggle(term)} - /> - ))} - - ))} - - ))} - - - - - ) : ( - - - No glossaries found - - - )} - - )} - - ); -}; -GlossarySearch.propTypes = { - setQuery: PropTypes.func.isRequired, - matches: PropTypes.array.isRequired -}; -export default GlossarySearch; diff --git a/frontend/src/views/Catalog/GlossarySearchComponent.js b/frontend/src/views/Catalog/GlossarySearchComponent.js deleted file mode 100644 index 15c134019..000000000 --- a/frontend/src/views/Catalog/GlossarySearchComponent.js +++ /dev/null @@ -1,35 +0,0 @@ -import { ReactiveComponent } from '@appbaseio/reactivesearch'; -import { Box } from '@mui/material'; -import React from 'react'; -import GlossarySearch from './GlossarySearch'; - -const GlossarySearchComponent = (innerClass) => ( - - ({ - aggs: { - glossary: { - terms: { - field: 'glossary' - } - } - } - })} - render={({ aggregations, setQuery }) => { - let matches = []; - if ( - aggregations && - aggregations.glossary && - aggregations.glossary.buckets.length - ) { - matches = aggregations.glossary.buckets; - } - return ; - }} - /> - -); -export default GlossarySearchComponent; diff --git a/frontend/src/views/Catalog/Hit.js b/frontend/src/views/Catalog/Hit.js deleted file mode 100644 index bbfb56980..000000000 --- a/frontend/src/views/Catalog/Hit.js +++ /dev/null @@ -1,342 +0,0 @@ -import { Link as RouterLink } from 'react-router-dom'; -import * as BsIcons from 'react-icons/bs'; -import * as FiIcons from 'react-icons/fi'; -import * as ReactIf from 'react-if'; -import { - Box, - Card, - Chip, - CircularProgress, - Divider, - Grid, - IconButton, - Link, - Tooltip, - Typography -} from '@mui/material'; -import PropTypes from 'prop-types'; -import * as FaIcons from 'react-icons/fa'; -import { LockOpen, ThumbUp } from '@mui/icons-material'; -import React, { useState } from 'react'; -import { MdShowChart } from 'react-icons/md'; -import IconAvatar from '../../components/IconAvatar'; -import RequestAccessModal from './RequestAccessModal'; -import { dayjs } from '../../utils/dayjs'; -import RequestDashboardAccessModal from './RequestDashboardAccessModal'; -import useCardStyle from '../../hooks/useCardStyle'; - -const HitICon = ({ hit }) => ( - - - } /> - - - } /> - - - } /> - - - } /> - - -); - -HitICon.propTypes = { - hit: PropTypes.object.isRequired -}; - -const Hit = ({ hit }) => { - const classes = useCardStyle(); - const [isRequestAccessOpen, setIsRequestAccessOpen] = useState(false); - const [isOpeningModal, setIsOpeningModal] = useState(false); - const [isRequestDashboardAccessOpen, setIsRequestDashboardAccessOpen] = - useState(false); - const [isOpeningDashboardModal, setIsOpeningDashboardModal] = useState(false); - const handleRequestAccessModalOpen = () => { - setIsOpeningModal(true); - setIsRequestAccessOpen(true); - }; - - const handleRequestAccessModalClose = () => { - setIsRequestAccessOpen(false); - }; - - const handleRequestDashboardAccessModalOpen = () => { - setIsOpeningDashboardModal(true); - setIsRequestDashboardAccessOpen(true); - }; - - const handleRequestDashboardAccessModalClose = () => { - 
setIsOpeningDashboardModal(false); - setIsRequestDashboardAccessOpen(false); - }; - - return ( - - - - - - {hit.resourceKind === 'dataset' && ( - - {hit.label} - - )} - {hit.resourceKind === 'table' && ( - - {hit.label} - - )} - {hit.resourceKind === 'folder' && ( - - {hit.label} - - )} - {hit.resourceKind === 'dashboard' && ( - - {hit.label} - - )} - - by{' '} - - {hit.owner} - {' '} - | created {dayjs(hit.created).fromNow()} - - - - - - - - {hit.description || 'No description provided'} - - - - - - - - Team - - - - - - {hit.admins || '-'} - - - - - - - - - - - {' Environment'} - - - - - - {hit.environmentName || '-'} - - - - - - - - - - Region - - - - - {hit.region} - - - - - - {hit.tags && hit.tags.length > 0 && ( - - {hit.topics.concat(hit.tags.slice(0, 5)).map((tag) => ( - - {tag} - - } - variant="filled" - /> - ))} - - )} - - - - - {isOpeningModal || isOpeningDashboardModal ? ( - - ) : ( - - - hit.resourceKind === 'dashboard' - ? handleRequestDashboardAccessModalOpen() - : handleRequestAccessModalOpen() - } - > - - - - )} - setIsOpeningModal(false)} - /> - setIsOpeningDashboardModal(false)} - /> - - - {(hit.resourceKind === 'dashboard' || hit.resourceKind === 'dataset') && - hit.upvotes !== undefined && - hit.upvotes >= 0 && ( - - - - - - - - {hit.upvotes} - - - - )} - - - ); -}; -Hit.propTypes = { - hit: PropTypes.object.isRequired -}; -export default Hit; diff --git a/frontend/src/views/Catalog/index.js b/frontend/src/views/Catalog/index.js deleted file mode 100644 index 7d05545f0..000000000 --- a/frontend/src/views/Catalog/index.js +++ /dev/null @@ -1,3 +0,0 @@ -import Catalog from './Catalog'; - -export { Catalog }; diff --git a/frontend/src/views/Datasets/DatasetData.js b/frontend/src/views/Datasets/DatasetData.js deleted file mode 100644 index ea349c398..000000000 --- a/frontend/src/views/Datasets/DatasetData.js +++ /dev/null @@ -1,23 +0,0 @@ -import PropTypes from 'prop-types'; -import React from 'react'; -import { Box } from '@mui/material'; -import DatasetTables from './DatasetTables'; -import DatasetFolders from './DatasetFolders'; - -const DatasetData = ({ dataset, isAdmin }) => ( - - - - - - - - -); - -DatasetData.propTypes = { - dataset: PropTypes.object.isRequired, - isAdmin: PropTypes.bool.isRequired -}; - -export default DatasetData; diff --git a/frontend/src/views/Datasets/DatasetSchemaViewer.js b/frontend/src/views/Datasets/DatasetSchemaViewer.js deleted file mode 100644 index d576a673e..000000000 --- a/frontend/src/views/Datasets/DatasetSchemaViewer.js +++ /dev/null @@ -1,187 +0,0 @@ -import { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink } from 'react-router-dom'; -import PropTypes from 'prop-types'; -import { - Box, - Button, - Card, - CardActions, - CardContent, - Divider, - Grid, - Table, - TableBody, - TableCell, - TableRow, - Tooltip, - Typography -} from '@mui/material'; -import { TableChartOutlined } from '@mui/icons-material'; -import CircularProgress from '@mui/material/CircularProgress'; -import { PagedResponseDefault } from '../../components/defaults'; -import getDatasetSchema from '../../api/Dataset/getDatasetSchema'; -import useClient from '../../hooks/useClient'; -import Pager from '../../components/Pager'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import * as Defaults from '../../components/defaults'; - -const DatasetSchemaItem = (props) => { - const { table } = props; - return ( - - - - - - - - - - {table && table.columns && table.columns.nodes.length > 0 ? 
( - table.columns.nodes.map((column) => ( - - - - {column.label}{' '} - {column.columnType.includes('partition') && ( - ({column.columnType}) - )} - - - - - - {column.typeName} - - - - )) - ) : ( - - - - No columns found - - - - )} - -
- ); -}; - -DatasetSchemaItem.propTypes = { - table: PropTypes.string.isRequired -}; - -const DatasetSchemaViewer = (props) => { - const { dataset } = props; - const dispatch = useDispatch(); - const client = useClient(); - const [loading, setLoading] = useState(true); - const [tables, setTables] = useState(PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.SelectListFilter); - const fetchItems = useCallback(async () => { - setLoading(true); - const response = await client.query( - getDatasetSchema({ datasetUri: dataset.datasetUri, filter }) - ); - if (!response.errors) { - setTables(response.data.getDataset.tables); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoading(false); - }, [dataset, client, dispatch, filter]); - const handlePageChange = async (event, value) => { - if (value <= tables.pages && value !== tables.page) { - await setFilter({ ...filter, page: value }); - } - }; - useEffect(() => { - if (client) { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, filter.page, fetchItems, dispatch]); - - if (loading) { - return ; - } - if (!tables) { - return null; - } - - return ( - - {tables.nodes.length > 0 ? ( - - - {tables.nodes.map((node) => ( - - ))} - - - - - - ) : ( - - No tables available for this dataset. - - )} - - ); -}; -DatasetSchemaViewer.propTypes = { - dataset: PropTypes.object.isRequired -}; -export default DatasetSchemaViewer; diff --git a/frontend/src/views/Datasets/DatasetSummary.js b/frontend/src/views/Datasets/DatasetSummary.js deleted file mode 100644 index d68a29ad3..000000000 --- a/frontend/src/views/Datasets/DatasetSummary.js +++ /dev/null @@ -1,156 +0,0 @@ -/* import Markdown from 'react-markdown/with-html'; -import { Box, Button, CircularProgress, Container, Paper } from '@mui/material'; -import PropTypes from 'prop-types'; -import { useEffect, useState } from 'react'; -import { useSnackbar } from 'notistack'; -import { styled } from '@mui/styles'; -import { LoadingButton } from '@mui/lab'; -import SimpleMDE from 'react-simplemde-editor'; -import useClient from '../../hooks/useClient'; -import { useDispatch } from '../../store'; -import { SET_ERROR } from '../../store/errorReducer'; -import getDatasetSummary from '../../api/Dataset/getDatasetSummary'; -import saveDatasetSummary from '../../api/Dataset/saveDatasetSummary'; -import PencilAlt from '../../icons/PencilAlt'; - -const MarkdownWrapper = styled('div')(({ theme }) => ({ - color: theme.palette.text.primary, - fontFamily: theme.typography.fontFamily, - '& p': { - marginBottom: theme.spacing(2) - } -})); -const DatasetSummary = (props) => { - const { dataset } = props; - const client = useClient(); - const dispatch = useDispatch(); - const { enqueueSnackbar } = useSnackbar(); - const [content, setContent] = useState(''); - const [isEditorMode, setIsEditorMode] = useState(false); - const [ready, setReady] = useState(false); - // const canEdit = ['BusinessOwner', 'Admin', 'DataSteward', 'Creator'].indexOf(dataset.userRoleForDataset) != -1; - - const handleChange = (value) => { - setContent(value); - }; - const fetchSummary = async () => { - setReady(false); - const response = await client.query(getDatasetSummary(dataset.datasetUri)); - if (!response.errors) { - setContent(response.data.getDatasetSummary === '' ? 
'No content found' : response.data.getDatasetSummary); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setReady(true); - }; - - const saveSummary = async () => { - const response = await client.mutate(saveDatasetSummary({ datasetUri: props.dataset.datasetUri, content })); - if (!response.errors) { - enqueueSnackbar('Dataset summary saved', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - setIsEditorMode(false); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }; - useEffect(() => { - if (client) { - fetchSummary().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); - } - }, [client]); - - if (!ready) { - return ; - } - return ( - - - {!isEditorMode && ( - - )} - - {!isEditorMode && ( - - - - - - - - - - )} - - {isEditorMode && ( - - - - - - {isEditorMode && ( - - - Save - - - - )} - - - - )} - - ); -}; - -DatasetSummary.propTypes = { - dataset: PropTypes.object.isRequired -}; - -export default DatasetSummary; */ diff --git a/frontend/src/views/Environments/EnvironmentConsoleAccess.js b/frontend/src/views/Environments/EnvironmentConsoleAccess.js deleted file mode 100644 index 890ab3d27..000000000 --- a/frontend/src/views/Environments/EnvironmentConsoleAccess.js +++ /dev/null @@ -1,93 +0,0 @@ -import { useState } from 'react'; -import PropTypes from 'prop-types'; -import { - Card, - CardContent, - CardHeader, - Divider, - Typography -} from '@mui/material'; -import { useSnackbar } from 'notistack'; -import getEnvironmentAssumeRoleUrl from '../../api/Environment/getEnvironmentAssumeRoleUrl'; -import generateEnvironmentAccessToken from '../../api/Environment/generateEnvironmentAccessToken'; -import useClient from '../../hooks/useClient'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; - -const EnvironmentConsoleAccess = ({ environment }) => { - const client = useClient(); - const dispatch = useDispatch(); - const { enqueueSnackbar } = useSnackbar(); - - const generateCredentials = async () => { - const response = await client.query( - generateEnvironmentAccessToken({ - environmentUri: environment.environmentUri - }) - ); - if (!response.errors) { - await navigator.clipboard.writeText( - response.data.generateEnvironmentAccessToken - ); - enqueueSnackbar('Credentials copied to clipboard', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }; - - const goToAWSConsole = async () => { - const response = await client.query( - getEnvironmentAssumeRoleUrl({ - environmentUri: environment.environmentUri - }) - ); - if (!response.errors) { - window.open(response.data.getEnvironmentAssumeRoleUrl, '_blank'); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }; - - return ( - - - - - - Account - - - {environment.AwsAccountId} - - - - - S3 bucket - - - arn:aws:s3::: - {environment.EnvironmentDefaultBucketName} - - - - - Admin Team IAM role - - - {environment.EnvironmentDefaultIAMRoleArn} - - - - ); -}; -EnvironmentConsoleAccess.propTypes = { - environment: PropTypes.object.isRequired -}; - -export default EnvironmentConsoleAccess; diff --git a/frontend/src/views/Environments/EnvironmentDatasets.js b/frontend/src/views/Environments/EnvironmentDatasets.js deleted file mode 100644 index c92bf5e7b..000000000 --- a/frontend/src/views/Environments/EnvironmentDatasets.js +++ 
/dev/null @@ -1,21 +0,0 @@ -import PropTypes from 'prop-types'; -import { Box } from '@mui/material'; -import EnvironmentSharedDatasets from './EnvironmentSharedDatasets'; -import EnvironmentOwnedDatasets from './EnvironmentOwnedDatasets'; - -const EnvironmentDatasets = ({ environment }) => ( - - - - - - - - -); - -EnvironmentDatasets.propTypes = { - environment: PropTypes.object.isRequired -}; - -export default EnvironmentDatasets; diff --git a/frontend/src/views/Environments/EnvironmentFeatures.js b/frontend/src/views/Environments/EnvironmentFeatures.js deleted file mode 100644 index af23b6ffa..000000000 --- a/frontend/src/views/Environments/EnvironmentFeatures.js +++ /dev/null @@ -1,122 +0,0 @@ -import PropTypes from 'prop-types'; -import { - Card, - CardContent, - CardHeader, - Divider, - List, - ListItem, - Typography -} from '@mui/material'; -import React from 'react'; -import Label from '../../components/Label'; - -const EnvironmentFeatures = (props) => { - const { environment, ...other } = props; - - return ( - - - - - - - - Dashboards - - - - - - - - Notebooks - - - - - - - - ML Studio - - - - - - - - Pipelines - - - - - -{/* - - Warehouses - - - - - */} - - - - ); -}; - -EnvironmentFeatures.propTypes = { - environment: PropTypes.object.isRequired -}; - -export default EnvironmentFeatures; diff --git a/frontend/src/views/Environments/EnvironmentNetworking.js b/frontend/src/views/Environments/EnvironmentNetworking.js deleted file mode 100644 index 9bda97964..000000000 --- a/frontend/src/views/Environments/EnvironmentNetworking.js +++ /dev/null @@ -1,240 +0,0 @@ -import PropTypes from 'prop-types'; -import React, { useCallback, useEffect, useState } from 'react'; -import { - Box, - Card, - CardHeader, - Chip, - Divider, - Grid, - InputAdornment, - Table, - TableBody, - TableCell, - TableHead, - TableRow, - TextField -} from '@mui/material'; -import CircularProgress from '@mui/material/CircularProgress'; -import { FaNetworkWired } from 'react-icons/fa'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import SearchIcon from '../../icons/Search'; -import Scrollbar from '../../components/Scrollbar'; -import RefreshTableMenu from '../../components/RefreshTableMenu'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import Pager from '../../components/Pager'; -import Label from '../../components/Label'; -import listEnvironmentNetworks from '../../api/Environment/listEnvironmentNetworks'; - -function VpcRow({ vpc }) { - return ( - - - {vpc.label} {vpc.default && } - - {vpc.VpcId} - - {vpc.privateSubnetIds && ( - - {vpc.privateSubnetIds.map((subnet) => ( - - ))} - - )} - - - {vpc.publicSubnetIds && ( - - {vpc.publicSubnetIds.map((subnet) => ( - - ))} - - )} - - - ); -} - -VpcRow.propTypes = { - vpc: PropTypes.any -}; -const EnvironmentNetworks = ({ environment }) => { - const client = useClient(); - const dispatch = useDispatch(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); - const [loading, setLoading] = useState(true); - const [inputValue, setInputValue] = useState(''); - - const fetchItems = useCallback(async () => { - try { - const response = await client.query( - listEnvironmentNetworks({ - environmentUri: environment.environmentUri, - filter - }) - ); - if (!response.errors) { - setItems({ ...response.data.listEnvironmentNetworks }); - } else { - dispatch({ type: SET_ERROR, error: 
response.errors[0].message }); - } - } catch (e) { - dispatch({ type: SET_ERROR, error: e.message }); - } finally { - setLoading(false); - } - }, [client, dispatch, environment, filter]); - - useEffect(() => { - if (client) { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, filter.page, dispatch, fetchItems]); - - const handleInputChange = (event) => { - setInputValue(event.target.value); - setFilter({ ...filter, term: event.target.value }); - }; - - const handleInputKeyup = (event) => { - if (event.code === 'Enter') { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }; - - const handlePageChange = async (event, value) => { - if (value <= items.pages && value !== items.page) { - await setFilter({ ...filter, page: value }); - } - }; - - return ( - - - } - title={ - - Networks - - } - /> - - - - - - - - ) - }} - onChange={handleInputChange} - onKeyUp={handleInputKeyup} - placeholder="Search" - value={inputValue} - variant="outlined" - /> - - - - - - - - - - Name - Vpc ID - Private Subnets - Public Subnets - - - {loading ? ( - - ) : ( - - {items.nodes.length > 0 ? ( - items.nodes.map((vpc) => ( - - )) - ) : ( - - No VPC found - - )} - - )} -
- {!loading && items.nodes.length > 0 && ( - - )} -
- ); -}; - -EnvironmentNetworks.propTypes = { - environment: PropTypes.object.isRequired -}; - -export default EnvironmentNetworks; diff --git a/frontend/src/views/Environments/EnvironmentRoleAddForm.js b/frontend/src/views/Environments/EnvironmentRoleAddForm.js deleted file mode 100644 index 09cd47f28..000000000 --- a/frontend/src/views/Environments/EnvironmentRoleAddForm.js +++ /dev/null @@ -1,257 +0,0 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import PropTypes from 'prop-types'; -import { useSnackbar } from 'notistack'; -import { - Autocomplete, - Box, - Card, - CardContent, - CardHeader, - CircularProgress, - Dialog, - Divider, - FormControlLabel, - FormGroup, - FormHelperText, - MenuItem, - Paper, - Switch, - TextField, - Typography -} from '@mui/material'; -import { Formik } from 'formik'; -import * as Yup from 'yup'; -import { LoadingButton } from '@mui/lab'; -import { GroupAddOutlined } from '@mui/icons-material'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import listEnvironmentGroups from '../../api/Environment/listEnvironmentGroups'; -import addConsumptionRoleToEnvironment from '../../api/Environment/addConsumptionRoleToEnvironment' -const EnvironmentRoleAddForm = (props) => { - const { environment, onClose, open, reloadRoles, ...other } = props; - const { enqueueSnackbar } = useSnackbar(); - const dispatch = useDispatch(); - const client = useClient(); - const [items, setItems] = useState([]); - const [loadingGroups, setLoadingGroups] = useState(true); - const [groupOptions, setGroupOptions] = useState([]); - const [roleError, setRoleError] = useState(null); - - const fetchGroups = async (environmentUri) => { - try { - setLoadingGroups(true) - const response = await client.query( - listEnvironmentGroups({ - filter: Defaults.SelectListFilter, - environmentUri - }) - ); - if (!response.errors) { - setGroupOptions( - response.data.listEnvironmentGroups.nodes.map((g) => ({ - value: g.groupUri, - label: g.groupUri - })) - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (e) { - dispatch({ type: SET_ERROR, error: e.message }); - } finally { - setLoadingGroups(false); - } - }; - - useEffect(() => { - if (client && environment) { - fetchGroups( - environment.environmentUri - ).catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, environment, dispatch]); - - async function submit(values, setStatus, setSubmitting, setErrors) { - try { - const response = await client.mutate(addConsumptionRoleToEnvironment - ({ - groupUri: values.groupUri, - consumptionRoleName: values.consumptionRoleName, - IAMRoleArn: values.IAMRoleArn, - environmentUri: environment.environmentUri - }) - ); - if (!response.errors) { - setStatus({ success: true }); - setSubmitting(false); - enqueueSnackbar('IAM role added to environment', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - if (reloadRoles) { - reloadRoles(); - } - if (onClose) { - onClose(); - } - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - console.error(err); - setStatus({ success: false }); - setErrors({ submit: err.message }); - setSubmitting(false); - dispatch({ type: SET_ERROR, error: err.message }); - } - } - - if (!environment) { - return null; - } - - if (loadingGroups) { - return ; - } 
- - return ( - - - - Add a consumption IAM role to environment {environment.label} - - - An IAM consumption role is owned by the selected Team. The owners team request access on behalf of this IAM role, which can be used by downstream applications. - - - { - await submit(values, setStatus, setSubmitting, setErrors); - }} - > - {({ - errors, - handleChange, - handleSubmit, - isSubmitting, - setFieldValue, - touched, - values - }) => ( -
- - - - - - - - - {groupOptions.map((group) => ( - - {group.label} - - ))} - - - - - } - color="primary" - disabled={isSubmitting} - type="submit" - variant="contained" - > - Add Consumption Role - - - -
- )} -
- ); -}; - -EnvironmentRoleAddForm.propTypes = { - environment: PropTypes.object.isRequired, - onClose: PropTypes.func, - open: PropTypes.bool.isRequired, - reloadRoles: PropTypes.func -}; - -export default EnvironmentRoleAddForm; diff --git a/frontend/src/views/Environments/EnvironmentTeams.js b/frontend/src/views/Environments/EnvironmentTeams.js deleted file mode 100644 index acc9a2ff1..000000000 --- a/frontend/src/views/Environments/EnvironmentTeams.js +++ /dev/null @@ -1,604 +0,0 @@ -import PropTypes from 'prop-types'; -import React, { useCallback, useEffect, useState } from 'react'; -import { - Box, - Button, - Card, - CardHeader, - Chip, - Divider, - Grid, - IconButton, - InputAdornment, - Table, - TableBody, - TableCell, - TableHead, - TableRow, - TextField -} from '@mui/material'; -import CircularProgress from '@mui/material/CircularProgress'; -import { - CopyAllOutlined, - DeleteOutlined, - GroupAddOutlined, - SupervisedUserCircleRounded -} from '@mui/icons-material'; -import { useSnackbar } from 'notistack'; -import * as FaIcons from 'react-icons/fa'; -import { LoadingButton } from '@mui/lab'; -import { useTheme } from '@mui/styles'; -import { HiUserRemove } from 'react-icons/hi'; -import { VscChecklist } from 'react-icons/vsc'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import SearchIcon from '../../icons/Search'; -import Scrollbar from '../../components/Scrollbar'; -import RefreshTableMenu from '../../components/RefreshTableMenu'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import Pager from '../../components/Pager'; -import Label from '../../components/Label'; -import EnvironmentTeamInviteForm from './EnvironmentTeamInviteForm'; -import EnvironmentRoleAddForm from './EnvironmentRoleAddForm'; -import removeGroupFromEnvironment from '../../api/Environment/removeGroup'; -import removeConsumptionRoleFromEnvironment from '../../api/Environment/removeConsumptionRole'; -import getEnvironmentAssumeRoleUrl from '../../api/Environment/getEnvironmentAssumeRoleUrl'; -import EnvironmentTeamInviteEditForm from './EnvironmentTeamInviteEditForm'; -import generateEnvironmentAccessToken from '../../api/Environment/generateEnvironmentAccessToken'; -import listAllEnvironmentGroups from '../../api/Environment/listAllEnvironmentGroups'; -import listAllEnvironmentConsumptionRoles from '../../api/Environment/listAllEnvironmentConsumptionRoles'; - -function TeamRow({ team, environment, fetchItems }) { - const client = useClient(); - const dispatch = useDispatch(); - const theme = useTheme(); - const { enqueueSnackbar } = useSnackbar(); - const [accessingConsole, setAccessingConsole] = useState(false); - const [loadingCreds, setLoadingCreds] = useState(false); - const [isTeamEditModalOpen, setIsTeamEditModalOpen] = useState(false); - const handleTeamEditModalClose = () => { - setIsTeamEditModalOpen(false); - }; - - const handleTeamEditModalOpen = () => { - setIsTeamEditModalOpen(true); - }; - - const removeGroup = async (groupUri) => { - try { - const response = await client.mutate( - removeGroupFromEnvironment({ - environmentUri: environment.environmentUri, - groupUri - }) - ); - if (!response.errors) { - enqueueSnackbar('Team removed from environment', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - if (fetchItems) { - fetchItems(); - } - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (e) { - 
dispatch({ type: SET_ERROR, error: e.message }); - } - }; - - const getConsoleLink = async (groupUri) => { - setAccessingConsole(true); - const response = await client.query( - getEnvironmentAssumeRoleUrl({ - environmentUri: environment.environmentUri, - groupUri - }) - ); - if (!response.errors) { - window.open(response.data.getEnvironmentAssumeRoleUrl, '_blank'); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setAccessingConsole(false); - }; - - const generateCredentials = async (groupUri) => { - setLoadingCreds(true); - const response = await client.query( - generateEnvironmentAccessToken({ - environmentUri: environment.environmentUri, - groupUri - }) - ); - if (!response.errors) { - await navigator.clipboard.writeText( - response.data.generateEnvironmentAccessToken - ); - enqueueSnackbar('Credentials copied to clipboard', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoadingCreds(false); - }; - return ( - - - {team.groupUri}{' '} - {team.groupUri === environment.SamlGroupName && ( - - )} - - {team.environmentIAMRoleArn} - {team.environmentAthenaWorkGroup} - - {team.groupUri !== environment.SamlGroupName ? ( - handleTeamEditModalOpen(team)}> - - - ) : ( - - )} - {isTeamEditModalOpen && ( - - )} - - - - getConsoleLink(team.groupUri)} - > - - - generateCredentials(team.groupUri)} - > - - - {team.groupUri !== environment.SamlGroupName && ( - removeGroup(team.groupUri)}> - - - )} - - - - ); -} - -TeamRow.propTypes = { - team: PropTypes.any, - environment: PropTypes.any, - fetchItems: PropTypes.any -}; - -const EnvironmentTeams = ({ environment }) => { - const client = useClient(); - const dispatch = useDispatch(); - const { enqueueSnackbar } = useSnackbar(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [roles, setRoles] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); - const [filterRoles, setFilterRoles] = useState(Defaults.DefaultFilter); - const [loading, setLoading] = useState(true); - const [inputValue, setInputValue] = useState(''); - const [inputValueRoles, setInputValueRoles] = useState(''); - const [isTeamInviteModalOpen, setIsTeamInviteModalOpen] = useState(false); - const [isAddRoleModalOpen, setIsAddRoleModalOpen] = useState(false); - const handleTeamInviteModalOpen = () => { - setIsTeamInviteModalOpen(true); - }; - const handleTeamInviteModalClose = () => { - setIsTeamInviteModalOpen(false); - }; - const handleAddRoleModalOpen = () => { - setIsAddRoleModalOpen(true); - }; - const handleAddRoleModalClose = () => { - setIsAddRoleModalOpen(false); - }; - - const fetchItems = useCallback(async () => { - try { - const response = await client.query( - listAllEnvironmentGroups({ - environmentUri: environment.environmentUri, - filter - }) - ); - if (!response.errors) { - setItems({ ...response.data.listAllEnvironmentGroups }); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (e) { - dispatch({ type: SET_ERROR, error: e.message }); - } finally { - setLoading(false); - } - }, [client, dispatch, environment, filter]); - - const fetchRoles= useCallback(async () => { - try { - const response = await client.query( - listAllEnvironmentConsumptionRoles({ - environmentUri: environment.environmentUri, - filterRoles - }) - ); - if (!response.errors) { - setRoles({ 
...response.data.listAllEnvironmentConsumptionRoles }); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (e) { - dispatch({ type: SET_ERROR, error: e.message }); - } finally { - setLoading(false); - } - }, [client, dispatch, environment, filterRoles]); - - const removeConsumptionRole = async (consumptionGroupUri) => { - console.log(consumptionGroupUri) - try { - const response = await client.mutate( - removeConsumptionRoleFromEnvironment({ - environmentUri: environment.environmentUri, - consumptionRoleUri: consumptionGroupUri - }) - ); - if (!response.errors) { - enqueueSnackbar('Consumption Role removed from environment', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - fetchRoles(); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (e) { - dispatch({ type: SET_ERROR, error: e.message }); - } - }; - - - useEffect(() => { - if (client) { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - fetchRoles().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, filter.page, filterRoles.page, fetchItems, fetchRoles, dispatch]); - - const handleInputChange = (event) => { - setInputValue(event.target.value); - setFilter({ ...filter, term: event.target.value }); - }; - - const handleInputKeyup = (event) => { - if (event.code === 'Enter') { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }; - - const handlePageChange = async (event, value) => { - if (value <= items.pages && value !== items.page) { - await setFilter({ ...filter, page: value }); - } - }; - - const handleInputChangeRoles = (event) => { - setInputValueRoles(event.target.value); - setFilterRoles({ ...filterRoles, term: event.target.value }); - }; - - const handleInputKeyupRoles = (event) => { - if (event.code === 'Enter') { - fetchRoles().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }; - - const handlePageChangeRoles = async (event, value) => { - if (value <= roles.pages && value !== roles.page) { - await setFilterRoles({ ...filterRoles, page: value }); - } - }; - - return ( - - - - } - title={ - - {' '} - Environment Teams - - } - /> - - - - - - - - ) - }} - onChange={handleInputChange} - onKeyUp={handleInputKeyup} - placeholder="Search" - value={inputValue} - variant="outlined" - /> - - - - - {isTeamInviteModalOpen && ( - - )} - - - - - - - - Name - IAM Role - Athena WorkGroup - Permissions - Actions - - - {loading ? ( - - ) : ( - - {items.nodes.length > 0 ? ( - items.nodes.map((team) => ( - - )) - ) : ( - - No Team invited - - )} - - )} -
- {!loading && items.nodes.length > 0 && ( - - )} -
-
-
-
- - - } - title={ - - {' '} - Environment Consumption IAM roles - - } - /> - - - - - - - - ) - }} - onChange={handleInputChangeRoles} - onKeyUp={handleInputKeyupRoles} - placeholder="Search" - value={inputValueRoles} - variant="outlined" - /> - - - - - {isAddRoleModalOpen && ( - - )} - - - - - - - - Name - IAM Role - Role Owner - Action - - - {loading ? ( - - ) : ( - - {roles.nodes.length > 0 ? ( - roles.nodes.map((role) => ( - - {role.consumptionRoleName} - {role.IAMRoleArn} - {role.groupUri} - - removeConsumptionRole(role.consumptionRoleUri)}> - - - - - )) - ) : ( - - No Consumption IAM Role added - - )} - - )} -
- {!loading && roles.nodes.length > 0 && ( - - )} -
-
-
-
-
- ); -}; - -EnvironmentTeams.propTypes = { - environment: PropTypes.object.isRequired -}; - -export default EnvironmentTeams; diff --git a/frontend/src/views/Environments/EnvironmentWarehouses.js b/frontend/src/views/Environments/EnvironmentWarehouses.js deleted file mode 100644 index 15670f518..000000000 --- a/frontend/src/views/Environments/EnvironmentWarehouses.js +++ /dev/null @@ -1,218 +0,0 @@ -import PropTypes from 'prop-types'; -import { useCallback, useEffect, useState } from 'react'; -import { - Box, - Button, - Card, - CardHeader, - Divider, - Grid, - IconButton, - InputAdornment, - Table, - TableBody, - TableCell, - TableHead, - TableRow, - TextField -} from '@mui/material'; -import CircularProgress from '@mui/material/CircularProgress'; -import { useNavigate } from 'react-router'; -import { CloudDownloadOutlined } from '@mui/icons-material'; -import { Link as RouterLink } from 'react-router-dom'; -import { GoDatabase } from 'react-icons/go'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import SearchIcon from '../../icons/Search'; -import Scrollbar from '../../components/Scrollbar'; -import StackStatus from '../../components/StackStatus'; -import ArrowRightIcon from '../../icons/ArrowRight'; -import RefreshTableMenu from '../../components/RefreshTableMenu'; -import listEnvironmentClusters from '../../api/RedshiftCluster/listEnvironmentClusters'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import Pager from '../../components/Pager'; -import PlusIcon from '../../icons/Plus'; - -const EnvironmentWarehouses = ({ environment }) => { - const client = useClient(); - const navigate = useNavigate(); - const dispatch = useDispatch(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); - const [loading, setLoading] = useState(null); - const [inputValue, setInputValue] = useState(''); - - const fetchItems = useCallback(async () => { - try { - const response = await client.query( - listEnvironmentClusters(environment.environmentUri, filter) - ); - if (!response.errors) { - setItems({ ...response.data.listEnvironmentClusters }); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (e) { - dispatch({ type: SET_ERROR, error: e.message }); - } finally { - setLoading(false); - } - }, [client, dispatch, filter, environment]); - - useEffect(() => { - if (client) { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, filter.page, dispatch, fetchItems]); - - const handleInputChange = (event) => { - setInputValue(event.target.value); - setFilter({ ...filter, term: event.target.value }); - }; - - const handleInputKeyup = (event) => { - if (event.code === 'Enter') { - fetchItems(); - } - }; - - const handlePageChange = async (event, value) => { - if (value <= items.pages && value !== items.page) { - await setFilter({ ...filter, page: value }); - } - }; - - return ( - - } - title={ - - Redshift Clusters - - } - /> - - - - - - - - ) - }} - onChange={handleInputChange} - onKeyUp={handleInputKeyup} - placeholder="Search" - value={inputValue} - variant="outlined" - /> - - - - - - - - - - - - - Name - Endpoint - Status - Actions - - - {loading ? ( - - ) : ( - - {items.nodes.length > 0 ? 
( - items.nodes.map((warehouse) => ( - - {warehouse.label} - {warehouse.endpoint} - - - - - { - navigate( - `/console/warehouse/${warehouse.clusterUri}` - ); - }} - > - - - - - )) - ) : ( - - No Redshift cluster found - - )} - - )} -
- {!loading && items.nodes.length > 0 && ( - - )} -
-
-
- ); -}; - -EnvironmentWarehouses.propTypes = { - environment: PropTypes.object.isRequired -}; - -export default EnvironmentWarehouses; diff --git a/frontend/src/views/KeyValueTags/KeyValueTagList.js b/frontend/src/views/KeyValueTags/KeyValueTagList.js deleted file mode 100644 index ee3ab99d3..000000000 --- a/frontend/src/views/KeyValueTags/KeyValueTagList.js +++ /dev/null @@ -1,141 +0,0 @@ -import PropTypes from 'prop-types'; -import React, { useCallback, useEffect, useState } from 'react'; -import { - Box, - Button, - Card, - CardHeader, - CircularProgress, - Divider, - Table, - TableBody, - TableCell, - TableHead, - TableRow, - Switch -} from '@mui/material'; -import useClient from '../../hooks/useClient'; -import Scrollbar from '../../components/Scrollbar'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import { useSnackbar } from 'notistack'; -import KeyValueTagUpdateForm from './KeyValueTagUpdateForm'; -import listKeyValueTags from '../../api/KeyValueTags/listKeyValueTags'; -import PencilAlt from '../../icons/PencilAlt'; - -const KeyValueTagList = ({ targetUri, targetType }) => { - const client = useClient(); - const dispatch = useDispatch(); - const [items, setItems] = useState([]); - const [openUpdateForm, setOpenUpdateForm] = useState(false); - const [loading, setLoading] = useState(null); - - const fetchItems = useCallback(async () => { - setLoading(true); - const response = await client.query( - listKeyValueTags(targetUri, targetType) - ); - if (!response.errors) { - setItems(response.data.listKeyValueTags); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoading(false); - }, [client, dispatch, targetType, targetUri]); - - const openUpdate = () => { - setOpenUpdateForm(true); - }; - - const closeUpdate = () => { - fetchItems().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); - setOpenUpdateForm(false); - }; - - - useEffect(() => { - if (client) { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, dispatch, fetchItems]); - - if (loading) { - return ; - } - - return ( - - {items && ( - - {openUpdateForm ? ( - 0 ? items : [{ key: '', value: '', cascade: false }]} - closeUpdate={closeUpdate} - /> - ) : ( - - - - - {items && items.length > 0 && ( - - Key-Value Tags} /> - - - - - - - Key - Value - {targetType == 'environment' && (Cascade enabled)} - - - - {items.map((tag) => ( - - {tag.key || '-'} - {tag.value || '-'} - {targetType == 'environment' && ( - - )} - - ))} - -
-
-
- - )} -
- )} -
- )} - - ); -}; - -KeyValueTagList.propTypes = { - targetType: PropTypes.string.isRequired, - targetUri: PropTypes.string.isRequired -}; - -export default KeyValueTagList; diff --git a/frontend/src/views/MLStudio/NotebookCreateForm.js b/frontend/src/views/MLStudio/NotebookCreateForm.js deleted file mode 100644 index c22ede279..000000000 --- a/frontend/src/views/MLStudio/NotebookCreateForm.js +++ /dev/null @@ -1,425 +0,0 @@ -import { Link as RouterLink, useNavigate } from 'react-router-dom'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; -import { useSnackbar } from 'notistack'; -import { - Box, - Breadcrumbs, - Button, - Card, - CardContent, - CardHeader, - CircularProgress, - Container, - FormHelperText, - Grid, - Link, - MenuItem, - TextField, - Typography -} from '@mui/material'; -import { Helmet } from 'react-helmet-async'; -import { LoadingButton } from '@mui/lab'; -import { useCallback, useEffect, useState } from 'react'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import useSettings from '../../hooks/useSettings'; -import createSagemakerStudioUserProfile from '../../api/SagemakerStudio/createSagemakerStudioUserProfile'; -import listEnvironments from '../../api/Environment/listEnvironments'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import ChipInput from '../../components/TagsInput'; -import listEnvironmentGroups from '../../api/Environment/listEnvironmentGroups'; -import * as Defaults from '../../components/defaults'; - -const NotebookCreateForm = (props) => { - const navigate = useNavigate(); - const { enqueueSnackbar } = useSnackbar(); - const dispatch = useDispatch(); - const client = useClient(); - const { settings } = useSettings(); - const [loading, setLoading] = useState(true); - const [groupOptions, setGroupOptions] = useState([]); - const [environmentOptions, setEnvironmentOptions] = useState([]); - const fetchEnvironments = useCallback(async () => { - setLoading(true); - const response = await client.query( - listEnvironments({ filter: Defaults.SelectListFilter }) - ); - if (!response.errors) { - setEnvironmentOptions( - response.data.listEnvironments.nodes.map((e) => ({ - ...e, - value: e.environmentUri, - label: e.label - })) - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoading(false); - }, [client, dispatch]); - const fetchGroups = async (environmentUri) => { - try { - const response = await client.query( - listEnvironmentGroups({ - filter: Defaults.SelectListFilter, - environmentUri - }) - ); - if (!response.errors) { - setGroupOptions( - response.data.listEnvironmentGroups.nodes.map((g) => ({ - value: g.groupUri, - label: g.groupUri - })) - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (e) { - dispatch({ type: SET_ERROR, error: e.message }); - } - }; - useEffect(() => { - if (client) { - fetchEnvironments().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, dispatch, fetchEnvironments]); - - async function submit(values, setStatus, setSubmitting, setErrors) { - try { - const response = await client.mutate( - createSagemakerStudioUserProfile({ - label: values.label, - environmentUri: values.environment.environmentUri, - description: values.description, - SamlAdminGroupName: values.SamlAdminGroupName, - tags: values.tags - }) - ); - setStatus({ success: 
true }); - setSubmitting(false); - if (!response.errors) { - setStatus({ success: true }); - setSubmitting(false); - enqueueSnackbar('ML Studio user profile creation started', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - navigate( - `/console/mlstudio/${response.data.createSagemakerStudioUserProfile.sagemakerStudioUserProfileUri}` - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - console.error(err); - setStatus({ success: false }); - setErrors({ submit: err.message }); - setSubmitting(false); - } - } - if (loading) { - return ; - } - - return ( - <> - - ML Studio: Notebook Create | data.all - - - - - - - Create a new ML Studio profile - - } - sx={{ mt: 1 }} - > - - Play - - - ML Studio - - - Create - - - - - - - - - - - { - await submit(values, setStatus, setSubmitting, setErrors); - }} - > - {({ - errors, - handleBlur, - handleChange, - handleSubmit, - isSubmitting, - setFieldValue, - touched, - values - }) => ( -
- - - - - - - - - - {touched.description && errors.description && ( - - - {errors.description} - - - )} - - - - - - - {groupOptions.map((group) => ( - - {group.label} - - ))} - - - - - { - setFieldValue('tags', [...chip]); - }} - /> - - - - - - - - - { - setFieldValue('SamlGroupName', ''); - fetchGroups( - event.target.value.environmentUri - ).catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - setFieldValue('environment', event.target.value); - }} - select - value={values.environment} - variant="outlined" - > - {environmentOptions.map((environment) => ( - - {environment.label} - - ))} - - - - - - - - - - {errors.submit && ( - - {errors.submit} - - )} - - - Create ML Studio profile - - - - -
- )} -
-
-
-
- - ); -}; - -export default NotebookCreateForm; diff --git a/frontend/src/views/MLStudio/NotebookEditForm.js b/frontend/src/views/MLStudio/NotebookEditForm.js deleted file mode 100644 index 4164db402..000000000 --- a/frontend/src/views/MLStudio/NotebookEditForm.js +++ /dev/null @@ -1,348 +0,0 @@ -import { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; -import { useSnackbar } from 'notistack'; -import { - Box, - Breadcrumbs, - Button, - Card, - CardContent, - CardHeader, - CircularProgress, - Container, - FormHelperText, - Grid, - Link, - TextField, - Typography -} from '@mui/material'; -import { Helmet } from 'react-helmet-async'; -import { LoadingButton } from '@mui/lab'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import useSettings from '../../hooks/useSettings'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import ChipInput from '../../components/TagsInput'; -import getSagemakerStudioUserProfile from '../../api/SagemakerStudio/getSagemakerStudioUserProfile'; -import updateUserProfile from '../../api/UserProfile/updateUserProfile'; - -const PipelineEditForm = (props) => { - const dispatch = useDispatch(); - const navigate = useNavigate(); - const params = useParams(); - const { enqueueSnackbar } = useSnackbar(); - const client = useClient(); - const { settings } = useSettings(); - const [loading, setLoading] = useState(true); - const [notebook, setNotebook] = useState(null); - - const fetchItem = useCallback(async () => { - setLoading(true); - const response = await client.query( - getSagemakerStudioUserProfile(params.uri) - ); - if ( - !response.errors && - response.data.getSagemakerStudioUserProfile !== null - ) { - setNotebook(response.data.getSagemakerStudioUserProfile); - } else { - const error = response.errors - ? 
response.errors[0].message - : 'Notebook not found'; - dispatch({ type: SET_ERROR, error }); - } - setLoading(false); - }, [client, dispatch, params.uri]); - - useEffect(() => { - if (client) { - fetchItem().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); - } - }, [client, dispatch, fetchItem]); - - async function submit(values, setStatus, setSubmitting, setErrors) { - try { - const response = await client.mutate( - updateUserProfile({ - input: { - sagemakerStudioUserProfileUri: - notebook.sagemakerStudioUserProfileUri, - description: values.description, - label: values.label, - tags: values.tags - } - }) - ); - if (!response.errors) { - setStatus({ success: true }); - setSubmitting(false); - enqueueSnackbar('Notebook updated', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - navigate( - `/console/mlstudio/${response.data.updateUserProfile.sagemakerStudioUserProfileUri}` - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - console.error(err); - setStatus({ success: false }); - setErrors({ submit: err.message }); - setSubmitting(false); - dispatch({ type: SET_ERROR, error: err.message }); - } - } - - if (loading || !(notebook && notebook.environment)) { - return ; - } - - return ( - <> - - Dataset: Notebook Update | data.all - - - - - - - Edit notebook {notebook.label} - - } - sx={{ mt: 1 }} - > - - Play - - - ML Studio - - - {notebook.label} - - - - - - - - - - - { - await submit(values, setStatus, setSubmitting, setErrors); - }} - > - {({ - errors, - handleBlur, - handleChange, - handleSubmit, - isSubmitting, - setFieldValue, - touched, - values - }) => ( -
- - - - - - - - - - {touched.description && errors.description && ( - - - {errors.description} - - - )} - - - - - - - - - - { - setFieldValue('tags', [...chip]); - }} - /> - - - - - - - - - - - - - - - - - - - - Save - - - - -
- )} -
-
-
-
- - ); -}; - -export default PipelineEditForm; diff --git a/frontend/src/views/MLStudio/NotebookList.js b/frontend/src/views/MLStudio/NotebookList.js deleted file mode 100644 index d9fd45670..000000000 --- a/frontend/src/views/MLStudio/NotebookList.js +++ /dev/null @@ -1,173 +0,0 @@ -import { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink } from 'react-router-dom'; -import { - Box, - Breadcrumbs, - Button, - Container, - Grid, - Link, - Typography -} from '@mui/material'; -import CircularProgress from '@mui/material/CircularProgress'; -import { Helmet } from 'react-helmet-async'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import PlusIcon from '../../icons/Plus'; -import useSettings from '../../hooks/useSettings'; -import SearchInput from '../../components/SearchInput'; -import Pager from '../../components/Pager'; -import { useDispatch } from '../../store'; -import { SET_ERROR } from '../../store/errorReducer'; -import NotebookListItem from './NotebookListItem'; -import listSagemakerStudioUserProfiles from '../../api/SagemakerStudio/listSagemakerStudioUserProfiles'; - -function NotebookPageHeader() { - return ( - - - - ML Studio - - } - sx={{ mt: 1 }} - > - - Play - - - ML Studio - - - - - - - - - - ); -} - -const NotebookList = () => { - const dispatch = useDispatch(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); - const { settings } = useSettings(); - const [inputValue, setInputValue] = useState(''); - const [loading, setLoading] = useState(true); - const client = useClient(); - - const fetchItems = useCallback(async () => { - setLoading(true); - const response = await client.query( - listSagemakerStudioUserProfiles(filter) - ); - if (!response.errors) { - setItems(response.data.listSagemakerStudioUserProfiles); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoading(false); - }, [client, dispatch, filter]); - - const handleInputChange = (event) => { - setInputValue(event.target.value); - setFilter({ ...filter, term: event.target.value }); - }; - - const handleInputKeyup = (event) => { - if (event.code === 'Enter') { - setFilter({page: 1, term: event.target.value}); - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }; - - const handlePageChange = async (event, value) => { - if (value <= items.pages && value !== items.page) { - await setFilter({ ...filter, page: value }); - } - }; - - useEffect(() => { - if (client) { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, filter.page, dispatch, fetchItems]); - - return ( - <> - - ML Studio | data.all - - - - - - - - - - {loading ? 
( - - ) : ( - - - {items.nodes.map((node) => ( - - ))} - - - - - )} - - - - - ); -}; - -export default NotebookList; diff --git a/frontend/src/views/MLStudio/NotebookListItem.js b/frontend/src/views/MLStudio/NotebookListItem.js deleted file mode 100644 index b15d7d285..000000000 --- a/frontend/src/views/MLStudio/NotebookListItem.js +++ /dev/null @@ -1,265 +0,0 @@ -import { - Box, - Button, - Card, - Divider, - Grid, - Link, - Tooltip, - Typography -} from '@mui/material'; -import * as FiIcons from 'react-icons/fi'; -import * as FaIcons from 'react-icons/fa'; -import { Link as RouterLink } from 'react-router-dom'; -import PropTypes from 'prop-types'; -import { useNavigate } from 'react-router'; -import { FiCodesandbox } from 'react-icons/fi'; -import React from 'react'; -import IconAvatar from '../../components/IconAvatar'; -import StackStatus from '../../components/StackStatus'; -import Label from '../../components/Label'; -import useCardStyle from '../../hooks/useCardStyle'; - -const NotebookListItem = (props) => { - const { notebook } = props; - const classes = useCardStyle(); - const navigate = useNavigate(); - return ( - - - - - } /> - - { - navigate( - `/console/mlstudio/${notebook.sagemakerStudioUserProfileUri}` - ); - }} - sx={{ - width: '99%', - whiteSpace: 'nowrap', - alignItems: 'left', - overflow: 'hidden', - textOverflow: 'ellipsis', - WebkitBoxOrient: 'vertical', - WebkitLineClamp: 2 - }} - > - - {notebook.label} - - - - by{' '} - - {notebook.owner} - - - - - - - - - {notebook.description || 'No description provided'} - - - - - - - - Role - - - - - - - - - - - - Team - - - - - - {notebook.environment?.SamlGroupName || '-'} - - - - - - - - - - Account - - - - - {notebook.environment.AwsAccountId} - - - - - - - - - Region - - - - - {notebook.environment.region} - - - - - - - - - Status - - - - - - - - - - - - - - - - - - - - - ); -}; -NotebookListItem.propTypes = { - notebook: PropTypes.object.isRequired -}; -export default NotebookListItem; diff --git a/frontend/src/views/MLStudio/NotebookOverview.js b/frontend/src/views/MLStudio/NotebookOverview.js deleted file mode 100644 index 24151b80a..000000000 --- a/frontend/src/views/MLStudio/NotebookOverview.js +++ /dev/null @@ -1,43 +0,0 @@ -import { Box, Grid } from '@mui/material'; -import PropTypes from 'prop-types'; -import ObjectBrief from '../../components/ObjectBrief'; -import ObjectMetadata from '../../components/ObjectMetadata'; - -const NotebookOverview = (props) => { - const { notebook, ...other } = props; - - return ( - - - - 0 ? 
notebook.tags : ['-'] - } - /> - - - - - - - ); -}; - -NotebookOverview.propTypes = { - notebook: PropTypes.object.isRequired -}; - -export default NotebookOverview; diff --git a/frontend/src/views/MLStudio/NotebookView.js b/frontend/src/views/MLStudio/NotebookView.js deleted file mode 100644 index be113585a..000000000 --- a/frontend/src/views/MLStudio/NotebookView.js +++ /dev/null @@ -1,261 +0,0 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useParams } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; -import { - Box, - Breadcrumbs, - Button, - CircularProgress, - Container, - Divider, - Grid, - Link, - Tab, - Tabs, - Typography -} from '@mui/material'; -import { FaAws, FaTrash } from 'react-icons/fa'; -import { SiJupyter } from 'react-icons/si'; -import { useNavigate } from 'react-router'; -import { LoadingButton } from '@mui/lab'; -import { useSnackbar } from 'notistack'; -import { Info } from '@mui/icons-material'; -import useSettings from '../../hooks/useSettings'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import Stack from '../Stack/Stack'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import DeleteObjectWithFrictionModal from '../../components/DeleteObjectWithFrictionModal'; -import getSagemakerStudioUserProfile from '../../api/SagemakerStudio/getSagemakerStudioUserProfile'; -import deleteSagemakerStudioUserProfile from '../../api/SagemakerStudio/deleteSagemakerStudioUserProfile'; -import NotebookOverview from './NotebookOverview'; -import getSagemakerStudioUserProfilePresignedUrl from '../../api/SagemakerStudio/getSagemakerStudioUserProfilePresignedUrl'; -import StackStatus from '../Stack/StackStatus'; - -const tabs = [ - { label: 'Overview', value: 'overview', icon: }, - { label: 'Stack', value: 'stack', icon: } -]; - -const NotebookView = () => { - const dispatch = useDispatch(); - const { settings } = useSettings(); - const { enqueueSnackbar } = useSnackbar(); - const params = useParams(); - const client = useClient(); - const navigate = useNavigate(); - const [currentTab, setCurrentTab] = useState('overview'); - const [loading, setLoading] = useState(true); - const [isDeleteObjectModalOpen, setIsDeleteObjectModalOpen] = useState(false); - const [notebook, setNotebook] = useState(null); - const [stack, setStack] = useState(null); - const [isOpeningSagemakerStudio, setIsOpeningSagemakerStudio] = - useState(false); - - const handleDeleteObjectModalOpen = () => { - setIsDeleteObjectModalOpen(true); - }; - - const handleDeleteObjectModalClose = () => { - setIsDeleteObjectModalOpen(false); - }; - - const fetchItem = useCallback(async () => { - setLoading(true); - const response = await client.query( - getSagemakerStudioUserProfile(params.uri) - ); - if (!response.errors) { - setNotebook(response.data.getSagemakerStudioUserProfile); - if (stack) { - setStack(response.data.getSagemakerStudioUserProfile.stack); - } - } else { - const error = response.errors - ? 
response.errors[0].message - : 'Notebook not found'; - dispatch({ type: SET_ERROR, error }); - } - setLoading(false); - }, [client, dispatch, params.uri, stack]); - - const getNotebookPresignedUrl = async () => { - setIsOpeningSagemakerStudio(true); - const response = await client.query( - getSagemakerStudioUserProfilePresignedUrl( - notebook.sagemakerStudioUserProfileUri - ) - ); - if (!response.errors) { - window.open(response.data.getSagemakerStudioUserProfilePresignedUrl); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setIsOpeningSagemakerStudio(false); - }; - - useEffect(() => { - if (client) { - fetchItem().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); - } - }, [client, dispatch, fetchItem]); - - const handleTabsChange = (event, value) => { - setCurrentTab(value); - }; - const removeNotebook = async (deleteFromAWS = false) => { - const response = await client.mutate( - deleteSagemakerStudioUserProfile( - notebook.sagemakerStudioUserProfileUri, - deleteFromAWS - ) - ); - if (!response.errors) { - handleDeleteObjectModalClose(); - enqueueSnackbar('ML Studio Profile deleted', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - navigate('/console/mlstudio'); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }; - - if (loading) { - return ; - } - if (!notebook) { - return null; - } - - return ( - <> - - ML Studio: Profile Details | DataStudio - - - - - - - - Notebook {notebook.label} - - } - sx={{ mt: 1 }} - > - - Play - - - ML Studio - - - {notebook.label} - - - - - - } - sx={{ m: 1 }} - onClick={getNotebookPresignedUrl} - type="button" - variant="outlined" - > - Open JupyterLab - - - - - - - - {tabs.map((tab) => ( - - ))} - - - - - {currentTab === 'overview' && ( - - )} - {currentTab === 'stack' && ( - - )} - - - - - - ); -}; - -export default NotebookView; diff --git a/frontend/src/views/Networks/NetworkList.js b/frontend/src/views/Networks/NetworkList.js deleted file mode 100644 index a845e6d0c..000000000 --- a/frontend/src/views/Networks/NetworkList.js +++ /dev/null @@ -1,303 +0,0 @@ -import PropTypes from 'prop-types'; -import React, { useCallback, useEffect, useState } from 'react'; -import { - Box, - Card, - CardHeader, - Chip, - Divider, - Grid, - IconButton, - InputAdornment, - Table, - TableBody, - TableCell, - TableHead, - TableRow, - TextField -} from '@mui/material'; -import CircularProgress from '@mui/material/CircularProgress'; -import { FaNetworkWired } from 'react-icons/fa'; -import { LoadingButton } from '@mui/lab'; -import { DeleteOutlined } from '@mui/icons-material'; -import { useSnackbar } from 'notistack'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import SearchIcon from '../../icons/Search'; -import Scrollbar from '../../components/Scrollbar'; -import RefreshTableMenu from '../../components/RefreshTableMenu'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import Pager from '../../components/Pager'; -import Label from '../../components/Label'; -import listEnvironmentNetworks from '../../api/Environment/listEnvironmentNetworks'; -import PlusIcon from '../../icons/Plus'; -import NetworkCreateModal from './NetworkCreateModal'; -import deleteNetwork from '../../api/Vpc/deleteNetwork'; - -function VpcRow({ vpc, deleteVpcNetwork }) { - return ( - - - {vpc.label} {vpc.default && } - - {vpc.VpcId} - - {vpc.privateSubnetIds && ( 
- - {vpc.privateSubnetIds.map((subnet) => ( - - ))} - - )} - - - {vpc.publicSubnetIds && ( - - {vpc.publicSubnetIds.map((subnet) => ( - - ))} - - )} - - - { - deleteVpcNetwork(vpc.vpcUri); - }} - > - - - - - ); -} - -VpcRow.propTypes = { - vpc: PropTypes.any, - deleteVpcNetwork: PropTypes.func -}; -const EnvironmentNetworks = ({ environment }) => { - const client = useClient(); - const dispatch = useDispatch(); - const { enqueueSnackbar } = useSnackbar(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); - const [loading, setLoading] = useState(true); - const [inputValue, setInputValue] = useState(''); - const [isNetworkCreateOpen, setIsNetworkCreateOpen] = useState(false); - const handleNetworkCreateModalOpen = () => { - setIsNetworkCreateOpen(true); - }; - - const handleNetworkCreateModalClose = () => { - setIsNetworkCreateOpen(false); - }; - - const fetchItems = useCallback(async () => { - try { - const response = await client.query( - listEnvironmentNetworks({ - environmentUri: environment.environmentUri, - filter - }) - ); - if (!response.errors) { - setItems({ ...response.data.listEnvironmentNetworks }); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (e) { - dispatch({ type: SET_ERROR, error: e.message }); - } finally { - setLoading(false); - } - }, [client, dispatch, filter, environment.environmentUri]); - - const deleteVpcNetwork = async (vpcUri) => { - const response = await client.mutate(deleteNetwork({ vpcUri })); - if (!response.errors) { - enqueueSnackbar('Network deleted', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }; - - useEffect(() => { - if (client) { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, filter.page, fetchItems, dispatch]); - - const handleInputChange = (event) => { - setInputValue(event.target.value); - setFilter({ ...filter, term: event.target.value }); - }; - - const handleInputKeyup = (event) => { - if (event.code === 'Enter') { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }; - - const handlePageChange = async (event, value) => { - if (value <= items.pages && value !== items.page) { - await setFilter({ ...filter, page: value }); - } - }; - - return ( - - - } - title={ - - Networks - - } - /> - - - - - - - - ) - }} - onChange={handleInputChange} - onKeyUp={handleInputKeyup} - placeholder="Search" - value={inputValue} - variant="outlined" - /> - - - - } - sx={{ m: 1 }} - variant="outlined" - > - Add - - - - - - - - - Name - VPC - Private Subnets - Public Subnets - Actions - - - {loading ? ( - - ) : ( - - {items.nodes.length > 0 ? ( - items.nodes.map((vpc) => ( - - )) - ) : ( - - No VPC found - - )} - - )} -
- {!loading && items.nodes.length > 0 && ( - - )} -
-
-
- {isNetworkCreateOpen && ( - - )} -
- ); -}; - -EnvironmentNetworks.propTypes = { - environment: PropTypes.object.isRequired -}; - -export default EnvironmentNetworks; diff --git a/frontend/src/views/Notebooks/NotebookCreateForm.js b/frontend/src/views/Notebooks/NotebookCreateForm.js deleted file mode 100644 index 9ace9c33c..000000000 --- a/frontend/src/views/Notebooks/NotebookCreateForm.js +++ /dev/null @@ -1,589 +0,0 @@ -import { Link as RouterLink, useNavigate } from 'react-router-dom'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; -import { useSnackbar } from 'notistack'; -import { - Box, - Breadcrumbs, - Button, - Card, - CardContent, - CardHeader, - CircularProgress, - Container, - FormHelperText, - Grid, - Link, - MenuItem, - Slider, - TextField, - Typography -} from '@mui/material'; -import { Helmet } from 'react-helmet-async'; -import { Autocomplete, LoadingButton } from '@mui/lab'; -import { useCallback, useEffect, useState } from 'react'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import useSettings from '../../hooks/useSettings'; -import createSagemakerNotebook from '../../api/SagemakerNotebook/createSagemakerNotebook'; -import listEnvironments from '../../api/Environment/listEnvironments'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import ChipInput from '../../components/TagsInput'; -import listEnvironmentGroups from '../../api/Environment/listEnvironmentGroups'; -import * as Defaults from '../../components/defaults'; - -const NotebookCreateForm = (props) => { - const navigate = useNavigate(); - const { enqueueSnackbar } = useSnackbar(); - const dispatch = useDispatch(); - const client = useClient(); - const { settings } = useSettings(); - const [loading, setLoading] = useState(true); - const [groupOptions, setGroupOptions] = useState([]); - const [environmentOptions, setEnvironmentOptions] = useState([]); - const [vpcOptions, setVpcOptions] = useState([]); - const [subnetOptions, setSubnetOptions] = useState([]); - const marks = [ - { - value: 32, - label: '32' - }, - { - value: 64, - label: '64' - }, - { - value: 128, - label: '128' - }, - { - value: 256, - label: '256' - } - ]; - const instanceTypes = [ - { label: 'ml.t3.medium', value: 'ml.t3.medium' }, - { label: 'ml.t3.large', value: 'ml.t3.large' }, - { label: 'ml.m5.xlarge', value: 'ml.m5.xlarge' } - ]; - - const fetchEnvironments = useCallback(async () => { - setLoading(true); - const response = await client.query( - listEnvironments({ filter: Defaults.SelectListFilter }) - ); - if (!response.errors) { - setEnvironmentOptions( - response.data.listEnvironments.nodes.map((e) => ({ - ...e, - value: e.environmentUri, - label: e.label - })) - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoading(false); - }, [client, dispatch]); - const fetchGroups = async (environmentUri) => { - try { - const response = await client.query( - listEnvironmentGroups({ - filter: Defaults.SelectListFilter, - environmentUri - }) - ); - if (!response.errors) { - setGroupOptions( - response.data.listEnvironmentGroups.nodes.map((g) => ({ - value: g.groupUri, - label: g.groupUri - })) - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (e) { - dispatch({ type: SET_ERROR, error: e.message }); - } - }; - useEffect(() => { - if (client) { - fetchEnvironments().catch((e) => - dispatch({ type: SET_ERROR, error: 
e.message }) - ); - } - }, [client, dispatch, fetchEnvironments]); - - async function submit(values, setStatus, setSubmitting, setErrors) { - try { - const response = await client.mutate( - createSagemakerNotebook({ - label: values.label, - environmentUri: values.environment.environmentUri, - description: values.description, - SamlAdminGroupName: values.SamlAdminGroupName, - tags: values.tags, - VpcId: values.VpcId, - SubnetId: values.SubnetId, - VolumeSizeInGB: values.VolumeSizeInGB, - InstanceType: values.InstanceType - }) - ); - if (!response.errors) { - setStatus({ success: true }); - setSubmitting(false); - enqueueSnackbar('Sagemaker instance creation started', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - navigate( - `/console/notebooks/${response.data.createSagemakerNotebook.notebookUri}` - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - setStatus({ success: false }); - setErrors({ submit: err.message }); - setSubmitting(false); - } - } - if (loading) { - return ; - } - - return ( - <> - - Notebooks: Notebook Create | data.all - - - - - - - Create a new notebook - - } - sx={{ mt: 1 }} - > - - Play - - - Notebooks - - - Create - - - - - - - - - - - { - await submit(values, setStatus, setSubmitting, setErrors); - }} - > - {({ - errors, - handleBlur, - handleChange, - handleSubmit, - isSubmitting, - setFieldValue, - touched, - values - }) => ( -
- - - - - - - - - - {touched.description && errors.description && ( - - - {errors.description} - - - )} - - - - { - setFieldValue('tags', [...chip]); - }} - /> - - - - - - - - {instanceTypes.map((i) => ( - - {i.label} - - ))} - - - - - - Volume size - - { - setFieldValue('VolumeSizeInGB', value); - }} - /> - {touched.VolumeSizeInGB && errors.VolumeSizeInGB && ( - - - {errors.VolumeSizeInGB} - - - )} - - - - - - - - - { - setFieldValue('SamlAdminGroupName', ''); - fetchGroups( - event.target.value.environmentUri - ).catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - setFieldValue('environment', event.target.value); - setVpcOptions( - event.target.value.networks.map((v) => ({ - ...v, - value: v, - label: v.VpcId - })) - ); - }} - select - value={values.environment} - variant="outlined" - > - {environmentOptions.map((environment) => ( - - {environment.label} - - ))} - - - - - - - - - - - {groupOptions.map((group) => ( - - {group.label} - - ))} - - - - - - - - option.label)} - onChange={(event, value) => { - setSubnetOptions([]); - const filteredVpc = vpcOptions.filter( - (v) => v.VpcId === value - ); - if ( - value && - vpcOptions && - filteredVpc.length === 1 - ) { - setSubnetOptions( - filteredVpc[0].privateSubnetIds.concat( - filteredVpc[0].publicSubnetIds - ) - ); - setFieldValue('VpcId', value); - } else { - setFieldValue('VpcId', value); - } - }} - renderInput={(params) => ( - - )} - /> - - - option)} - onChange={(event, value) => { - setFieldValue('SubnetId', value); - }} - renderInput={(params) => ( - - )} - /> - - - - - {errors.submit && ( - - {errors.submit} - - )} - - - Create Notebook - - - - -
- )} -
-
-
-
- - ); -}; - -export default NotebookCreateForm; diff --git a/frontend/src/views/Notebooks/NotebookEditForm.js b/frontend/src/views/Notebooks/NotebookEditForm.js deleted file mode 100644 index 93996fc9b..000000000 --- a/frontend/src/views/Notebooks/NotebookEditForm.js +++ /dev/null @@ -1,348 +0,0 @@ -import { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; -import { useSnackbar } from 'notistack'; -import { - Box, - Breadcrumbs, - Button, - Card, - CardContent, - CardHeader, - CircularProgress, - Container, - FormHelperText, - Grid, - Link, - TextField, - Typography -} from '@mui/material'; -import { Helmet } from 'react-helmet-async'; -import { LoadingButton } from '@mui/lab'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import useSettings from '../../hooks/useSettings'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import ChipInput from '../../components/TagsInput'; -import getSagemakerStudioUserProfile from '../../api/SagemakerStudio/getSagemakerStudioUserProfile'; -import updateUserProfile from '../../api/UserProfile/updateUserProfile'; - -const PipelineEditForm = (props) => { - const dispatch = useDispatch(); - const navigate = useNavigate(); - const params = useParams(); - const { enqueueSnackbar } = useSnackbar(); - const client = useClient(); - const { settings } = useSettings(); - const [loading, setLoading] = useState(true); - const [notebook, setNotebook] = useState(null); - - const fetchItem = useCallback(async () => { - setLoading(true); - const response = await client.query( - getSagemakerStudioUserProfile(params.uri) - ); - if ( - !response.errors && - response.data.getSagemakerStudioUserProfile !== null - ) { - setNotebook(response.data.getSagemakerStudioUserProfile); - } else { - const error = response.errors - ? 
response.errors[0].message - : 'Notebook not found'; - dispatch({ type: SET_ERROR, error }); - } - setLoading(false); - }, [client, dispatch, params.uri]); - - useEffect(() => { - if (client) { - fetchItem().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); - } - }, [client, dispatch, fetchItem]); - - async function submit(values, setStatus, setSubmitting, setErrors) { - try { - const response = await client.mutate( - updateUserProfile({ - input: { - sagemakerStudioUserProfileUri: - notebook.sagemakerStudioUserProfileUri, - description: values.description, - label: values.label, - tags: values.tags - } - }) - ); - if (!response.errors) { - setStatus({ success: true }); - setSubmitting(false); - enqueueSnackbar('Notebook updated', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - navigate( - `/console/notebooks/${response.data.updateUserProfile.sagemakerStudioUserProfileUri}` - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - console.error(err); - setStatus({ success: false }); - setErrors({ submit: err.message }); - setSubmitting(false); - dispatch({ type: SET_ERROR, error: err.message }); - } - } - - if (loading || !(notebook && notebook.environment)) { - return ; - } - - return ( - <> - - Dataset: Notebook Update | data.all - - - - - - - Edit notebook {notebook.label} - - } - sx={{ mt: 1 }} - > - - Discover - - - Datasets - - - {notebook.label} - - - - - - - - - - - { - await submit(values, setStatus, setSubmitting, setErrors); - }} - > - {({ - errors, - handleBlur, - handleChange, - handleSubmit, - isSubmitting, - setFieldValue, - touched, - values - }) => ( -
- - - - - - - - - - {touched.description && errors.description && ( - - - {errors.description} - - - )} - - - - - - - - - - { - setFieldValue('tags', [...chip]); - }} - /> - - - - - - - - - - - - - - - - - - - - Save - - - - -
- )} -
-
-
-
- - ); -}; - -export default PipelineEditForm; diff --git a/frontend/src/views/Notebooks/NotebookList.js b/frontend/src/views/Notebooks/NotebookList.js deleted file mode 100644 index deba2258d..000000000 --- a/frontend/src/views/Notebooks/NotebookList.js +++ /dev/null @@ -1,171 +0,0 @@ -import { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink } from 'react-router-dom'; -import { - Box, - Breadcrumbs, - Button, - Container, - Grid, - Link, - Typography -} from '@mui/material'; -import CircularProgress from '@mui/material/CircularProgress'; -import { Helmet } from 'react-helmet-async'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import PlusIcon from '../../icons/Plus'; -import useSettings from '../../hooks/useSettings'; -import SearchInput from '../../components/SearchInput'; -import Pager from '../../components/Pager'; -import { useDispatch } from '../../store'; -import { SET_ERROR } from '../../store/errorReducer'; -import NotebookListItem from './NotebookListItem'; -import listSagemakerNotebooks from '../../api/SagemakerNotebook/listSagemakerNotebooks'; - -function NotebookPageHeader() { - return ( - - - - Notebooks - - } - sx={{ mt: 1 }} - > - - Play - - - Notebooks - - - - - - - - - - ); -} - -const NotebookList = () => { - const dispatch = useDispatch(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); - const { settings } = useSettings(); - const [inputValue, setInputValue] = useState(''); - const [loading, setLoading] = useState(true); - const client = useClient(); - - const fetchItems = useCallback(async () => { - setLoading(true); - const response = await client.query(listSagemakerNotebooks(filter)); - if (!response.errors) { - setItems(response.data.listSagemakerNotebooks); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoading(false); - }, [client, dispatch, filter]); - - const handleInputChange = (event) => { - setInputValue(event.target.value); - setFilter({ ...filter, term: event.target.value }); - }; - - const handleInputKeyup = (event) => { - if (event.code === 'Enter') { - setFilter({page: 1, term: event.target.value}); - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }; - - const handlePageChange = async (event, value) => { - if (value <= items.pages && value !== items.page) { - await setFilter({ ...filter, page: value }); - } - }; - - useEffect(() => { - if (client) { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, filter.page, fetchItems, dispatch]); - - return ( - <> - - Notebooks | data.all - - - - - - - - - - {loading ? 
( - - ) : ( - - - {items.nodes.map((node) => ( - - ))} - - - - - )} - - - - - ); -}; - -export default NotebookList; diff --git a/frontend/src/views/Notebooks/NotebookListItem.js b/frontend/src/views/Notebooks/NotebookListItem.js deleted file mode 100644 index 74d841300..000000000 --- a/frontend/src/views/Notebooks/NotebookListItem.js +++ /dev/null @@ -1,242 +0,0 @@ -import { - Box, - Button, - Card, - Divider, - Grid, - Link, - Tooltip, - Typography -} from '@mui/material'; -import * as FiIcons from 'react-icons/fi'; -import * as FaIcons from 'react-icons/fa'; -import PropTypes from 'prop-types'; -import { useNavigate } from 'react-router'; -import { SiJupyter } from 'react-icons/si'; -import { Link as RouterLink } from 'react-router-dom'; -import React from 'react'; -import IconAvatar from '../../components/IconAvatar'; -import StackStatus from '../../components/StackStatus'; -import Label from '../../components/Label'; -import useCardStyle from '../../hooks/useCardStyle'; - -/** - * @description NotebookListItem view. - * @param {Object} props - * @return {JSX.Element} - */ -const NotebookListItem = (props) => { - const { notebook } = props; - const classes = useCardStyle(); - const navigate = useNavigate(); - - return ( - - - - - } /> - - { - navigate(`/console/notebooks/${notebook.notebookUri}`); - }} - sx={{ - width: '99%', - whiteSpace: 'nowrap', - alignItems: 'left', - overflow: 'hidden', - textOverflow: 'ellipsis', - WebkitBoxOrient: 'vertical', - WebkitLineClamp: 2 - }} - > - - {notebook.label} - - - - by{' '} - - {notebook.owner} - - - - - - - - - - - Role - - - - - - - - - - - - - - Team - - - - - - {notebook.SamlAdminGroupName || '-'} - - - - - - - - - - Account - - - - - {notebook.environment.AwsAccountId} - - - - - - - - - Region - - - - - {notebook.environment.region} - - - - - - - - - Status - - - - - - - - - - - - - - - - - - - - - - ); -}; - -NotebookListItem.propTypes = { - notebook: PropTypes.object.isRequired -}; - -export default NotebookListItem; diff --git a/frontend/src/views/Notebooks/NotebookOverview.js b/frontend/src/views/Notebooks/NotebookOverview.js deleted file mode 100644 index 15e47a3c7..000000000 --- a/frontend/src/views/Notebooks/NotebookOverview.js +++ /dev/null @@ -1,52 +0,0 @@ -import { Box, Grid } from '@mui/material'; -import PropTypes from 'prop-types'; -import ObjectBrief from '../../components/ObjectBrief'; -import ObjectMetadata from '../../components/ObjectMetadata'; -import NotebookInstanceProperties from './NotebookInstanceProperties'; - -/** - * @description NotebookOverview view. - * @param {NotebookOverview.propTypes} props - * @return {JSX.Element} - */ -const NotebookOverview = (props) => { - const { notebook, ...other } = props; - - return ( - - - - 0 ? 
notebook.tags : ['-'] - } - /> - - - - - - - - - - ); -}; - -NotebookOverview.propTypes = { - notebook: PropTypes.object.isRequired -}; - -export default NotebookOverview; diff --git a/frontend/src/views/Notebooks/NotebookView.js b/frontend/src/views/Notebooks/NotebookView.js deleted file mode 100644 index 7e13c4198..000000000 --- a/frontend/src/views/Notebooks/NotebookView.js +++ /dev/null @@ -1,400 +0,0 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useParams } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; -import { - Box, - Breadcrumbs, - Button, - CircularProgress, - Container, - Divider, - Grid, - Link, - Tab, - Tabs, - Typography -} from '@mui/material'; -import { BiStopCircle } from 'react-icons/bi'; -import { FaAws, FaTrash } from 'react-icons/fa'; -import { VscDebugStart } from 'react-icons/vsc'; -import { SiJupyter } from 'react-icons/si'; -import { useNavigate } from 'react-router'; -import { LoadingButton } from '@mui/lab'; -import { useSnackbar } from 'notistack'; -import { Info, LocalOffer, RefreshRounded } from '@mui/icons-material'; -import useSettings from '../../hooks/useSettings'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import Stack from '../Stack/Stack'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import DeleteObjectWithFrictionModal from '../../components/DeleteObjectWithFrictionModal'; -import deleteSagemakerNotebook from '../../api/SagemakerNotebook/deleteSagemakerNotebook'; -import getSagemakerNotebook from '../../api/SagemakerNotebook/getSagemakerNotebook'; -import NotebookOverview from './NotebookOverview'; -import getSagemakerNotebookPresignedUrl from '../../api/SagemakerNotebook/getSagemakerNotebookPresignedUrl'; -import stopSagemakerNotebook from '../../api/SagemakerNotebook/stopNotebookInstance'; -import startSagemakerNotebook from '../../api/SagemakerNotebook/startNotebookInstance'; -import StackStatus from '../Stack/StackStatus'; -import KeyValueTagList from '../KeyValueTags/KeyValueTagList'; - -/** - * @description NotebookView component. - * @returns {JSX.Element|null} - */ -const NotebookView = () => { - const getTabs = (isAdvancedMode) => - isAdvancedMode - ? 
[ - { - label: 'Overview', - value: 'overview', - icon: - }, - { - label: 'Tags', - value: 'tags', - icon: - }, - { label: 'Stack', value: 'stack', icon: } - ] - : []; - const dispatch = useDispatch(); - const { settings } = useSettings(); - const { enqueueSnackbar } = useSnackbar(); - const params = useParams(); - const client = useClient(); - const navigate = useNavigate(); - const [currentTab, setCurrentTab] = useState('overview'); - const [loading, setLoading] = useState(true); - const [isStoppingNotebook, setIsStoppingNotebook] = useState(false); - const [isStartingNotebook, setIsStartingNotebook] = useState(false); - const [isRefreshingNotebook, setIsRefreshingNotebook] = useState(false); - const [notebook, setNotebook] = useState(null); - const [stack, setStack] = useState(null); - const [isOpeningSagemakerNotebook, setIsOpeningSagemakerNotebook] = - useState(false); - const [isStoppedInstance, setIsStoppedInstance] = useState({}); - const [isNotFoundInstance, setNotFoundInstance] = useState({}); - const [isDeleteObjectModalOpen, setIsDeleteObjectModalOpen] = useState(false); - const [tabs, setTabs] = useState(getTabs(settings.isAdvancedMode)); - - useEffect( - () => setTabs(getTabs(settings.isAdvancedMode)), - [settings.isAdvancedMode] - ); - - const handleDeleteObjectModalOpen = () => { - setIsDeleteObjectModalOpen(true); - }; - - const handleDeleteObjectModalClose = () => { - setIsDeleteObjectModalOpen(false); - }; - - const getNotebookInstance = useCallback(async () => { - const response = await client.query(getSagemakerNotebook(params.uri)); - if (!response.errors) { - setNotebook(response.data.getSagemakerNotebook); - if (response.data.getSagemakerNotebook.stack) { - setStack(response.data.getSagemakerNotebook.stack); - } - const status = response.data.getSagemakerNotebook.NotebookInstanceStatus; - if (status === 'Stopped' || status === 'Stopping') { - setIsStoppedInstance(true); - } else { - setIsStoppedInstance(false); - } - if (status === 'NotFound' || status === 'Pending') { - setNotFoundInstance(true); - } else { - setNotFoundInstance(false); - } - } else { - const error = response.errors - ? 
response.errors[0].message - : 'Notebook not found'; - dispatch({ type: SET_ERROR, error }); - } - }, [params.uri, client, dispatch]); - - const refreshInstance = async () => { - setIsRefreshingNotebook(true); - await getNotebookInstance(); - enqueueSnackbar('Notebook instance reloaded', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - setIsRefreshingNotebook(false); - }; - - const fetchItem = useCallback(async () => { - setLoading(true); - await getNotebookInstance(); - setLoading(false); - }, [getNotebookInstance]); - - const removeNotebook = async (deleteFromAWS = false) => { - const response = await client.mutate( - deleteSagemakerNotebook(notebook.notebookUri, deleteFromAWS) - ); - if (!response.errors) { - handleDeleteObjectModalClose(); - enqueueSnackbar('Notebook deleted', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - navigate('/console/notebooks'); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }; - const getNotebookPresignedUrl = async () => { - setIsOpeningSagemakerNotebook(true); - const response = await client.query( - getSagemakerNotebookPresignedUrl(notebook.notebookUri) - ); - if (!response.errors) { - window.open(response.data.getSagemakerNotebookPresignedUrl); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setIsOpeningSagemakerNotebook(false); - }; - - useEffect(() => { - if (client) { - fetchItem().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); - } - }, [client, fetchItem, dispatch]); - - const handleTabsChange = (event, value) => { - setCurrentTab(value); - }; - const stopNotebook = async () => { - setIsStoppingNotebook(true); - const response = await client.mutate( - stopSagemakerNotebook(notebook.notebookUri) - ); - if (!response.errors) { - enqueueSnackbar('Notebook instance is stopping', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - setIsStoppingNotebook(false); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }; - - const startNotebook = async () => { - setIsStartingNotebook(true); - const response = await client.mutate( - startSagemakerNotebook(notebook.notebookUri) - ); - if (!response.errors) { - enqueueSnackbar('Notebook instance starting', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - setIsStartingNotebook(false); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }; - - if (loading) { - return ; - } - if (!notebook) { - return null; - } - - /** - * @description Tab header. 
- * @type {JSX.Element} - */ - const tabHeader = ( - <> - - - {tabs.map((tab) => ( - - ))} - - - - - ); - - return ( - <> - - Notebooks: Notebook Details | data.all - - - - - - - - Notebook {notebook.label} - - } - sx={{ mt: 1 }} - > - - Play - - - Notebooks - - - {notebook.label} - - - - - - } - sx={{ m: 1 }} - onClick={getNotebookPresignedUrl} - type="button" - variant="outlined" - > - Open JupyterLab - - } - sx={{ m: 1 }} - onClick={stopNotebook} - type="button" - variant="outlined" - > - Stop Instance - - } - sx={{ m: 1 }} - onClick={startNotebook} - type="button" - variant="outlined" - > - Start Instance - - } - sx={{ m: 1 }} - variant="outlined" - onClick={refreshInstance} - > - Refresh - - - - - - {settings.isAdvancedMode && tabHeader} - - {currentTab === 'overview' && ( - - )} - {currentTab === 'tags' && ( - - )} - {currentTab === 'stack' && ( - - )} - - - - - - ); -}; - -export default NotebookView; diff --git a/frontend/src/views/Organizations/MoreMenuEnvironments.js b/frontend/src/views/Organizations/MoreMenuEnvironments.js deleted file mode 100644 index a1d25776d..000000000 --- a/frontend/src/views/Organizations/MoreMenuEnvironments.js +++ /dev/null @@ -1,70 +0,0 @@ -import { memo, useRef, useState } from 'react'; -import { - IconButton, - ListItemIcon, - ListItemText, - Menu, - MenuItem, - Tooltip -} from '@mui/material'; -import RefreshIcon from '@mui/icons-material/Refresh'; -import MoreHorizIcon from '@mui/icons-material/MoreHoriz'; -import PropTypes from 'prop-types'; - -const MoreMenuEnvironments = (props) => { - const anchorRef = useRef(null); - const [openMenu, setOpenMenu] = useState(false); - - const handleMenuOpen = () => { - setOpenMenu(true); - }; - - const handleMenuClose = () => { - setOpenMenu(false); - }; - - return ( - <> - - - - - - - { - props.refresh(); - }} - > - - - - - - - - ); -}; -MoreMenuEnvironments.propTypes = { - refresh: PropTypes.func -}; - -export default memo(MoreMenuEnvironments); diff --git a/frontend/src/views/Organizations/index.js b/frontend/src/views/Organizations/index.js deleted file mode 100644 index 3ddea332b..000000000 --- a/frontend/src/views/Organizations/index.js +++ /dev/null @@ -1,5 +0,0 @@ -import OrganizationList from './OrganizationList'; -import OrganizationListItem from './OrganizationListItem'; -import OrganizationView from './OrganizationView'; - -export { OrganizationList, OrganizationListItem, OrganizationView }; diff --git a/frontend/src/views/Pipelines/PipelineDatasets.js b/frontend/src/views/Pipelines/PipelineDatasets.js deleted file mode 100644 index afd5f573e..000000000 --- a/frontend/src/views/Pipelines/PipelineDatasets.js +++ /dev/null @@ -1,107 +0,0 @@ -import PropTypes from 'prop-types'; -import {useCallback, useState, useEffect } from 'react'; -import { - Card, - CardContent, - CardHeader, - Divider, - List, - ListItem, - Typography -} from '@mui/material'; -import useClient from '../../hooks/useClient'; -import { useDispatch } from '../../store'; -import getDataset from "../../api/Dataset/getDataset"; -import {SET_ERROR} from "../../store/errorReducer"; - - -const PipelineDatasets = (props) => { - const { pipeline } = props; - const client = useClient(); - const dispatch = useDispatch(); - const [loading, setLoading] = useState(false); - const [inputDataset, setInputDataset] = useState(""); - const [outputDataset, setOutputDataset] = useState(""); - - const fetchDatasets = useCallback(async () => { - setLoading(true); - if (pipeline.inputDatasetUri) { - const response = await 
client.query(getDataset(pipeline.inputDatasetUri)); - if (!response.errors && response.data.getDataset !== null) { - setInputDataset(response.data.getDataset.label); - } else { - const error = response.errors - ? response.errors[0].message - : 'Dataset not found'; - dispatch({type: SET_ERROR, error}); - } - } - if (pipeline.outputDatasetUri) { - const response = await client.query(getDataset(pipeline.outputDatasetUri)); - if (!response.errors && response.data.getDataset !== null) { - setOutputDataset(response.data.getDataset.label); - } else { - const error = response.errors - ? response.errors[0].message - : 'Dataset not found'; - dispatch({type: SET_ERROR, error}); - } - } - setLoading(false); - }, [client, dispatch]); - - useEffect(() => { - if (client) { - fetchDatasets().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); - } - }, [client, dispatch, fetchDatasets]); - - - return ( - - - - - - - - Input Dataset - - - {inputDataset} - - - - - Output Dataset - - - {outputDataset} - - - - - - ); -}; - -PipelineDatasets.propTypes = { - // @ts-ignore - pipeline: PropTypes.object.isRequired -}; - -export default PipelineDatasets; diff --git a/frontend/src/views/Pipelines/PipelineEnvironmentCreateForm.js b/frontend/src/views/Pipelines/PipelineEnvironmentCreateForm.js deleted file mode 100644 index 5f566be0c..000000000 --- a/frontend/src/views/Pipelines/PipelineEnvironmentCreateForm.js +++ /dev/null @@ -1,273 +0,0 @@ -import React, { useEffect, useState } from 'react'; -import { useSnackbar } from 'notistack'; -import { - Box, - Button, - Card, - CardContent, - CardHeader, - Divider, - Grid, - IconButton, - MenuItem, - Table, - TableBody, - TableCell, - TableHead, - TableRow, - TextField -} from '@mui/material'; -import { DeleteOutlined } from '@mui/icons-material'; -import PropTypes from 'prop-types'; -import useClient from '../../hooks/useClient'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import createDataPipelineEnvironment from '../../api/DataPipeline/createDataPipelineEnvironment'; -import listEnvironmentGroups from '../../api/Environment/listEnvironmentGroups'; -import * as Defaults from '../../components/defaults'; - -const PipelineEnvironmentCreateForm = (props) => { - const { environmentOptions, triggerEnvSubmit, pipelineUri, handleCountEnvironmentValid } = props; - const dispatch = useDispatch(); - const { enqueueSnackbar } = useSnackbar(); - const client = useClient(); - const [kvEnvs, setKeyValueEnvs] = useState([]); - const [mapGroups, setMapGroups] = useState(new Map()) - const stageOps =[{value:"dev", label:"dev"},{value:"test", label:"test"},{value:"val", label:"val"},{value:"prod", label:"prod"},{value:"other", label:"other"}]; - const [environmentOps, setEnvironmentOps] = useState( - environmentOptions && environmentOptions.length > 0 ? 
environmentOptions : [{ environmentUri: 'someUri', label: 'some' },{ environmentUri: 'someUri', label: 'some2' }] - ); - - const fetchGroups = async (environment) => { - try { - const response = await client.query( - listEnvironmentGroups({ - filter: Defaults.SelectListFilter, - environmentUri: environment.environmentUri - }) - ); - - if (!response.errors) { - setMapGroups(new Map(mapGroups.set(environment.environmentUri, response.data.listEnvironmentGroups.nodes)) )//Array of groups (Objects) - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (e) { - dispatch({ type: SET_ERROR, error: e.message }); - } -}; - - const handleAddEnvRow = () => { - if (kvEnvs.length <= 40) { - const item = { - stage: '', - env: '', - team: '' - }; - setKeyValueEnvs((prevState) => [...prevState, item]); - } else { - dispatch({ - type: SET_ERROR, - error: 'You cannot add more than 40 development stages' - }); - } - }; - - const handleChange = (idx, field) => (e) => { - const { value } = e.target; - - setKeyValueEnvs((prevstate) => { - const rows = [...prevstate]; - if (field === 'stage') { - rows[idx].stage = value; - } else if (field === 'env'){ - rows[idx].environmentLabel = value.label; - rows[idx].environmentUri = value.environmentUri; - } else{ - rows[idx].samlGroupName = value; - } - return rows; - }); - }; - - const handleRemoveEnvRow = (idx) => { - setKeyValueEnvs((prevstate) => { - const rows = [...prevstate]; - rows.splice(idx, 1); - return rows; - }); - }; - - async function submit(element, index) { - try { - const response = await client.mutate( - createDataPipelineEnvironment({ - input: { - stage: element.stage, - order: index+1, - pipelineUri: pipelineUri, - environmentLabel: element.environmentLabel, - environmentUri: element.environmentUri, - samlGroupName: element.samlGroupName - - } - }) - ); - if (!response.errors) { - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - console.error(err); - dispatch({ type: SET_ERROR, error: err.message }); - } - } - - useEffect(() => { - if (client && triggerEnvSubmit && pipelineUri && kvEnvs.length > 0) { - kvEnvs.forEach((element, index) => submit(element, index)) - } - if (client && environmentOptions.length > 0) { - environmentOptions.forEach((element) => fetchGroups(element)) - } - }, [client, dispatch, triggerEnvSubmit, pipelineUri, environmentOptions]); - - useEffect(() => { - if (kvEnvs.length > 0){ - handleCountEnvironmentValid(true) - }else{ - handleCountEnvironmentValid(false) - } - }, [kvEnvs.length]); - - return ( - <> - - - - - - - - - - - - - - - - {kvEnvs && kvEnvs.length > 0 && ( - - - Order - Development Stage - Environment - Team - - - )} - - {kvEnvs.map((item, idx) => ( - <> - - - - - - - {stageOps.map((stage) => ( - - {stage.label} - - ))} - - - - - {environmentOps.map((environment) => ( - - {environment.label} - - ))} - - - - - {mapGroups.get(kvEnvs[idx].environmentUri) && (mapGroups.get(kvEnvs[idx].environmentUri).map((g) => ( - - {g.groupUri} - - )))} - - - - - - ))} - -
- { - handleRemoveEnvRow(idx); - }} - > - - -
- - - -
-
-
-
-
-
- - ); -}; -PipelineEnvironmentCreateForm.propTypes = { - environmentOptions: PropTypes.array.isRequired, - triggerEnvSubmit: PropTypes.bool.isRequired, - pipelineUri: PropTypes.string.isRequired, - handleCountEnvironmentValid: PropTypes.func.isRequired -}; -export default PipelineEnvironmentCreateForm; diff --git a/frontend/src/views/Pipelines/PipelineEnvironmentEditForm.js b/frontend/src/views/Pipelines/PipelineEnvironmentEditForm.js deleted file mode 100644 index cbe334fdb..000000000 --- a/frontend/src/views/Pipelines/PipelineEnvironmentEditForm.js +++ /dev/null @@ -1,455 +0,0 @@ -import React, { useEffect, useState } from 'react'; -import { useSnackbar } from 'notistack'; -import { - Box, - Button, - Card, - CardContent, - CardHeader, - Divider, - Grid, - IconButton, - MenuItem, - Table, - TableBody, - TableCell, - TableHead, - TableRow, - TextField -} from '@mui/material'; -import { DeleteOutlined } from '@mui/icons-material'; -import PropTypes from 'prop-types'; -import useClient from '../../hooks/useClient'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import createDataPipelineEnvironment from '../../api/DataPipeline/createDataPipelineEnvironment'; -import deleteDataPipelineEnvironment from '../../api/DataPipeline/deleteDataPipelineEnvironment'; -import updateDataPipelineEnvironment from '../../api/DataPipeline/updateDataPipelineEnvironment'; -import listEnvironmentGroups from '../../api/Environment/listEnvironmentGroups'; -import * as Defaults from '../../components/defaults'; - -const PipelineEnvironmentEditForm = (props) => { - const { environmentOptions, triggerEnvSubmit, pipelineUri, pipeline, handleCountEnvironmentValid } = props; - const dispatch = useDispatch(); - const { enqueueSnackbar } = useSnackbar(); - const client = useClient(); - const [kvEnvs, setKeyValueEnvs] = useState([]); - const [envsToRemove, setEnvsToRemove] = useState([]); - const [environments, setEnvironments] = useState([]); - const [mapGroups, setMapGroups] = useState(new Map()) - const stageOps =[{value:"dev", label:"dev"},{value:"test", label:"test"},{value:"val", label:"val"},{value:"prod", label:"prod"},{value:"other", label:"other"}]; - const [environmentOps, setEnvironmentOps] = useState( - environmentOptions && environmentOptions.length > 0 ? 
environmentOptions : [{ environmentUri: 'someUri', label: 'some' },{ environmentUri: 'someUri', label: 'some2' }] - ); - - useEffect(() => { - if (client && pipeline) { - console.log("useeffect") - console.log(pipeline) - const environmentsSorted = pipeline.developmentEnvironments.nodes.sort((a, b) => { - return a.order - b.order; - }); - if (environmentsSorted) { - environmentsSorted.map((e) => (handleExistingEnvRow(e))) - } - } - }, [client, pipeline]); - - const fetchGroups = async (environment) => { - try { - const response = await client.query( - listEnvironmentGroups({ - filter: Defaults.SelectListFilter, - environmentUri: environment.environmentUri - }) - ); - - if (!response.errors) { - setMapGroups(new Map(mapGroups.set(environment.environmentUri, response.data.listEnvironmentGroups.nodes)) )//Array of groups (Objects) - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (e) { - dispatch({ type: SET_ERROR, error: e.message }); - } - }; - - const handleExistingEnvRow = (e) => { - if (kvEnvs.length <= 40) { - const item = { - stage: e.stage, - env: e.environmentLabel, - environmentLabel: e.environmentLabel, - environmentUri: e.environmentUri, - envPipelineUri: e.envPipelineUri, - samlGroupName: e.samlGroupName, - team: e.samlGroupName, - AwsAccountId: e.AwsAccountId - }; - setEnvironments((prevState) => [...prevState, item]); - } else { - dispatch({ - type: SET_ERROR, - error: 'You cannot add more than 40 development stages' - }); - } - }; - - const handleAddEnvRow = () => { - if (kvEnvs.length <= 40) { - const item = { - stage: '', - env: '', - team: '' - }; - setKeyValueEnvs((prevState) => [...prevState, item]); - } else { - dispatch({ - type: SET_ERROR, - error: 'You cannot add more than 40 development stages' - }); - } - }; - - const handleChange = (idx, field) => (e) => { - const { value } = e.target; - - setKeyValueEnvs((prevstate) => { - const rows = [...prevstate]; - if (field === 'stage') { - rows[idx].stage = value; - } else if (field === 'env'){ - rows[idx].environmentLabel = value.label; - rows[idx].environmentUri = value.environmentUri; - } else{ - rows[idx].samlGroupName = value; - } - return rows; - }); - }; - - const handleRemoveEnvRow = (idx) => { - setKeyValueEnvs((prevstate) => { - const rows = [...prevstate]; - rows.splice(idx, 1); - return rows; - }); - }; - - const handleRemoveExistingEnvRow = (idx) => { - setEnvironments((prevstate) => { - const rows = [...prevstate]; - setEnvsToRemove((prevState) => [...prevState, rows[idx]]); - rows.splice(idx, 1); - return rows; - }); - }; - - async function submit(element, index) { - try { - const response = await client.mutate( - createDataPipelineEnvironment({ - input: { - stage: element.stage, - order: index + environments.length + 1, - pipelineUri: pipelineUri, - environmentLabel: element.environmentLabel, - environmentUri: element.environmentUri, - samlGroupName: element.samlGroupName - - } - }) - ); - if (!response.errors) { - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - console.error(err); - dispatch({ type: SET_ERROR, error: err.message }); - } - } - - async function update(element, index) { - try { - const response = await client.mutate( - updateDataPipelineEnvironment({ - input: { - stage: element.stage, - order: index + 1, - pipelineUri: pipelineUri, - environmentLabel: element.environmentLabel, - environmentUri: element.environmentUri, - samlGroupName: element.samlGroupName - } - }) - ); - if (!response.errors) { - } 
else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - console.error(err); - dispatch({ type: SET_ERROR, error: err.message }); - } - } - - async function deleteEnv(element, index) { - try { - const response = await client.mutate( - deleteDataPipelineEnvironment({ - envPipelineUri: element.envPipelineUri - }) - ); - if (!response.errors) { - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - console.error(err); - dispatch({ type: SET_ERROR, error: err.message }); - } - } - - useEffect(() => { - if (client && triggerEnvSubmit && pipelineUri && envsToRemove.length > 0) { - envsToRemove.forEach((element, index) => deleteEnv(element, index)) - } - if (client && triggerEnvSubmit && pipelineUri && environments.length > 0) { - environments.forEach((element, index) => update(element, index)) - } - if (client && triggerEnvSubmit && pipelineUri && kvEnvs.length > 0) { - kvEnvs.forEach((element, index) => submit(element, index)) - } - if (client && environmentOptions.length > 0) { - environmentOptions.forEach((element) => fetchGroups(element)) - } - }, [client, dispatch, triggerEnvSubmit, pipelineUri, environmentOptions]); - - useEffect(() => { - if ((kvEnvs.length + environments.length) > 0){ - handleCountEnvironmentValid(true) - }else{ - handleCountEnvironmentValid(false) - } - }, [kvEnvs.length, environments.length]); - - return ( - <> - - - - - - - - - - - - - - - - - {environments && environments.length > 0 && ( - - - Order - Development Stage - Environment - Team - AWS Account - - - )} - - {environments.map((item, idx) => ( - <> - - - - - - - - - - - - - - - - - - - - ))} - -
- { - handleRemoveExistingEnvRow(idx); - }} - > - - -
-
-
-
-
- - - - - - - - - - - - - - {kvEnvs && kvEnvs.length > 0 && ( - - - Order - Development Stage - Environment - Team - - - )} - - {kvEnvs.map((item, idx) => ( - <> - - - - - - - {stageOps.map((stage) => ( - - {stage.label} - - ))} - - - - - {environmentOps.map((environment) => ( - - {environment.label} - - ))} - - - - - {mapGroups.get(kvEnvs[idx].environmentUri) && (mapGroups.get(kvEnvs[idx].environmentUri).map((g) => ( - - {g.groupUri} - - )))} - - - - - - ))} - -
- { - handleRemoveEnvRow(idx); - }} - > - - -
- - - -
-
-
-
-
-
- - ); -}; -PipelineEnvironmentEditForm.propTypes = { - environmentOptions: PropTypes.array.isRequired, - triggerEnvSubmit: PropTypes.bool.isRequired, - pipelineUri: PropTypes.string.isRequired, - pipeline: PropTypes.object.isRequired, - handleCountEnvironmentValid: PropTypes.func.isRequired -}; -export default PipelineEnvironmentEditForm; diff --git a/frontend/src/views/Pipelines/PipelineEnvironments.js b/frontend/src/views/Pipelines/PipelineEnvironments.js deleted file mode 100644 index b3aefd47d..000000000 --- a/frontend/src/views/Pipelines/PipelineEnvironments.js +++ /dev/null @@ -1,103 +0,0 @@ -import React, {useCallback, useEffect, useState} from 'react'; -import { useSnackbar } from 'notistack'; -import { - Box, - Button, - Card, - CardContent, - CardHeader, - Divider, - Grid, - IconButton, - MenuItem, - Table, - TableBody, - TableCell, - TableHead, - TableRow, - TextField -} from '@mui/material'; -import { DeleteOutlined } from '@mui/icons-material'; -import PropTypes from 'prop-types'; -import { LoadingButton } from '@mui/lab'; -import useClient from '../../hooks/useClient'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import * as Defaults from '../../components/defaults'; - -const PipelineEnvironments = (props) => { - const { pipeline } = props; - const dispatch = useDispatch(); - const { enqueueSnackbar } = useSnackbar(); - const client = useClient(); - const [environments, setEnvironments] = useState([]); - const [loading, setLoading] = useState(true); - - useEffect(() => { - if (client && pipeline) { - console.log("useeffect") - console.log(pipeline) - const environmentsSorted = pipeline.developmentEnvironments.nodes.sort((a, b) => { - return a.order - b.order; - }); - setEnvironments(environmentsSorted) - console.log(environments) - } - }, [client, pipeline]); - - return ( - <> - - - - - - - - - - - - - - - - - {environments > 0 && ( - - - Order - Development Stage - Environment - Team - AWS Account - - - )} - - {environments && (environments.map((e) => ( - <> - - {e.order} - {e.stage} - {e.environmentLabel} - {e.samlGroupName} - {e.AwsAccountId} - - - )))} - -
-
-
-
-
-
-
- - ); -}; -PipelineEnvironments.propTypes = { - pipeline: PropTypes.object.isRequired -}; -export default PipelineEnvironments; diff --git a/frontend/src/views/Pipelines/PipelineList.js b/frontend/src/views/Pipelines/PipelineList.js deleted file mode 100644 index 4ea57ba81..000000000 --- a/frontend/src/views/Pipelines/PipelineList.js +++ /dev/null @@ -1,224 +0,0 @@ -import { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink } from 'react-router-dom'; -import { - Box, - Breadcrumbs, - Button, - Container, - Grid, - Divider, - Link, - Typography, - Autocomplete, - TextField -} from '@mui/material'; -import CircularProgress from '@mui/material/CircularProgress'; -import { Helmet } from 'react-helmet-async'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import PlusIcon from '../../icons/Plus'; -import useSettings from '../../hooks/useSettings'; -import SearchInput from '../../components/SearchInput'; -import Pager from '../../components/Pager'; -import PipelineListItem from './PipelineListItem'; -import { useDispatch } from '../../store'; -import { SET_ERROR } from '../../store/errorReducer'; -import listDataPipelines from '../../api/DataPipeline/listDataPipelines'; -import ChipInput from '../../components/TagsInput'; -import { AwsRegions } from '../../constants'; - - -function PipelinesPageHeader() { - return ( - - - - Pipelines - - } - sx={{ mt: 1 }} - > - - Play - - - Pipelines - - - - - - - - - - ); -} - -const PipelineList = () => { - const dispatch = useDispatch(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); - const { settings } = useSettings(); - const [inputValue, setInputValue] = useState(''); - const [loading, setLoading] = useState(true); - const client = useClient(); - const devOptions =[{value:"cdk-trunk", label:"CDK Pipelines - Trunk-based"},{value:"trunk", label:"CodePipeline - Trunk-based"},{value:"gitflow", label:"CodePipeline - Gitflow"}];/*DBT Pipelines*/ - const [filterItems] = useState([{title:'DevStrategy', options: devOptions},{title:'Tags'},{title: 'Region', options: AwsRegions}]); - - const fetchItems = useCallback(async () => { - setLoading(true); - const response = await client.query(listDataPipelines(filter)); - if (!response.errors) { - setItems(response.data.listDataPipelines); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoading(false); - }, [client, dispatch, filter]); - - const handleInputChange = (event) => { - setInputValue(event.target.value); - setFilter({ ...filter, term: event.target.value }); - }; - - const handleInputKeyup = (event) => { - if (event.code === 'Enter') { - setFilter({...filter, page: 1, term: event.target.value}); - } - }; - - const handlePageChange = async (event, value) => { - if (value <= items.pages && value !== items.page) { - setFilter({ ...filter, page: value }); - } - }; - - const handleFilterChange = (filterLabel, values) => { - if (filterLabel === "Region"){ - const selectedRegions = values.map((region) => region.value) - setFilter({ ...filter, region: selectedRegions}); - } else if (filterLabel === "Tags"){ - setFilter({ ...filter, tags: values }); - } else if (filterLabel === "DevStrategy"){ - const selectedTypes = values.map((type) => type.value) - setFilter({ ...filter, type: selectedTypes }) - } - }; - - useEffect(() => { - if (client) { - 
fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, filter, dispatch]); - - return ( - <> - - Pipelines | data.all - - - - - - - - - - {filterItems.map((item) => ( - - {item.title != 'Tags' - ? option.label} - onChange={(event, value) => handleFilterChange(item.title, value)} - renderInput={(regionParams) => ( - - )} - /> - : handleFilterChange(item.title, e)} - /> - } - - ))} - - - - - {loading ? ( - - ) : ( - - - {items.nodes.map((node) => ( - - ))} - - - - )} - - - - - ); -}; - -export default PipelineList; diff --git a/frontend/src/views/Pipelines/PipelineOverview.js b/frontend/src/views/Pipelines/PipelineOverview.js deleted file mode 100644 index 3f0938d69..000000000 --- a/frontend/src/views/Pipelines/PipelineOverview.js +++ /dev/null @@ -1,60 +0,0 @@ -import { Box, Grid } from '@mui/material'; -import PropTypes from 'prop-types'; -import ObjectBrief from '../../components/ObjectBrief'; -import ObjectMetadata from '../../components/ObjectMetadata'; -import PipelineCICD from './PipelineCICD'; -import PipelineEnvironments from './PipelineEnvironments'; - -const PipelineOverview = (props) => { - const { pipeline, ...other } = props; - - return ( - - - - - - - - - - - - - - - - - - - - - - 0 ? pipeline.tags : ['-'] - } - /> - - - - - ); -}; - -PipelineOverview.propTypes = { - pipeline: PropTypes.object.isRequired -}; - -export default PipelineOverview; diff --git a/frontend/src/views/Shares/RevokeShareItemsModal.js b/frontend/src/views/Shares/RevokeShareItemsModal.js deleted file mode 100644 index ccff68e5d..000000000 --- a/frontend/src/views/Shares/RevokeShareItemsModal.js +++ /dev/null @@ -1,179 +0,0 @@ -import PropTypes from 'prop-types'; -import { useSnackbar } from 'notistack'; -import { - Box, Card, - Dialog, - Divider, - IconButton, - Typography -} from '@mui/material'; -import {Add, SyncAlt} from '@mui/icons-material'; -import React, { useCallback, useEffect, useState } from 'react'; -import { useParams } from 'react-router-dom'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import getShareObject from '../../api/ShareObject/getShareObject'; -import revokeItemsShareObject from '../../api/ShareObject/revokeItemsShareObject'; -import {LoadingButton} from "@mui/lab"; -import {DataGrid} from "@mui/x-data-grid"; - -const RevokeShareItemsModal = (props) => { - const client = useClient(); - const { share, onApply, onClose, open, reloadSharedItems, ...other } = props; - const { enqueueSnackbar } = useSnackbar(); - const [filter, setFilter] = useState(Defaults.DefaultFilter); - const [rows, setRows] = useState([]); - const dispatch = useDispatch(); - const params = useParams(); - const [loading, setLoading] = useState(true); - const [selectionModel, setSelectionModel] = useState([]); - const [pageSize, setPageSize] = useState(5); - - const fetchShareItems = useCallback(async () => { - setLoading(true); - const response = await client.query( - getShareObject({ - shareUri: params.uri, - filter: { - ...filter, - pageSize: 1000, - isShared: true, - isRevokable: true - } - }) - ); - if (!response.errors) { - setRows( - response.data.getShareObject.items.nodes.map((item) => ({ - id: item.shareItemUri, - name: item.itemName, - type: item.itemType == "StorageLocation"? 
"Folder": "Table", - status: item.status - })) - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoading(false); - }, [client, dispatch, params.uri, filter]); - - - const revoke = async () => { - setLoading(true); - const response = await client.mutate( - revokeItemsShareObject({ - input: { - shareUri: share.shareUri, - revokedItemUris: selectionModel - } - }) - ); - if (!response.errors) { - enqueueSnackbar('Items revoked', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - await fetchShareItems(); - reloadSharedItems(true); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoading(false); - }; - - useEffect(() => { - if (client) { - fetchShareItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, dispatch, fetchShareItems]); - - if (!share) { - return null; - } - if (!rows) { - return null; - } - const header = [ - { field: 'name', headerName: 'Name', width: 200, editable: false }, - { field: 'type', headerName: 'Type', width: 300, editable: false }, - { field: 'status', headerName: 'Status', width: 300, editable: false }, - ]; - - return ( - - - - Revoke access to items from share object {share.dataset.datasetName} - - - { - "After selecting the items that you want to revoke, click on Revoke Selected Items" - } - - - - - {!loading && rows.length > 0 ? ( - setPageSize(newPageSize)} - checkboxSelection - onSelectionModelChange={(newSelection) => { - setSelectionModel(newSelection); - }} - selectionModel={selectionModel} - /> - ) : ( - - No items to revoke. - - )} - - - } - sx={{ m: 1 }} - variant="outlined" - > - Revoke Selected Items - - - - - ); -}; - -RevokeShareItemsModal.propTypes = { - share: PropTypes.object.isRequired, - onApply: PropTypes.func, - onClose: PropTypes.func, - reloadSharedItems: PropTypes.func, - open: PropTypes.bool.isRequired -}; - -export default RevokeShareItemsModal; diff --git a/frontend/src/views/Shares/ShareInboxListItem.js b/frontend/src/views/Shares/ShareInboxListItem.js deleted file mode 100644 index 1a5562ada..000000000 --- a/frontend/src/views/Shares/ShareInboxListItem.js +++ /dev/null @@ -1,166 +0,0 @@ -import { - Box, - Button, - Card, - CardHeader, - Divider, - Grid, - Link, - Typography -} from '@mui/material'; -import { Link as RouterLink } from 'react-router-dom'; -import PropTypes from 'prop-types'; -import ShareStatus from '../../components/ShareStatus'; -import TextAvatar from '../../components/TextAvatar'; -import useCardStyle from '../../hooks/useCardStyle'; - -const ShareInboxListItem = (props) => { - const { share, reload } = props; - const classes = useCardStyle(); - - return ( - - - - } - disableTypography - subheader={ - - - - - - | For{' '} - - {share.dataset.datasetName} - {' '} - | {share.created} - - - } - title={ - - {share.owner} - - } - /> - - - {`Read access to Dataset: ${share.dataset.datasetName} - for Principal: ${share.principal.principalName} - from Environment: ${share.principal.environmentName}`} - - - - - - - {`Currently shared items: ${share.statistics.sharedItems}`} - - - - - {`Revoked items: ${share.statistics.revokedItems}`} - - - - - {`Failed items: ${share.statistics.failedItems}`} - - - - - {`Pending items: ${share.statistics.pendingItems}`} - - - - - - - - - - - - ); -}; -ShareInboxListItem.propTypes = { - share: PropTypes.object.isRequired, - reload: PropTypes.func.isRequired -}; -export default ShareInboxListItem; diff --git 
a/frontend/src/views/Shares/ShareInboxTable.js b/frontend/src/views/Shares/ShareInboxTable.js deleted file mode 100644 index 3f2f76f55..000000000 --- a/frontend/src/views/Shares/ShareInboxTable.js +++ /dev/null @@ -1,150 +0,0 @@ -import { useCallback, useEffect, useState } from 'react'; -import { - Box, - Card, - CardHeader, - Container, - Divider, - IconButton, - Link, - Table, - TableBody, - TableCell, - TableHead, - TableRow -} from '@mui/material'; -import CircularProgress from '@mui/material/CircularProgress'; -import { Helmet } from 'react-helmet-async'; -import { Link as RouterLink } from 'react-router-dom'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import useSettings from '../../hooks/useSettings'; -import { useDispatch } from '../../store'; -import { SET_ERROR } from '../../store/errorReducer'; -import getShareRequestsToMe from '../../api/ShareObject/getShareRequestsToMe'; -import RefreshTableMenu from '../../components/RefreshTableMenu'; -import Scrollbar from '../../components/Scrollbar'; -import ArrowRightIcon from '../../icons/ArrowRight'; - -const ShareInboxTable = () => { - const dispatch = useDispatch(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter] = useState(Defaults.DefaultFilter); - const { settings } = useSettings(); - const [loading, setLoading] = useState(true); - const client = useClient(); - const fetchItems = useCallback(async () => { - setLoading(true); - await client - .query( - getShareRequestsToMe({ - filter: { - ...filter - } - }) - ) - .then((response) => { - setItems(response.data.getShareRequestsToMe); - }) - .catch((error) => { - dispatch({ type: SET_ERROR, error: error.Error }); - }) - .finally(() => setLoading(false)); - }, [filter, dispatch, client]); - - useEffect(() => { - if (client) { - fetchItems().catch((error) => { - dispatch({ type: SET_ERROR, error: error.message }); - }); - } - }, [client, filter.page, dispatch, fetchItems]); - - return ( - <> - - Shares Inbox | data.all - - - - - - } - title={Requests} - /> - - - - - - - Dataset - Requesters - AWS Account - Region - Actions - - - {loading ? ( - - ) : ( - - {items.nodes.length > 0 ? ( - items.nodes.map((share) => ( - - - - {share.dataset.datasetName} - - - - {share.principal.principalName} - - - {share.principal.AwsAccountId} - - {share.principal.region} - - true}> - - - - - )) - ) : ( - - No requests found - - )} - - )} -
-
-
-
-
-
-
- - ); -}; - -export default ShareInboxTable; diff --git a/frontend/src/views/Shares/ShareOutboxListItem.js b/frontend/src/views/Shares/ShareOutboxListItem.js deleted file mode 100644 index bb00bf1b4..000000000 --- a/frontend/src/views/Shares/ShareOutboxListItem.js +++ /dev/null @@ -1,162 +0,0 @@ -import { - Box, - Button, - Card, - CardHeader, - Divider, - Grid, - Link, - Typography -} from '@mui/material'; -import { Link as RouterLink } from 'react-router-dom'; -import PropTypes from 'prop-types'; -import ShareStatus from '../../components/ShareStatus'; -import TextAvatar from '../../components/TextAvatar'; - -const ShareOutboxListItem = (props) => { - const { share, reload } = props; - return ( - - - - } - disableTypography - subheader={ - - - - - - | For{' '} - - {share.dataset.datasetName} - {' '} - | {share.created} - - - } - title={ - - {share.owner} - - } - /> - - - {`Read access to Dataset: ${share.dataset.datasetName} - for Principal: ${share.principal.principalName} - from Environment: ${share.principal.environmentName}`} - - - - - - - {`Currently shared items: ${share.statistics.sharedItems}`} - - - - - {`Revoked items: ${share.statistics.revokedItems}`} - - - - - {`Failed items: ${share.statistics.failedItems}`} - - - - - {`Pending items: ${share.statistics.pendingItems}`} - - - - - - - - - - - - ); -}; -ShareOutboxListItem.propTypes = { - share: PropTypes.object.isRequired, - reload: PropTypes.func.isRequired -}; -export default ShareOutboxListItem; diff --git a/frontend/src/views/Stack/StackStatus.js b/frontend/src/views/Stack/StackStatus.js deleted file mode 100644 index a332d2a11..000000000 --- a/frontend/src/views/Stack/StackStatus.js +++ /dev/null @@ -1,143 +0,0 @@ -import React, { useEffect } from 'react'; -import { useSnackbar } from 'notistack'; -import { - Box, - CircularProgress, - Grid, - IconButton, - Typography -} from '@mui/material'; -import { CancelRounded } from '@mui/icons-material'; -import PropTypes from 'prop-types'; -import { SET_ERROR } from '../../store/errorReducer'; -import useClient from '../../hooks/useClient'; -import getStack from '../../api/Stack/getStack'; -import { useDispatch } from '../../store'; - -const StackStatus = ({ stack, setStack, environmentUri }) => { - const { enqueueSnackbar, closeSnackbar } = useSnackbar(); - const client = useClient(); - const dispatch = useDispatch(); - - useEffect(() => { - closeSnackbar(); - if (stack) { - switch (stack.status) { - case 'CREATE_IN_PROGRESS': - case 'UPDATE_IN_PROGRESS': - case 'REVIEW_IN_PROGRESS': - case 'PENDING': - enqueueSnackbar( - - - - - - - - AWS CloudFormation stack deployment is in progress ! - - - - , - { - key: new Date().getTime() + Math.random(), - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'info', - persist: true, - action: (key) => ( - { - closeSnackbar(key); - }} - > - - - ) - } - ); - break; - case 'CREATE_FAILED': - case 'DELETE_COMPLETE': - case 'DELETE_FAILED': - case 'CREATE_ROLLBACK_COMPLETE': - enqueueSnackbar( - - An error occurred during the deployment of the AWS CloudFormation - stack. Stack status is {stack.status}. 
- , - { - key: new Date().getTime() + Math.random(), - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'error', - persist: true, - action: (key) => ( - { - closeSnackbar(key); - }} - > - - - ) - } - ); - break; - default: - closeSnackbar(); - break; - } - } - const fetchItem = async () => { - const response = await client.query( - getStack(environmentUri, stack.stackUri) - ); - if (!response.errors && response.data.getStack !== null) { - setStack(response.data.getStack); - } else { - const error = response.errors - ? response.errors[0].message - : 'AWS CloudFormation stack not found'; - dispatch({ type: SET_ERROR, error }); - } - }; - const interval = setInterval(() => { - if (client && stack) { - fetchItem().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, 10000); - return () => clearInterval(interval); - }, [ - client, - stack, - dispatch, - enqueueSnackbar, - closeSnackbar, - environmentUri, - setStack - ]); - - return <>; -}; -StackStatus.propTypes = { - stack: PropTypes.object.isRequired, - setStack: PropTypes.func.isRequired, - environmentUri: PropTypes.string.isRequired -}; -export default StackStatus; diff --git a/frontend/src/views/Tables/TableGlueProperties.js b/frontend/src/views/Tables/TableGlueProperties.js deleted file mode 100644 index 84bfa2e34..000000000 --- a/frontend/src/views/Tables/TableGlueProperties.js +++ /dev/null @@ -1,59 +0,0 @@ -import PropTypes from 'prop-types'; -import { - Box, - Card, - CardContent, - CardHeader, - Divider, - List, - ListItem, - Typography -} from '@mui/material'; -import React from 'react'; -import Scrollbar from '../../components/Scrollbar'; - -const TableGlueProperties = (props) => { - const { glueProperties, ...other } = props; - - return ( - - - - - - {glueProperties && ( - - - {Object.entries(JSON.parse(glueProperties)).map( - ([key, value]) => ( - - - {key} - - - {value} - - - ) - )} - - - )} - - - - ); -}; - -TableGlueProperties.propTypes = { - glueProperties: PropTypes.string.isRequired -}; - -export default TableGlueProperties; diff --git a/frontend/src/views/Warehouses/WarehouseConnection.js b/frontend/src/views/Warehouses/WarehouseConnection.js deleted file mode 100644 index f6c12b40e..000000000 --- a/frontend/src/views/Warehouses/WarehouseConnection.js +++ /dev/null @@ -1,104 +0,0 @@ -import PropTypes from 'prop-types'; -import { useState } from 'react'; -import { - Box, - Card, - CardContent, - CardHeader, - Divider, - Typography -} from '@mui/material'; -import { LoadingButton } from '@mui/lab'; -import { FaExternalLinkAlt } from 'react-icons/fa'; -import useClient from '../../hooks/useClient'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import getClusterConsoleAccess from '../../api/RedshiftCluster/getClusterConsoleAccess'; - -const WarehouseConnection = (props) => { - const { warehouse } = props; - const client = useClient(); - const dispatch = useDispatch(); - const [openingQueryEditor, setOpeningQueryEditor] = useState(false); - const jdbc = warehouse.endpoint - ? `jdbc:redshift://${warehouse.endpoint}:${warehouse.port}/${warehouse.databaseName}` - : '-'; - const odbc = warehouse.endpoint - ? 
`Driver={Amazon Redshift (x64)}; Server=${ - warehouse.endpoint || '-' - }; Database=${warehouse.databaseName}` - : '-'; - const goToQueryEditor = async () => { - setOpeningQueryEditor(true); - const response = await client.query( - getClusterConsoleAccess(warehouse.clusterUri) - ); - if (!response.errors) { - window.open(response.data.getRedshiftClusterConsoleAccess, '_blank'); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setOpeningQueryEditor(false); - }; - - return ( - - - - - - - - Endpoint - - - {warehouse.endpoint} - - - - - Port - - - {warehouse.port} - - - - - JDBC URL - - - {jdbc} - - - - - ODBC URL - - - {odbc} - - - - } - sx={{ mr: 1 }} - variant="contained" - onClick={goToQueryEditor} - > - Redshift Query Editor - - - - - ); -}; - -WarehouseConnection.propTypes = { - warehouse: PropTypes.object.isRequired -}; - -export default WarehouseConnection; diff --git a/frontend/src/views/Warehouses/WarehouseCopyTableModal.js b/frontend/src/views/Warehouses/WarehouseCopyTableModal.js deleted file mode 100644 index 70ee6104a..000000000 --- a/frontend/src/views/Warehouses/WarehouseCopyTableModal.js +++ /dev/null @@ -1,250 +0,0 @@ -import PropTypes from 'prop-types'; -import { useSnackbar } from 'notistack'; -import { - Box, - CardContent, - Dialog, - FormHelperText, - MenuItem, - TextField, - Typography -} from '@mui/material'; -import { useCallback, useEffect, useState } from 'react'; -import { Formik } from 'formik'; -import * as Yup from 'yup'; -import { LoadingButton } from '@mui/lab'; -import { CopyAll } from '@mui/icons-material'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import useClient from '../../hooks/useClient'; -import { PagedResponseDefault } from '../../components/defaults'; -import listAvailableDatasetTables from '../../api/RedshiftCluster/listAvailableDatasetTables'; -import copyTableToCluster from '../../api/RedshiftCluster/copyTableToCluster'; -import * as Defaults from '../../components/defaults'; - -const WarehouseCopyTableModal = (props) => { - const client = useClient(); - const { warehouse, onApply, onClose, open, reload, ...other } = props; - const { enqueueSnackbar } = useSnackbar(); - const [filter] = useState(Defaults.SelectListFilter); - const [items, setItems] = useState(PagedResponseDefault); - const [itemOptions, setItemOptions] = useState([]); - const [selectedTable, setSelectedTable] = useState(''); - const dispatch = useDispatch(); - const [loading, setLoading] = useState(true); - - const fetchItems = useCallback(async () => { - setLoading(true); - const response = await client.query( - listAvailableDatasetTables({ - clusterUri: warehouse.clusterUri, - filter - }) - ); - if (!response.errors) { - setItems({ ...response.data.listRedshiftClusterAvailableDatasetTables }); - setItemOptions( - response.data.listRedshiftClusterAvailableDatasetTables.nodes.map( - (e) => ({ ...e, value: e, label: e.label }) - ) - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoading(false); - }, [client, dispatch, warehouse.clusterUri, filter]); - - async function submit(values, setStatus, setSubmitting, setErrors) { - try { - const input = { - clusterUri: warehouse.clusterUri, - datasetUri: values.table.datasetUri, - tableUri: values.table.tableUri, - schema: values.schema, - dataLocation: values.dataLocation || null - }; - const response = await client.mutate(copyTableToCluster(input)); - if (!response.errors) { - setStatus({ success: 
true }); - setSubmitting(false); - enqueueSnackbar('Table copy started', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - if (reload) { - reload(); - } - if (onApply) { - onApply(); - } - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - console.error(err); - setStatus({ success: false }); - setErrors({ submit: err.message }); - setSubmitting(false); - dispatch({ type: SET_ERROR, error: err.message }); - } - } - - useEffect(() => { - if (client) { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, fetchItems, dispatch]); - - if (!warehouse) { - return null; - } - - return ( - - - - Copy a table to cluster {warehouse.label} - - -

- You can specify the target schema and the S3 data location for the - copy command. This copy will be done on cluster{' '} - {warehouse.name} - and database {warehouse.databaseName} -

-
- {!loading && items && items.nodes.length <= 0 ? ( - - No tables found. - - ) : ( - - { - await submit(values, setStatus, setSubmitting, setErrors); - }} - > - {({ - errors, - handleBlur, - handleChange, - handleSubmit, - setFieldValue, - isSubmitting, - touched, - values - }) => ( -
- - - - - - { - setFieldValue('table', event.target.value); - setSelectedTable( - `(s3://${event.target.value.dataset.S3BucketName}/)` - ); - }} - select - value={values.table} - variant="outlined" - > - {itemOptions.map((table) => ( - - {table.label} - - ))} - - - - - - - {errors.submit && ( - - {errors.submit} - - )} - - } - color="primary" - disabled={isSubmitting} - type="submit" - variant="contained" - > - Copy table - - -
- )} -
-
- )} -
-
- ); -}; - -WarehouseCopyTableModal.propTypes = { - warehouse: PropTypes.object.isRequired, - onApply: PropTypes.func, - onClose: PropTypes.func, - reload: PropTypes.func, - open: PropTypes.bool.isRequired -}; - -export default WarehouseCopyTableModal; diff --git a/frontend/src/views/Warehouses/WarehouseCreateForm.js b/frontend/src/views/Warehouses/WarehouseCreateForm.js deleted file mode 100644 index 1ea77cc7f..000000000 --- a/frontend/src/views/Warehouses/WarehouseCreateForm.js +++ /dev/null @@ -1,619 +0,0 @@ -import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; -import { useSnackbar } from 'notistack'; -import { - Box, - Breadcrumbs, - Button, - Card, - CardContent, - CardHeader, - CircularProgress, - Container, - FormHelperText, - Grid, - Link, - MenuItem, - TextField, - Typography -} from '@mui/material'; -import { Helmet } from 'react-helmet-async'; -import { LoadingButton } from '@mui/lab'; -import { useCallback, useEffect, useState } from 'react'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import useSettings from '../../hooks/useSettings'; -import listEnvironments from '../../api/Environment/listEnvironments'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import ChipInput from '../../components/TagsInput'; -import createRedshiftCluster from '../../api/RedshiftCluster/createCluster'; -import listEnvironmentGroups from '../../api/Environment/listEnvironmentGroups'; -import * as Defaults from '../../components/defaults'; - -const WarehouseCreateForm = (props) => { - const navigate = useNavigate(); - const params = useParams(); - const { enqueueSnackbar } = useSnackbar(); - const dispatch = useDispatch(); - const client = useClient(); - const { settings } = useSettings(); - const [loading, setLoading] = useState(true); - const [groupOptions, setGroupOptions] = useState([]); - const [environmentOptions, setEnvironmentOptions] = useState([]); - const [environment, setEnvironment] = useState(null); - const nodeTypes = [ - { label: 'dc2.large', value: 'dc2.large' }, - { label: 'ds2.xlarge', value: 'ds2.xlarge' }, - { label: 'ds2.8xlarge', value: 'ds2.8xlarge' }, - { label: 'dc1.large', value: 'dc1.large' }, - { label: 'dc2.8xlarge', value: 'dc2.8xlarge' }, - { label: 'ra3.16xlarge', value: 'ra3.16xlarge' } - ]; - - const fetchEnvironments = useCallback(async () => { - setLoading(true); - const response = await client.query( - listEnvironments({ filter: Defaults.SelectListFilter }) - ); - if (!response.errors) { - setEnvironmentOptions( - response.data.listEnvironments.nodes.map((e) => ({ - ...e, - value: e.environmentUri, - label: e.label - })) - ); - setEnvironment( - response.data.listEnvironments.nodes[ - response.data.listEnvironments.nodes.findIndex( - (e) => e.environmentUri === params.uri - ) - ] - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoading(false); - }, [client, dispatch, params.uri]); - - const fetchGroups = useCallback( - async (environmentUri) => { - try { - const response = await client.query( - listEnvironmentGroups({ - filter: Defaults.SelectListFilter, - environmentUri - }) - ); - if (!response.errors) { - setGroupOptions( - response.data.listEnvironmentGroups.nodes.map((g) => ({ - value: g.groupUri, - label: g.groupUri - })) - ); - } else { - dispatch({ type: 
SET_ERROR, error: response.errors[0].message }); - } - } catch (e) { - dispatch({ type: SET_ERROR, error: e.message }); - } - }, - [client, dispatch] - ); - - useEffect(() => { - if (client) { - fetchEnvironments().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, fetchEnvironments, dispatch]); - - async function submit(values, setStatus, setSubmitting, setErrors) { - try { - const input = { - label: values.label, - description: values.description, - vpc: values.vpcId, - tags: values.tags, - nodeType: values.nodeType, - masterDatabaseName: values.masterDatabaseName, - masterUsername: values.masterUsername, - numberOfNodes: parseInt(values.numberOfNodes, 10), - SamlGroupName: values.SamlGroupName, - databaseName: values.databaseName - }; - const response = await client.mutate( - createRedshiftCluster({ - environmentUri: values.environment.environmentUri, - input - }) - ); - if (!response.errors) { - setStatus({ success: true }); - setSubmitting(false); - enqueueSnackbar('Amazon Redshift cluster creation started', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - navigate( - `/console/warehouse/${response.data.createRedshiftCluster.clusterUri}` - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - console.error(err); - setStatus({ success: false }); - setErrors({ submit: err.message }); - setSubmitting(false); - dispatch({ type: SET_ERROR, error: err.message }); - } - } - if (loading || !environmentOptions.length > 0 || !environment) { - return ; - } - - return ( - <> - - Warehouses: Warehouse Create | data.all - - - - - - - Create a new warehouse - - } - sx={{ mt: 1 }} - > - - Organize - - - Environments - - - {environment.label} - - - Warehouses - - - Create - - - - - - - - - - - { - await submit(values, setStatus, setSubmitting, setErrors); - }} - > - {({ - errors, - handleBlur, - handleChange, - handleSubmit, - isSubmitting, - setFieldValue, - touched, - values - }) => ( -
- - - - - - - - - - {touched.description && errors.description && ( - - - {errors.description} - - - )} - - - - - - - - - - - - - - - - - - - - - - environmentOptions[ - environmentOptions.findIndex( - (e) => e.environmentUri === params.uri - ) - ] - } - onChange={(event) => { - setFieldValue('SamlGroupName', ''); - fetchGroups( - event.target.value.environmentUri - ).catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - setFieldValue('environment', event.target.value); - }} - select - value={values.environment} - variant="outlined" - > - {environmentOptions.map((e) => ( - - {e.label} - - ))} - - - - - - - - - - - - - - {nodeTypes.map((node) => ( - - {node.label} - - ))} - - - - - - - - - - - {groupOptions.map((group) => ( - - {group.label} - - ))} - - - - - { - setFieldValue('tags', [...chip]); - }} - /> - - - - {errors.submit && ( - - {errors.submit} - - )} - - - Create Warehouse - - - - -
- )} -
-
-
-
- - ); -}; - -export default WarehouseCreateForm; diff --git a/frontend/src/views/Warehouses/WarehouseCredentials.js b/frontend/src/views/Warehouses/WarehouseCredentials.js deleted file mode 100644 index a47221c2a..000000000 --- a/frontend/src/views/Warehouses/WarehouseCredentials.js +++ /dev/null @@ -1,122 +0,0 @@ -import PropTypes from 'prop-types'; -import { useCallback, useEffect, useState } from 'react'; -import { - Card, - CardContent, - CardHeader, - Divider, - List, - ListItem, - Typography -} from '@mui/material'; -import useClient from '../../hooks/useClient'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import getRedshiftClusterDatabaseCredentials from '../../api/RedshiftCluster/getClusterDatabaseCredentials'; - -const WarehouseCredentials = (props) => { - const { warehouse } = props; - const client = useClient(); - const dispatch = useDispatch(); - const [clusterCredentials, setClusterCredentials] = useState({ - password: '-' - }); - - const getCredentials = useCallback(async () => { - const response = await client.query( - getRedshiftClusterDatabaseCredentials(warehouse.clusterUri) - ); - if (!response.errors) { - setClusterCredentials({ - ...response.data.getRedshiftClusterDatabaseCredentials - }); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }, [client, warehouse.clusterUri, dispatch]); - - useEffect(() => { - if (client && warehouse) { - getCredentials().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, warehouse, getCredentials, dispatch]); - - return ( - - - - - - - - Cluster identifier - - - {warehouse.name} - - - - - Database name - - - {warehouse.databaseName} - - - - - Database user - - - {warehouse.databaseUser} - - - - - Database password - - - {clusterCredentials?.password || '-'} - - - - - - ); -}; - -WarehouseCredentials.propTypes = { - warehouse: PropTypes.object.isRequired -}; - -export default WarehouseCredentials; diff --git a/frontend/src/views/Warehouses/WarehouseDatasets.js b/frontend/src/views/Warehouses/WarehouseDatasets.js deleted file mode 100644 index b7e820f93..000000000 --- a/frontend/src/views/Warehouses/WarehouseDatasets.js +++ /dev/null @@ -1,292 +0,0 @@ -import PropTypes from 'prop-types'; -import { useCallback, useEffect, useState } from 'react'; -import { - Box, - Card, - CardContent, - CardHeader, - Divider, - Grid, - IconButton, - InputAdornment, - Table, - TableBody, - TableCell, - TableHead, - TableRow, - TextField, - Typography -} from '@mui/material'; -import CircularProgress from '@mui/material/CircularProgress'; -import { DeleteOutlined, Warning } from '@mui/icons-material'; -import { LoadingButton } from '@mui/lab'; -import { useSnackbar } from 'notistack'; -import { BsFolder } from 'react-icons/bs'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import Scrollbar from '../../components/Scrollbar'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import SearchIcon from '../../icons/Search'; -import PlusIcon from '../../icons/Plus'; -import DeleteObjectModal from '../../components/DeleteObjectModal'; -import removeDatasetFromCluster from '../../api/RedshiftCluster/removeDatasetFromCluster'; -import WarehouseLoadDatasetModal from './WarehouseLoadDatasetModal'; -import Pager from '../../components/Pager'; -import listClusterDatasets from '../../api/RedshiftCluster/listClusterDatasets'; -import 
WarehouseTables from './WarehouseTables'; - -const WarehouseDatasets = ({ warehouse }) => { - const client = useClient(); - const dispatch = useDispatch(); - const { enqueueSnackbar } = useSnackbar(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); - const [loading, setLoading] = useState(null); - const [inputValue, setInputValue] = useState(''); - const [isLoadDatasetsOpen, setIsLoadDatasetsOpen] = useState(false); - const [isDeleteObjectModalOpen, setIsDeleteObjectModalOpen] = useState(false); - const [datasetToDelete, setDatasetToDelete] = useState(null); - const handleDeleteObjectModalOpen = (dataset) => { - setDatasetToDelete(dataset); - setIsDeleteObjectModalOpen(true); - }; - const handleDeleteObjectModalClose = () => { - setDatasetToDelete(null); - setIsDeleteObjectModalOpen(false); - }; - - const fetchItems = useCallback(async () => { - setLoading(true); - const response = await client.query( - listClusterDatasets({ - clusterUri: warehouse.clusterUri, - filter: { ...filter } - }) - ); - if (!response.errors) { - setItems({ ...response.data.listRedshiftClusterDatasets }); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoading(false); - }, [warehouse.clusterUri, client, dispatch, filter]); - - const handleLoadDatasetsModalOpen = () => { - setIsLoadDatasetsOpen(true); - }; - - const handleLoadDatasetsModalClose = () => { - setIsLoadDatasetsOpen(false); - }; - - const unloadDataset = useCallback(async () => { - const response = await client.mutate( - removeDatasetFromCluster({ - clusterUri: warehouse.clusterUri, - datasetUri: datasetToDelete.datasetUri - }) - ); - if (!response.errors) { - handleDeleteObjectModalClose(); - enqueueSnackbar('Dataset unloaded', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }, [ - warehouse.clusterUri, - enqueueSnackbar, - fetchItems, - dispatch, - client, - datasetToDelete - ]); - - const handleInputChange = (event) => { - setInputValue(event.target.value); - setFilter({ ...filter, term: event.target.value }); - }; - - const handleInputKeyup = (event) => { - if (event.code === 'Enter') { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }; - - const handlePageChange = async (event, value) => { - if (value <= items.pages && value !== items.page) { - await setFilter({ ...filter, page: value }); - } - }; - - useEffect(() => { - if (client) { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, filter.page, fetchItems, dispatch]); - - return ( - - - } - sx={{ m: 1 }} - variant="outlined" - > - Load dataset - - } - title={ - - - Loaded Datasets - - } - /> - - - - - - - - ) - }} - onChange={handleInputChange} - onKeyUp={handleInputKeyup} - placeholder="Search" - value={inputValue} - variant="outlined" - /> - - - - - - - - - Name - S3 Bucket - Glue Database - Actions - - - {loading ? ( - - ) : ( - - {items.nodes.length > 0 ? ( - items.nodes.map((dataset) => ( - - {dataset.name} - {`s3://${dataset.S3BucketName}`} - {dataset.GlueDatabaseName} - - { - setDatasetToDelete(dataset); - handleDeleteObjectModalOpen(dataset); - }} - > - - - - - )) - ) : ( - - No datasets loaded to cluster. - - )} - - )} -
- {items.nodes.length > 0 && ( - - )} -
-
-
- - {isLoadDatasetsOpen && ( - - )} - - {datasetToDelete && ( - - - - Dataset Spectrum schema will be removed from the - cluster. - - - - } - /> - )} - - - -
- ); -}; - -WarehouseDatasets.propTypes = { - warehouse: PropTypes.object.isRequired -}; - -export default WarehouseDatasets; diff --git a/frontend/src/views/Warehouses/WarehouseEditForm.js b/frontend/src/views/Warehouses/WarehouseEditForm.js deleted file mode 100644 index 13c7977a0..000000000 --- a/frontend/src/views/Warehouses/WarehouseEditForm.js +++ /dev/null @@ -1,357 +0,0 @@ -import { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; -import { useSnackbar } from 'notistack'; -import { - Box, - Breadcrumbs, - Button, - Card, - CardContent, - CardHeader, - CircularProgress, - Container, - FormHelperText, - Grid, - Link, - MenuItem, - TextField, - Typography -} from '@mui/material'; -import { Helmet } from 'react-helmet-async'; -import { LoadingButton } from '@mui/lab'; -import useClient from '../../hooks/useClient'; -import useGroups from '../../hooks/useGroups'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import useSettings from '../../hooks/useSettings'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import ChipInput from '../../components/TagsInput'; -import getCluster from '../../api/RedshiftCluster/getCluster'; - -const WarehouseEditForm = (props) => { - const dispatch = useDispatch(); - const navigate = useNavigate(); - const params = useParams(); - const { enqueueSnackbar } = useSnackbar(); - const client = useClient(); - const groups = useGroups(); - const { settings } = useSettings(); - const [loading, setLoading] = useState(true); - const [warehouse, setWarehouse] = useState(null); - const groupOptions = groups - ? groups.map((g) => ({ value: g, label: g })) - : []; - - const fetchItem = useCallback(async () => { - setLoading(true); - const response = await client.query(getCluster(params.uri)); - if (!response.errors && response.data.get !== null) { - setWarehouse(response.data.getRedshiftCluster); - } else { - const error = response.errors - ? 
response.errors[0].message - : 'Warehouse not found'; - dispatch({ type: SET_ERROR, error }); - } - setLoading(false); - }, [client, dispatch, params.uri]); - - useEffect(() => { - if (client) { - fetchItem().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); - } - }, [client, fetchItem, dispatch]); - - async function submit(values, setStatus, setSubmitting, setErrors) { - try { - const response = {}; - if (!response.errors) { - setStatus({ success: true }); - setSubmitting(false); - enqueueSnackbar('Warehouse updated', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - navigate( - `/console/warehouse/${response.data.updateSqlWarehouse.clusterUri}` - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - console.error(err); - setStatus({ success: false }); - setErrors({ submit: err.message }); - setSubmitting(false); - dispatch({ type: SET_ERROR, error: err.message }); - } - } - - if (loading || !(warehouse && warehouse.environment)) { - return ; - } - - return ( - <> - - Dataset: Warehouse Update | data.all - - - - - - - Edit warehouse {warehouse.label} - - } - sx={{ mt: 1 }} - > - - Discover - - - Warehouses - - - {warehouse.label} - - - - - - - - - - - { - await submit(values, setStatus, setSubmitting, setErrors); - }} - > - {({ - errors, - handleBlur, - handleChange, - handleSubmit, - isSubmitting, - setFieldValue, - touched, - values - }) => ( -
- - - - - - - - - - {touched.description && errors.description && ( - - - {errors.description} - - - )} - - - - - - { - setFieldValue( - 'SamlGroupName', - event.target.value - ); - }} - select - value={values.SamlGroupName} - variant="outlined" - > - {groupOptions.map((group) => ( - - {group.label} - - ))} - - - - - { - setFieldValue('tags', [...chip]); - }} - /> - - - - - - - - - - - - - - - - - - - - Save - - - - -
- )} -
-
-
-
- - ); -}; - -export default WarehouseEditForm; diff --git a/frontend/src/views/Warehouses/WarehouseImportForm.js b/frontend/src/views/Warehouses/WarehouseImportForm.js deleted file mode 100644 index 8c6ad3b32..000000000 --- a/frontend/src/views/Warehouses/WarehouseImportForm.js +++ /dev/null @@ -1,490 +0,0 @@ -import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom'; -import * as Yup from 'yup'; -import { Formik } from 'formik'; -import { useSnackbar } from 'notistack'; -import { - Box, - Breadcrumbs, - Button, - Card, - CardContent, - CardHeader, - CircularProgress, - Container, - FormHelperText, - Grid, - Link, - MenuItem, - TextField, - Typography -} from '@mui/material'; -import { Helmet } from 'react-helmet-async'; -import { LoadingButton } from '@mui/lab'; -import { useCallback, useEffect, useState } from 'react'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import ArrowLeftIcon from '../../icons/ArrowLeft'; -import useSettings from '../../hooks/useSettings'; -import listEnvironments from '../../api/Environment/listEnvironments'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import ChipInput from '../../components/TagsInput'; -import importRedshiftCluster from '../../api/RedshiftCluster/importCluster'; -import listEnvironmentGroups from '../../api/Environment/listEnvironmentGroups'; -import * as Defaults from '../../components/defaults'; - -const WarehouseCreateForm = (props) => { - const navigate = useNavigate(); - const params = useParams(); - const { enqueueSnackbar } = useSnackbar(); - const dispatch = useDispatch(); - const client = useClient(); - const { settings } = useSettings(); - const [loading, setLoading] = useState(true); - const [groupOptions, setGroupOptions] = useState([]); - const [environmentOptions, setEnvironmentOptions] = useState([]); - const [environment, setEnvironment] = useState(null); - - const fetchEnvironments = useCallback(async () => { - setLoading(true); - const response = await client.query( - listEnvironments({ filter: Defaults.SelectListFilter }) - ); - if (!response.errors) { - setEnvironmentOptions( - response.data.listEnvironments.nodes.map((e) => ({ - ...e, - value: e.environmentUri, - label: e.label - })) - ); - setEnvironment( - response.data.listEnvironments.nodes[ - response.data.listEnvironments.nodes.findIndex( - (e) => e.environmentUri === params.uri - ) - ] - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoading(false); - }, [client, dispatch, params.uri]); - - const fetchGroups = useCallback( - async (environmentUri) => { - try { - const response = await client.query( - listEnvironmentGroups({ - filter: Defaults.SelectListFilter, - environmentUri - }) - ); - if (!response.errors) { - setGroupOptions( - response.data.listEnvironmentGroups.nodes.map((g) => ({ - value: g.groupUri, - label: g.groupUri - })) - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (e) { - dispatch({ type: SET_ERROR, error: e.message }); - } - }, - [client, dispatch] - ); - - useEffect(() => { - if (client) { - fetchEnvironments().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, fetchEnvironments, dispatch]); - - async function submit(values, setStatus, setSubmitting, setErrors) { - try { - const input = { - label: values.label, - description: values.description, - clusterIdentifier: 
values.clusterIdentifier, - tags: values.tags, - SamlGroupName: values.SamlGroupName, - databaseName: values.databaseName - }; - const response = await client.mutate( - importRedshiftCluster({ - environmentUri: values.environment.environmentUri, - input - }) - ); - if (!response.errors) { - setStatus({ success: true }); - setSubmitting(false); - enqueueSnackbar('Amazon Redshift cluster import started', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - navigate( - `/console/warehouse/${response.data.importRedshiftCluster.clusterUri}` - ); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - } catch (err) { - console.error(err); - setStatus({ success: false }); - setErrors({ submit: err.message }); - setSubmitting(false); - dispatch({ type: SET_ERROR, error: err.message }); - } - } - if (loading || !environmentOptions.length > 0 || !environment) { - return ; - } - - return ( - <> - - Warehouses: Warehouse Import | data.all - - - - - - - Import warehouse - - } - sx={{ mt: 1 }} - > - - Organize - - - Environments - - - {environment.label} - - - - - - - - - - - { - await submit(values, setStatus, setSubmitting, setErrors); - }} - > - {({ - errors, - handleBlur, - handleChange, - handleSubmit, - isSubmitting, - setFieldValue, - touched, - values - }) => ( -
- - - - - - - - - - {touched.description && errors.description && ( - - - {errors.description} - - - )} - - - - - - - - - - - - - - - - - - - environmentOptions[ - environmentOptions.findIndex( - (e) => e.environmentUri === params.uri - ) - ] - } - onChange={(event) => { - setFieldValue('SamlGroupName', ''); - fetchGroups( - event.target.value.environmentUri - ).catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - setFieldValue('environment', event.target.value); - }} - select - value={values.environment} - variant="outlined" - > - {environmentOptions.map((e) => ( - - {e.label} - - ))} - - - - - - - - - - - - - - {groupOptions.map((group) => ( - - {group.label} - - ))} - - - - - { - setFieldValue('tags', [...chip]); - }} - /> - - - - {errors.submit && ( - - {errors.submit} - - )} - - - Import Warehouse - - - - -
- )} -
-
-
-
- - ); -}; - -export default WarehouseCreateForm; diff --git a/frontend/src/views/Warehouses/WarehouseLoadDatasetModal.js b/frontend/src/views/Warehouses/WarehouseLoadDatasetModal.js deleted file mode 100644 index 561ee5090..000000000 --- a/frontend/src/views/Warehouses/WarehouseLoadDatasetModal.js +++ /dev/null @@ -1,193 +0,0 @@ -import PropTypes from 'prop-types'; -import { useSnackbar } from 'notistack'; -import { - Box, - Dialog, - IconButton, - Table, - TableBody, - TableCell, - TableHead, - TableRow, - Typography -} from '@mui/material'; -import CircularProgress from '@mui/material/CircularProgress'; -import { useCallback, useEffect, useState } from 'react'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import useClient from '../../hooks/useClient'; -import Scrollbar from '../../components/Scrollbar'; -import Pager from '../../components/Pager'; -import * as Defaults from '../../components/defaults'; -import { PagedResponseDefault } from '../../components/defaults'; -import listAvailableDatasets from '../../api/RedshiftCluster/listAvailableDatasets'; -import addDatasetToCluster from '../../api/RedshiftCluster/addDatasetToCluster'; -import PlusIcon from '../../icons/Plus'; - -const WarehouseLoadDatasetModal = (props) => { - const client = useClient(); - const { warehouse, onApply, onClose, open, reload, ...other } = props; - const { enqueueSnackbar } = useSnackbar(); - const [filter, setFilter] = useState(Defaults.DefaultFilter); - const [items, setItems] = useState(PagedResponseDefault); - const dispatch = useDispatch(); - const [loading, setLoading] = useState(true); - - const fetchItems = useCallback(async () => { - setLoading(true); - const response = await client.query( - listAvailableDatasets({ - clusterUri: warehouse.clusterUri, - filter: { - ...filter - } - }) - ); - if (!response.errors) { - setItems({ ...response.data.listRedshiftClusterAvailableDatasets }); - reload(); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoading(false); - }, [warehouse.clusterUri, client, dispatch, filter, reload]); - - const loadDataset = useCallback( - async (dataset) => { - const response = await client.mutate( - addDatasetToCluster({ - clusterUri: warehouse.clusterUri, - datasetUri: dataset.datasetUri - }) - ); - if (!response.errors) { - enqueueSnackbar('Dataset loading to cluster started', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - await fetchItems(); - reload(true); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }, - [ - client, - dispatch, - enqueueSnackbar, - reload, - warehouse.clusterUri, - fetchItems - ] - ); - - const handlePageChange = async (event, value) => { - if (value <= items.pages && value !== items.page) { - await setFilter({ ...filter, isShared: true, page: value }); - } - }; - - useEffect(() => { - if (client) { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, fetchItems, dispatch]); - - if (!warehouse) { - return null; - } - - return ( - - - - Load datasets to cluster {warehouse.label} - - - Dataset will be loaded from Amazon S3 to Amazon Redshift using - Redshift Spectrum - - {!loading && items && items.nodes.length <= 0 ? ( - - No items to add. - - ) : ( - - - - - - Name - AWS Account - Region - S3 Bucket - Glue Database - Actions - - - {loading ? ( - - ) : ( - - {items.nodes.length > 0 ? 
( - items.nodes.map((dataset) => ( - - {dataset.name} - {dataset.AwsAccountId} - {dataset.region} - - {`s3://${dataset.S3BucketName}`} - - {dataset.GlueDatabaseName} - - { - loadDataset(dataset); - }} - > - - - - - )) - ) : ( - - No datasets found - - )} - - )} -
- -
-
- )} -
-
- ); -}; - -WarehouseLoadDatasetModal.propTypes = { - warehouse: PropTypes.object.isRequired, - onApply: PropTypes.func, - onClose: PropTypes.func, - reload: PropTypes.func, - open: PropTypes.bool.isRequired -}; - -export default WarehouseLoadDatasetModal; diff --git a/frontend/src/views/Warehouses/WarehouseOverview.js b/frontend/src/views/Warehouses/WarehouseOverview.js deleted file mode 100644 index ff22a5ff7..000000000 --- a/frontend/src/views/Warehouses/WarehouseOverview.js +++ /dev/null @@ -1,56 +0,0 @@ -import { Box, Grid } from '@mui/material'; -import PropTypes from 'prop-types'; -import ObjectBrief from '../../components/ObjectBrief'; -import ObjectMetadata from '../../components/ObjectMetadata'; -import WarehouseConnection from './WarehouseConnection'; -import WarehouseCredentials from './WarehouseCredentials'; - -const WarehouseOverview = (props) => { - const { warehouse, ...other } = props; - - return ( - - - - 0 - ? warehouse.tags - : ['-'] - } - /> - - - - - - - {' '} - - - - - - - - - ); -}; - -WarehouseOverview.propTypes = { - warehouse: PropTypes.object.isRequired -}; - -export default WarehouseOverview; diff --git a/frontend/src/views/Warehouses/WarehouseTables.js b/frontend/src/views/Warehouses/WarehouseTables.js deleted file mode 100644 index 7f589d8c3..000000000 --- a/frontend/src/views/Warehouses/WarehouseTables.js +++ /dev/null @@ -1,250 +0,0 @@ -import PropTypes from 'prop-types'; -import { useCallback, useEffect, useState } from 'react'; -import { - Box, - Card, - CardHeader, - Divider, - Grid, - IconButton, - InputAdornment, - Table, - TableBody, - TableCell, - TableHead, - TableRow, - TextField -} from '@mui/material'; -import CircularProgress from '@mui/material/CircularProgress'; -import { DeleteOutlined } from '@mui/icons-material'; -import { LoadingButton } from '@mui/lab'; -import { useSnackbar } from 'notistack'; -import { BsTable } from 'react-icons/bs'; -import useClient from '../../hooks/useClient'; -import * as Defaults from '../../components/defaults'; -import Scrollbar from '../../components/Scrollbar'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import SearchIcon from '../../icons/Search'; -import PlusIcon from '../../icons/Plus'; -import Pager from '../../components/Pager'; -import listClusterDatasetTables from '../../api/RedshiftCluster/listClusterDatasetTables'; -import WarehouseCopyTableModal from './WarehouseCopyTableModal'; -import disableRedshiftClusterDatasetCopy from '../../api/RedshiftCluster/disableClusterDatasetCopy'; - -const WarehouseTables = ({ warehouse }) => { - const client = useClient(); - const dispatch = useDispatch(); - const { enqueueSnackbar } = useSnackbar(); - const [items, setItems] = useState(Defaults.PagedResponseDefault); - const [filter, setFilter] = useState(Defaults.DefaultFilter); - const [loading, setLoading] = useState(null); - const [inputValue, setInputValue] = useState(''); - const [isCopyTablesOpen, setIsLoadDatasetsOpen] = useState(false); - - const fetchItems = useCallback(async () => { - setLoading(true); - const response = await client.query( - listClusterDatasetTables({ - clusterUri: warehouse.clusterUri, - filter - }) - ); - if (!response.errors) { - setItems({ ...response.data.listRedshiftClusterCopyEnabledTables }); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setLoading(false); - }, [client, dispatch, filter, warehouse.clusterUri]); - - const handleCopyTablesModalOpen = () => { - setIsLoadDatasetsOpen(true); 
- }; - - const handleCopyTablesModalClose = () => { - setIsLoadDatasetsOpen(false); - }; - - const handleInputChange = (event) => { - setInputValue(event.target.value); - setFilter({ ...filter, term: event.target.value }); - }; - - const handleInputKeyup = (event) => { - if (event.code === 'Enter') { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }; - - const handlePageChange = async (event, value) => { - if (value <= items.pages && value !== items.page) { - await setFilter({ ...filter, page: value }); - } - }; - - const disableCopy = useCallback( - async (table) => { - const res = await client.mutate( - disableRedshiftClusterDatasetCopy({ - clusterUri: warehouse.clusterUri, - datasetUri: table.datasetUri, - tableUri: table.tableUri - }) - ); - if (!res.errors) { - enqueueSnackbar('Table copy disabled', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - await fetchItems(); - } else { - dispatch({ type: SET_ERROR, error: res.errors[0].message }); - } - }, - [client, enqueueSnackbar, dispatch, warehouse.clusterUri, fetchItems] - ); - - useEffect(() => { - if (client) { - fetchItems().catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - } - }, [client, dispatch, filter.page, fetchItems]); - - return ( - - - } - sx={{ m: 1 }} - variant="outlined" - > - Copy table - - } - title={ - - - Tables copied from loaded datasets - - } - /> - - - - - - - - ) - }} - onChange={handleInputChange} - onKeyUp={handleInputKeyup} - placeholder="Search" - value={inputValue} - variant="outlined" - /> - - - - - - - - - Name - Schema - Location - Actions - - - {loading ? ( - - ) : ( - - {items.nodes.length > 0 ? ( - items.nodes.map((table) => ( - - {table.name} - {table.RedshiftSchema} - {table.RedshiftCopyDataLocation} - - { - disableCopy(table).catch((e) => - dispatch({ type: SET_ERROR, error: e.message }) - ); - }} - > - - - - - )) - ) : ( - - No tables found. - - )} - - )} -
- {items.nodes.length > 0 && ( - - )} -
-
-
- - {isCopyTablesOpen && ( - - )} -
- ); -}; - -WarehouseTables.propTypes = { - warehouse: PropTypes.object.isRequired -}; - -export default WarehouseTables; diff --git a/frontend/src/views/Warehouses/WarehouseView.js b/frontend/src/views/Warehouses/WarehouseView.js deleted file mode 100644 index 32c143244..000000000 --- a/frontend/src/views/Warehouses/WarehouseView.js +++ /dev/null @@ -1,343 +0,0 @@ -import React, { useCallback, useEffect, useState } from 'react'; -import { Link as RouterLink, useParams } from 'react-router-dom'; -import { Helmet } from 'react-helmet-async'; -import { - Box, - Breadcrumbs, - Button, - CircularProgress, - Container, - Divider, - Grid, - Link, - Tab, - Tabs, - Typography -} from '@mui/material'; -import { FaAws, FaTrash } from 'react-icons/fa'; -import { useNavigate } from 'react-router'; -import * as PropTypes from 'prop-types'; -import { - Folder, - Info, - LocalOffer, - PauseOutlined, - PlayArrowOutlined -} from '@mui/icons-material'; -import { useSnackbar } from 'notistack'; -import { LoadingButton } from '@mui/lab'; -import useSettings from '../../hooks/useSettings'; -import useClient from '../../hooks/useClient'; -import ChevronRightIcon from '../../icons/ChevronRight'; -import Stack from '../Stack/Stack'; -import { SET_ERROR } from '../../store/errorReducer'; -import { useDispatch } from '../../store'; -import WarehouseOverview from './WarehouseOverview'; -import DeleteObjectWithFrictionModal from '../../components/DeleteObjectWithFrictionModal'; -import deleteRedshiftCluster from '../../api/RedshiftCluster/deleteCluster'; -import getCluster from '../../api/RedshiftCluster/getCluster'; -import pauseRedshiftCluster from '../../api/RedshiftCluster/pauseCluster'; -import resumeRedshiftCluster from '../../api/RedshiftCluster/resumeCluster'; -import WarehouseDatasets from './WarehouseDatasets'; -import StackStatus from '../Stack/StackStatus'; -import KeyValueTagList from '../KeyValueTags/KeyValueTagList'; - -const tabs = [ - { label: 'Overview', value: 'overview', icon: }, - { label: 'Datasets', value: 'datasets', icon: }, - { label: 'Tags', value: 'tags', icon: }, - { label: 'Stack', value: 'stack', icon: } -]; -function WarehouseViewPageHeader({ - warehouse, - deleteCluster, - pauseCluster, - resumeCluster, - resumeLoader, - pauseLoader -}) { - return ( - - - - Warehouse {warehouse.label} - - } - sx={{ mt: 1 }} - > - - Organize - - - Environments - - - {warehouse.environment.label} - - - {warehouse.label} - - - - - - {resumeCluster && ( - } - sx={{ mt: 1, mr: 1 }} - onClick={resumeCluster} - type="button" - variant="outlined" - > - Resume - - )} - {pauseCluster && ( - } - sx={{ mt: 1, mr: 1 }} - onClick={pauseCluster} - type="button" - variant="outlined" - > - Pause - - )} - - - - - ); -} - -WarehouseViewPageHeader.propTypes = { - warehouse: PropTypes.object.isRequired, - deleteCluster: PropTypes.func.isRequired, - pauseCluster: PropTypes.func.isRequired, - resumeCluster: PropTypes.func.isRequired, - resumeLoader: PropTypes.bool.isRequired, - pauseLoader: PropTypes.bool.isRequired -}; -const WarehouseView = () => { - const dispatch = useDispatch(); - const { settings } = useSettings(); - const { enqueueSnackbar } = useSnackbar(); - const params = useParams(); - const client = useClient(); - const navigate = useNavigate(); - const [currentTab, setCurrentTab] = useState('overview'); - const [loading, setLoading] = useState(true); - const [warehouse, setWarehouse] = useState(null); - const [stack, setStack] = useState(null); - const [showResumeCluster, setShowResumeCluster] = 
useState(false); - const [showPauseCluster, setShowPauseCluster] = useState(false); - const [isDeleteObjectModalOpen, setIsDeleteObjectModalOpen] = useState(false); - const handleDeleteObjectModalOpen = () => { - setIsDeleteObjectModalOpen(true); - }; - - const handleDeleteObjectModalClose = () => { - setIsDeleteObjectModalOpen(false); - }; - - const fetchItem = useCallback(async () => { - setLoading(true); - const response = await client.query(getCluster(params.uri)); - if (!response.errors && response.data.getRedshiftCluster !== null) { - setWarehouse(response.data.getRedshiftCluster); - if (stack) { - setStack(response.data.getRedshiftCluster.stack); - } - } else { - const error = response.errors - ? response.errors[0].message - : 'Warehouse not found'; - dispatch({ type: SET_ERROR, error }); - } - setLoading(false); - }, [client, dispatch, params.uri, stack]); - useEffect(() => { - if (client) { - fetchItem().catch((e) => dispatch({ type: SET_ERROR, error: e.message })); - } - }, [client, fetchItem, dispatch]); - - const handleTabsChange = (event, value) => { - setCurrentTab(value); - }; - - const deleteCluster = async (deleteFromAWS = false) => { - const response = await client.mutate( - deleteRedshiftCluster(warehouse.clusterUri, deleteFromAWS) - ); - if (!response.errors) { - handleDeleteObjectModalClose(); - navigate(`/console/environments/${warehouse.environment.environmentUri}`); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - }; - - const pauseCluster = async () => { - const response = await client.mutate( - pauseRedshiftCluster(warehouse.clusterUri) - ); - if (!response.errors) { - enqueueSnackbar('Amazon Redshift cluster pause started', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - await fetchItem(); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setShowPauseCluster(false); - }; - const resumeCluster = async () => { - const response = await client.mutate( - resumeRedshiftCluster(warehouse.clusterUri) - ); - if (!response.errors) { - enqueueSnackbar('Amazon Redshift cluster resume started', { - anchorOrigin: { - horizontal: 'right', - vertical: 'top' - }, - variant: 'success' - }); - await fetchItem(); - } else { - dispatch({ type: SET_ERROR, error: response.errors[0].message }); - } - setShowResumeCluster(false); - }; - - if (loading) { - return ; - } - if (!warehouse) { - return null; - } - - return ( - <> - - Warehouses: Warehouse Details | data.all - - - - - - - - {tabs.map((tab) => ( - - ))} - - - - - {currentTab === 'overview' && ( - - )} - {currentTab === 'datasets' && ( - - )} - {currentTab === 'tags' && ( - - )} - {currentTab === 'stack' && ( - - )} - - - - - - ); -}; - -export default WarehouseView; diff --git a/frontend/yarn.lock b/frontend/yarn.lock index 1c2e67c95..2b249630c 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -5,41 +5,47 @@ "@aashutoshrathi/word-wrap@^1.2.3": version "1.2.6" resolved "https://registry.yarnpkg.com/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz#bd9154aec9983f77b3a034ecaa015c2e4201f6cf" + integrity sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA== "@adobe/css-tools@^4.0.1": - version "4.2.0" - resolved "https://registry.yarnpkg.com/@adobe/css-tools/-/css-tools-4.2.0.tgz#e1a84fca468f4b337816fcb7f0964beb620ba855" + version "4.3.1" + resolved "https://registry.yarnpkg.com/@adobe/css-tools/-/css-tools-4.3.1.tgz#abfccb8ca78075a2b6187345c26243c1a0842f28" + 
integrity sha512-/62yikz7NLScCGAAST5SHdnjaDJQBDq0M2muyRTpf2VQhw6StBg2ALiu73zSJQ4fMVLA+0uBhBHAle7Wg+2kSg== "@alloc/quick-lru@^5.2.0": version "5.2.0" resolved "https://registry.yarnpkg.com/@alloc/quick-lru/-/quick-lru-5.2.0.tgz#7bf68b20c0a350f936915fcae06f58e32007ce30" + integrity sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw== "@ampproject/remapping@^2.2.0": version "2.2.1" resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.1.tgz#99e8e11851128b8702cd57c33684f1d0f260b630" + integrity sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg== dependencies: "@jridgewell/gen-mapping" "^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" "@apideck/better-ajv-errors@^0.3.1": version "0.3.6" - resolved "https://registry.npmjs.org/@apideck/better-ajv-errors/-/better-ajv-errors-0.3.6.tgz" + resolved "https://registry.yarnpkg.com/@apideck/better-ajv-errors/-/better-ajv-errors-0.3.6.tgz#957d4c28e886a64a8141f7522783be65733ff097" + integrity sha512-P+ZygBLZtkp0qqOAJJVX4oX/sFo5JR3eBWwwuqHHhK0GIgQOKWrAfiAaWX0aArHkRWHMuggFEgAZNxVPwPZYaA== dependencies: json-schema "^0.4.0" jsonpointer "^5.0.0" leven "^3.1.0" "@apollo/client@^3.3.19": - version "3.7.17" - resolved "https://registry.yarnpkg.com/@apollo/client/-/client-3.7.17.tgz#1d2538729fd8ef138aa301a7cf62704474e57b72" + version "3.8.1" + resolved "https://registry.yarnpkg.com/@apollo/client/-/client-3.8.1.tgz#a1e3045a5fb276c08e38f7b5f930551d79741257" + integrity sha512-JGGj/9bdoLEqzatRikDeN8etseY5qeFAY0vSAx/Pd0ePNsaflKzHx6V2NZ0NsGkInq+9IXXX3RLVDf0EotizMA== dependencies: "@graphql-typed-document-node/core" "^3.1.1" - "@wry/context" "^0.7.0" - "@wry/equality" "^0.5.0" - "@wry/trie" "^0.4.0" + "@wry/context" "^0.7.3" + "@wry/equality" "^0.5.6" + "@wry/trie" "^0.4.3" graphql-tag "^2.12.6" hoist-non-react-statics "^3.3.2" - optimism "^0.16.2" + optimism "^0.17.5" prop-types "^15.7.2" response-iterator "^0.2.6" symbol-observable "^4.0.0" @@ -49,13 +55,15 @@ "@appbaseio/analytics@^1.1.1": version "1.2.0" - resolved "https://registry.npmjs.org/@appbaseio/analytics/-/analytics-1.2.0.tgz" + resolved "https://registry.yarnpkg.com/@appbaseio/analytics/-/analytics-1.2.0.tgz#e48da3fcd4ca78029aa9981d7e4c9689fe9aa078" + integrity sha512-uW5sfj8KjrlFtKKkkIMZJgqjNNOgfPUXgdl3lqEDWrdg7J/tMgkYNM3/s2c4LHsD96aHbrfA+DRHk5M45wgqpA== dependencies: cross-fetch "^3.0.4" -"@appbaseio/reactivecore@9.14.40": - version "9.14.40" - resolved "https://registry.npmjs.org/@appbaseio/reactivecore/-/reactivecore-9.14.40.tgz" +"@appbaseio/reactivecore@9.15.1": + version "9.15.1" + resolved "https://registry.yarnpkg.com/@appbaseio/reactivecore/-/reactivecore-9.15.1.tgz#283dff65f5cb15db58a48702b8238bdf38940850" + integrity sha512-p21u1DfTfd2C8sE4M0fnN98AahRaQQ4FKYv+3SdKT8l/WtNAuffIr1JtE7/jCualkBbJwlzuEoXmwuAfd59b+g== dependencies: cross-fetch "^3.0.4" dayjs "^1.11.7" @@ -64,15 +72,16 @@ redux-thunk "^2.3.0" "@appbaseio/reactivesearch@^3.43.10": - version "3.43.10" - resolved "https://registry.npmjs.org/@appbaseio/reactivesearch/-/reactivesearch-3.43.10.tgz" + version "3.45.0" + resolved "https://registry.yarnpkg.com/@appbaseio/reactivesearch/-/reactivesearch-3.45.0.tgz#cc39018bf3f0e30588041e63910c9cae99814322" + integrity sha512-rhZ+H5eucyx/TNW87HebTw0m0nutEDOXIo1zqLVI8PIEHosQltJisCpOcN7D3SOHgkX2e9rL1vZbDouxpnnVyw== dependencies: "@appbaseio/analytics" "^1.1.1" - "@appbaseio/reactivecore" "9.14.40" + "@appbaseio/reactivecore" "9.15.1" "@appbaseio/rheostat" "^1.0.0-alpha.15" "@emotion/core" 
"^10.0.28" "@emotion/styled" "^10.0.27" - appbase-js "^5.2.0" + appbase-js "^5.3.4" cross-env "^5.2.0" dayjs "^1.11.7" downshift "^1.31.2" @@ -90,17 +99,19 @@ "@appbaseio/rheostat@^1.0.0-alpha.15": version "1.0.0-alpha.15" - resolved "https://registry.npmjs.org/@appbaseio/rheostat/-/rheostat-1.0.0-alpha.15.tgz" + resolved "https://registry.yarnpkg.com/@appbaseio/rheostat/-/rheostat-1.0.0-alpha.15.tgz#c4db000c7e0adf973aaf95951ab9b4e90d806b26" + integrity sha512-dFDWv8SGGLRv5yGcQ5/3zYhVRDq2iIYmmU5v6s4/R0XRj2zCwld5O9DWi8WBZiDQJWQ7/7YUWTzqEgtxbYMOJA== dependencies: object.assign "^4.0.4" prop-types "^15.5.10" -"@aws-amplify/analytics@6.2.0": - version "6.2.0" - resolved "https://registry.npmjs.org/@aws-amplify/analytics/-/analytics-6.2.0.tgz" +"@aws-amplify/analytics@6.5.2": + version "6.5.2" + resolved "https://registry.yarnpkg.com/@aws-amplify/analytics/-/analytics-6.5.2.tgz#5b890486803de138defc39a7fd8f234df92ee7a1" + integrity sha512-97JNpbtEtKrMFDfcT6+4x4GhkRA9yK/mmWrFAgHHQ2LHwHfFdHhad5ATC3MNlebzRT7MUm5HofOEtBqtiwudUA== dependencies: - "@aws-amplify/cache" "5.1.0" - "@aws-amplify/core" "5.4.0" + "@aws-amplify/cache" "5.1.8" + "@aws-amplify/core" "5.8.2" "@aws-sdk/client-firehose" "3.6.1" "@aws-sdk/client-kinesis" "3.6.1" "@aws-sdk/client-personalize-events" "3.6.1" @@ -109,76 +120,86 @@ tslib "^1.8.0" uuid "^3.2.1" -"@aws-amplify/api-graphql@3.3.1": - version "3.3.1" - resolved "https://registry.npmjs.org/@aws-amplify/api-graphql/-/api-graphql-3.3.1.tgz" +"@aws-amplify/api-graphql@3.4.8": + version "3.4.8" + resolved "https://registry.yarnpkg.com/@aws-amplify/api-graphql/-/api-graphql-3.4.8.tgz#7702c192a0bffdc8957ae036dc0a4723ca6d3897" + integrity sha512-gTq3aVJDuAVwA4m5nVzgyqmNIwbQlPok3LhklV3qSB6lsw61LnTjft7mkB871Grumi4z05u4mma9epHNv+9nFg== dependencies: - "@aws-amplify/api-rest" "3.2.1" - "@aws-amplify/auth" "5.4.1" - "@aws-amplify/cache" "5.1.0" - "@aws-amplify/core" "5.4.0" - "@aws-amplify/pubsub" "5.2.1" + "@aws-amplify/api-rest" "3.5.2" + "@aws-amplify/auth" "5.6.2" + "@aws-amplify/cache" "5.1.8" + "@aws-amplify/core" "5.8.2" + "@aws-amplify/pubsub" "5.5.2" graphql "15.8.0" tslib "^1.8.0" uuid "^3.2.1" zen-observable-ts "0.8.19" -"@aws-amplify/api-rest@3.2.1": - version "3.2.1" - resolved "https://registry.npmjs.org/@aws-amplify/api-rest/-/api-rest-3.2.1.tgz" +"@aws-amplify/api-rest@3.5.2": + version "3.5.2" + resolved "https://registry.yarnpkg.com/@aws-amplify/api-rest/-/api-rest-3.5.2.tgz#5711cb329e2f42b2963c84e5affee94a3a8f73ba" + integrity sha512-yfZXXcTl/Dqm1jl8cmc4+eUzTA4PaRW/JAen22P/8bDgce+RxBrF0V8BhL0tPHLs8vCX7LnJZlrHTCMNn69Q2w== dependencies: - "@aws-amplify/core" "5.4.0" + "@aws-amplify/core" "5.8.2" axios "0.26.0" tslib "^1.8.0" url "0.11.0" -"@aws-amplify/api@5.2.1": - version "5.2.1" - resolved "https://registry.npmjs.org/@aws-amplify/api/-/api-5.2.1.tgz" +"@aws-amplify/api@5.4.2": + version "5.4.2" + resolved "https://registry.yarnpkg.com/@aws-amplify/api/-/api-5.4.2.tgz#8e14309aeadc10aa961905785fe382d764e9857a" + integrity sha512-du6+nYtnFQfDBgR0ysCsd4fgNcRk+0gGzpaVnlDtzhW79GG0UwuFNRCwtZIeGN0K8bwLkiAwllYk0I24Hx/KcA== dependencies: - "@aws-amplify/api-graphql" "3.3.1" - "@aws-amplify/api-rest" "3.2.1" + "@aws-amplify/api-graphql" "3.4.8" + "@aws-amplify/api-rest" "3.5.2" tslib "^1.8.0" -"@aws-amplify/auth@5.4.1": - version "5.4.1" - resolved "https://registry.npmjs.org/@aws-amplify/auth/-/auth-5.4.1.tgz" +"@aws-amplify/auth@5.6.2": + version "5.6.2" + resolved "https://registry.yarnpkg.com/@aws-amplify/auth/-/auth-5.6.2.tgz#92abdc0d0100e00e38c8b44dfbdf992ad895dc62" + 
integrity sha512-YwbGgwUP6VoRxPMT3e+bwK/onl+MEsA7PC/NdXj34MI6o4K0wcth1X6q9i8umnIhWMfmKNewqW1j+GhR4elH5Q== dependencies: - "@aws-amplify/core" "5.4.0" - amazon-cognito-identity-js "6.2.0" + "@aws-amplify/core" "5.8.2" + amazon-cognito-identity-js "6.3.3" + buffer "4.9.2" tslib "^1.8.0" url "0.11.0" -"@aws-amplify/cache@5.1.0": - version "5.1.0" - resolved "https://registry.npmjs.org/@aws-amplify/cache/-/cache-5.1.0.tgz" +"@aws-amplify/cache@5.1.8": + version "5.1.8" + resolved "https://registry.yarnpkg.com/@aws-amplify/cache/-/cache-5.1.8.tgz#ec856b657e0a9b2347bed41f7cbf36d624ba3836" + integrity sha512-nwlsy/IyVz8BjzHgmUzijzWodB1sps3OxQKwkvMdF4puWxpArapnfNqAy//j0S9lrc7gq7nrIvrlDPER+QFI3Q== dependencies: - "@aws-amplify/core" "5.4.0" + "@aws-amplify/core" "5.8.2" tslib "^1.8.0" -"@aws-amplify/core@5.4.0": - version "5.4.0" - resolved "https://registry.npmjs.org/@aws-amplify/core/-/core-5.4.0.tgz" +"@aws-amplify/core@5.8.2": + version "5.8.2" + resolved "https://registry.yarnpkg.com/@aws-amplify/core/-/core-5.8.2.tgz#6d7ebccc885ffeafc4db888cdd938f75581085f3" + integrity sha512-Bv87DqUek9E/omVbsvSgeaQhwTj4q+rhhFgUi2abbnMc6vh7+H8BqRvJ/2ytp4NTBZMtdJulxT+5awKQKoibFQ== dependencies: "@aws-crypto/sha256-js" "1.2.2" "@aws-sdk/client-cloudwatch-logs" "3.6.1" "@aws-sdk/types" "3.6.1" "@aws-sdk/util-hex-encoding" "3.6.1" + "@types/node-fetch" "2.6.4" isomorphic-unfetch "^3.0.0" react-native-url-polyfill "^1.3.0" tslib "^1.8.0" universal-cookie "^4.0.4" zen-observable-ts "0.8.19" -"@aws-amplify/datastore@4.5.1": - version "4.5.1" - resolved "https://registry.npmjs.org/@aws-amplify/datastore/-/datastore-4.5.1.tgz" - dependencies: - "@aws-amplify/api" "5.2.1" - "@aws-amplify/auth" "5.4.1" - "@aws-amplify/core" "5.4.0" - "@aws-amplify/pubsub" "5.2.1" - amazon-cognito-identity-js "6.2.0" +"@aws-amplify/datastore@4.7.2": + version "4.7.2" + resolved "https://registry.yarnpkg.com/@aws-amplify/datastore/-/datastore-4.7.2.tgz#b5c0f1cf99ec01fe5df10a83f6ea7cfd5c7e3a8e" + integrity sha512-cHGEZDBECNmwKShkl1jQvRvjl3GGvHhdDiAmh9I3hJwuU/TM7VUyWLUzpeKRl2gz9aDWACh2ty0+1IHYIGACfw== + dependencies: + "@aws-amplify/api" "5.4.2" + "@aws-amplify/auth" "5.6.2" + "@aws-amplify/core" "5.8.2" + "@aws-amplify/pubsub" "5.5.2" + amazon-cognito-identity-js "6.3.3" + buffer "4.9.2" idb "5.0.6" immer "9.0.6" ulid "2.3.0" @@ -186,44 +207,48 @@ zen-observable-ts "0.8.19" zen-push "0.2.1" -"@aws-amplify/geo@2.0.35": - version "2.0.35" - resolved "https://registry.npmjs.org/@aws-amplify/geo/-/geo-2.0.35.tgz" +"@aws-amplify/geo@2.3.2": + version "2.3.2" + resolved "https://registry.yarnpkg.com/@aws-amplify/geo/-/geo-2.3.2.tgz#13367044b0e2490e1a7608eb6eb15e635a60051d" + integrity sha512-hzzeyaghGUGq7enXr9ajWtZg1csJwnK0ssvUIHCdC1fSlWq8f56wHNt0VAvuQk+cbVOAX6aTq2qUjrL3AsKjgQ== dependencies: - "@aws-amplify/core" "5.4.0" - "@aws-sdk/client-location" "3.186.2" + "@aws-amplify/core" "5.8.2" + "@aws-sdk/client-location" "3.186.3" "@turf/boolean-clockwise" "6.5.0" camelcase-keys "6.2.2" tslib "^1.8.0" -"@aws-amplify/interactions@5.1.1": - version "5.1.1" - resolved "https://registry.npmjs.org/@aws-amplify/interactions/-/interactions-5.1.1.tgz" +"@aws-amplify/interactions@5.2.8": + version "5.2.8" + resolved "https://registry.yarnpkg.com/@aws-amplify/interactions/-/interactions-5.2.8.tgz#ee13b84ecd6d84a6c86ad0643fa6427fc490b0cd" + integrity sha512-ttUv6L71XHDuV46YMVHbWWgNA2IdUvfes45CDzdfXCm7dCV064dljRU+bHpKk7JHnxyY2IeUPYjYbrbyTfPdZg== dependencies: - "@aws-amplify/core" "5.4.0" - "@aws-sdk/client-lex-runtime-service" "3.186.2" - 
"@aws-sdk/client-lex-runtime-v2" "3.186.2" + "@aws-amplify/core" "5.8.2" + "@aws-sdk/client-lex-runtime-service" "3.186.3" + "@aws-sdk/client-lex-runtime-v2" "3.186.3" base-64 "1.0.0" fflate "0.7.3" pako "2.0.4" tslib "^1.8.0" -"@aws-amplify/notifications@1.2.0": - version "1.2.0" - resolved "https://registry.npmjs.org/@aws-amplify/notifications/-/notifications-1.2.0.tgz" +"@aws-amplify/notifications@1.6.2": + version "1.6.2" + resolved "https://registry.yarnpkg.com/@aws-amplify/notifications/-/notifications-1.6.2.tgz#91455b27e3b51f72571cdbf9888e2f4592cad034" + integrity sha512-XHI/bZ4ruyU+jZWwbMCFNqGjt963zMJUeb5MnU7hl3T/lRM4Ltd9i1uqoSrztMtjMea11jRKfRE2xvaByKAFiw== dependencies: - "@aws-amplify/cache" "5.1.0" - "@aws-amplify/core" "5.4.0" - "@aws-amplify/rtn-push-notification" "1.1.1" + "@aws-amplify/cache" "5.1.8" + "@aws-amplify/core" "5.8.2" + "@aws-amplify/rtn-push-notification" "1.1.4" lodash "^4.17.21" uuid "^3.2.1" -"@aws-amplify/predictions@5.2.3": - version "5.2.3" - resolved "https://registry.npmjs.org/@aws-amplify/predictions/-/predictions-5.2.3.tgz" +"@aws-amplify/predictions@5.5.2": + version "5.5.2" + resolved "https://registry.yarnpkg.com/@aws-amplify/predictions/-/predictions-5.5.2.tgz#17f173c87cc8d0a623286565ad49dc54e9a809a9" + integrity sha512-3vu1Iu0otkz+q4yrcqF2va3xOca1t/oBohkv5W7OIJoDhVXBJB2cbSoWVI0zgVJ7sQrSiSszKtZZGTdFbZpJgQ== dependencies: - "@aws-amplify/core" "5.4.0" - "@aws-amplify/storage" "5.4.1" + "@aws-amplify/core" "5.8.2" + "@aws-amplify/storage" "5.9.2" "@aws-sdk/client-comprehend" "3.6.1" "@aws-sdk/client-polly" "3.6.1" "@aws-sdk/client-rekognition" "3.6.1" @@ -235,39 +260,43 @@ tslib "^1.8.0" uuid "^3.2.1" -"@aws-amplify/pubsub@5.2.1": - version "5.2.1" - resolved "https://registry.npmjs.org/@aws-amplify/pubsub/-/pubsub-5.2.1.tgz" +"@aws-amplify/pubsub@5.5.2": + version "5.5.2" + resolved "https://registry.yarnpkg.com/@aws-amplify/pubsub/-/pubsub-5.5.2.tgz#dcb49c397abe073c5045078d5a0dac80276e7b27" + integrity sha512-93g7Ar7XjG2sFRyDBHtrUYQP8BfiI1JEh/QJmpRQVWffwUcihm8C8EsH0OkTTlA6FsxIR2T5qxHGLjzbz5pYRg== dependencies: - "@aws-amplify/auth" "5.4.1" - "@aws-amplify/cache" "5.1.0" - "@aws-amplify/core" "5.4.0" + "@aws-amplify/auth" "5.6.2" + "@aws-amplify/cache" "5.1.8" + "@aws-amplify/core" "5.8.2" + buffer "4.9.2" graphql "15.8.0" tslib "^1.8.0" url "0.11.0" uuid "^3.2.1" zen-observable-ts "0.8.19" -"@aws-amplify/rtn-push-notification@1.1.1": - version "1.1.1" - resolved "https://registry.npmjs.org/@aws-amplify/rtn-push-notification/-/rtn-push-notification-1.1.1.tgz" +"@aws-amplify/rtn-push-notification@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@aws-amplify/rtn-push-notification/-/rtn-push-notification-1.1.4.tgz#8b2404c0254750c3fb5a34b8b225bc7825890ff8" + integrity sha512-koXbK4n662TMj17Po1rJbjIa6RurpRH1KOFHtT8mO3nHYoUENi0sMau7IIghjPNgcH/bsm2EB0gV5YMYliEUzw== -"@aws-amplify/storage@5.4.1": - version "5.4.1" - resolved "https://registry.npmjs.org/@aws-amplify/storage/-/storage-5.4.1.tgz" +"@aws-amplify/storage@5.9.2": + version "5.9.2" + resolved "https://registry.yarnpkg.com/@aws-amplify/storage/-/storage-5.9.2.tgz#2b6b0bf0d92aabb7f3cf0e95a7ea53d6c0390a9e" + integrity sha512-pTWivMRQ/yNv33kLj85M8pH/zpf0CBuF1KRP84VC9NY9+Ilm9K4bq/gsBZjpIx/nYhL8BDpwRyrveLzPiiGHPA== dependencies: - "@aws-amplify/core" "5.4.0" - "@aws-sdk/client-s3" "3.6.3" - "@aws-sdk/s3-request-presigner" "3.6.1" - "@aws-sdk/util-create-request" "3.6.1" - "@aws-sdk/util-format-url" "3.6.1" - axios "0.26.0" + "@aws-amplify/core" "5.8.2" + "@aws-sdk/md5-js" "3.6.1" + 
"@aws-sdk/types" "3.6.1" + buffer "4.9.2" events "^3.1.0" + fast-xml-parser "^4.2.5" tslib "^1.8.0" "@aws-crypto/crc32@2.0.0": version "2.0.0" - resolved "https://registry.npmjs.org/@aws-crypto/crc32/-/crc32-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-crypto/crc32/-/crc32-2.0.0.tgz#4ad432a3c03ec3087c5540ff6e41e6565d2dc153" + integrity sha512-TvE1r2CUueyXOuHdEigYjIZVesInd9KN+K/TFFNfkkxRThiNxO6i4ZqqAVMoEjAamZZ1AA8WXJkjCz7YShHPQA== dependencies: "@aws-crypto/util" "^2.0.0" "@aws-sdk/types" "^3.1.0" @@ -275,7 +304,8 @@ "@aws-crypto/crc32@^1.0.0": version "1.2.2" - resolved "https://registry.npmjs.org/@aws-crypto/crc32/-/crc32-1.2.2.tgz" + resolved "https://registry.yarnpkg.com/@aws-crypto/crc32/-/crc32-1.2.2.tgz#4a758a596fa8cb3ab463f037a78c2ca4992fe81f" + integrity sha512-8K0b1672qbv05chSoKpwGZ3fhvVp28Fg3AVHVkEHFl2lTLChO7wD/hTyyo8ING7uc31uZRt7bNra/hA74Td7Tw== dependencies: "@aws-crypto/util" "^1.2.2" "@aws-sdk/types" "^3.1.0" @@ -283,19 +313,22 @@ "@aws-crypto/ie11-detection@^1.0.0": version "1.0.0" - resolved "https://registry.npmjs.org/@aws-crypto/ie11-detection/-/ie11-detection-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-crypto/ie11-detection/-/ie11-detection-1.0.0.tgz#d3a6af29ba7f15458f79c41d1cd8cac3925e726a" + integrity sha512-kCKVhCF1oDxFYgQrxXmIrS5oaWulkvRcPz+QBDMsUr2crbF4VGgGT6+uQhSwJFdUAQ2A//Vq+uT83eJrkzFgXA== dependencies: tslib "^1.11.1" "@aws-crypto/ie11-detection@^2.0.0": version "2.0.2" - resolved "https://registry.npmjs.org/@aws-crypto/ie11-detection/-/ie11-detection-2.0.2.tgz" + resolved "https://registry.yarnpkg.com/@aws-crypto/ie11-detection/-/ie11-detection-2.0.2.tgz#9c39f4a5558196636031a933ec1b4792de959d6a" + integrity sha512-5XDMQY98gMAf/WRTic5G++jfmS/VLM0rwpiOpaainKi4L0nqWMSB1SzsrEG5rjFZGYN6ZAefO+/Yta2dFM0kMw== dependencies: tslib "^1.11.1" "@aws-crypto/sha256-browser@2.0.0": version "2.0.0" - resolved "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-browser/-/sha256-browser-2.0.0.tgz#741c9024df55ec59b51e5b1f5d806a4852699fb5" + integrity sha512-rYXOQ8BFOaqMEHJrLHul/25ckWH6GTJtdLSajhlqGMx0PmSueAuvboCuZCTqEKlxR8CQOwRarxYMZZSYlhRA1A== dependencies: "@aws-crypto/ie11-detection" "^2.0.0" "@aws-crypto/sha256-js" "^2.0.0" @@ -308,7 +341,8 @@ "@aws-crypto/sha256-browser@^1.0.0": version "1.2.2" - resolved "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-1.2.2.tgz" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-browser/-/sha256-browser-1.2.2.tgz#004d806e3bbae130046c259ec3279a02d4a0b576" + integrity sha512-0tNR4kBtJp+9S0kis4+JLab3eg6QWuIeuPhzaYoYwNUXGBgsWIkktA2mnilet+EGWzf3n1zknJXC4X4DVyyXbg== dependencies: "@aws-crypto/ie11-detection" "^1.0.0" "@aws-crypto/sha256-js" "^1.2.2" @@ -320,43 +354,58 @@ "@aws-crypto/sha256-js@1.2.2", "@aws-crypto/sha256-js@^1.0.0", "@aws-crypto/sha256-js@^1.2.2": version "1.2.2" - resolved "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-1.2.2.tgz" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-js/-/sha256-js-1.2.2.tgz#02acd1a1fda92896fc5a28ec7c6e164644ea32fc" + integrity sha512-Nr1QJIbW/afYYGzYvrF70LtaHrIRtd4TNAglX8BvlfxJLZ45SAmueIKYl5tWoNBPzp65ymXGFK0Bb1vZUpuc9g== dependencies: "@aws-crypto/util" "^1.2.2" "@aws-sdk/types" "^3.1.0" tslib "^1.11.1" -"@aws-crypto/sha256-js@2.0.0", "@aws-crypto/sha256-js@^2.0.0": +"@aws-crypto/sha256-js@2.0.0": version "2.0.0" - resolved "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-2.0.0.tgz" + resolved 
"https://registry.yarnpkg.com/@aws-crypto/sha256-js/-/sha256-js-2.0.0.tgz#f1f936039bdebd0b9e2dd834d65afdc2aac4efcb" + integrity sha512-VZY+mCY4Nmrs5WGfitmNqXzaE873fcIZDu54cbaDaaamsaTOP1DBImV9F4pICc3EHjQXujyE8jig+PFCaew9ig== dependencies: "@aws-crypto/util" "^2.0.0" "@aws-sdk/types" "^3.1.0" tslib "^1.11.1" +"@aws-crypto/sha256-js@^2.0.0": + version "2.0.2" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-js/-/sha256-js-2.0.2.tgz#c81e5d378b8a74ff1671b58632779986e50f4c99" + integrity sha512-iXLdKH19qPmIC73fVCrHWCSYjN/sxaAvZ3jNNyw6FclmHyjLKg0f69WlC9KTnyElxCR5MO9SKaG00VwlJwyAkQ== + dependencies: + "@aws-crypto/util" "^2.0.2" + "@aws-sdk/types" "^3.110.0" + tslib "^1.11.1" + "@aws-crypto/supports-web-crypto@^1.0.0": version "1.0.0" - resolved "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-crypto/supports-web-crypto/-/supports-web-crypto-1.0.0.tgz#c40901bc17ac1e875e248df16a2b47ad8bfd9a93" + integrity sha512-IHLfv+WmVH89EW4n6a5eE8/hUlz6qkWGMn/v4r5ZgzcXdTC5nolii2z3k46y01hWRiC2PPhOdeSLzMUCUMco7g== dependencies: tslib "^1.11.1" "@aws-crypto/supports-web-crypto@^2.0.0": version "2.0.2" - resolved "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-2.0.2.tgz" + resolved "https://registry.yarnpkg.com/@aws-crypto/supports-web-crypto/-/supports-web-crypto-2.0.2.tgz#9f02aafad8789cac9c0ab5faaebb1ab8aa841338" + integrity sha512-6mbSsLHwZ99CTOOswvCRP3C+VCWnzBf+1SnbWxzzJ9lR0mA0JnY2JEAhp8rqmTE0GPFy88rrM27ffgp62oErMQ== dependencies: tslib "^1.11.1" "@aws-crypto/util@^1.2.2": version "1.2.2" - resolved "https://registry.npmjs.org/@aws-crypto/util/-/util-1.2.2.tgz" + resolved "https://registry.yarnpkg.com/@aws-crypto/util/-/util-1.2.2.tgz#b28f7897730eb6538b21c18bd4de22d0ea09003c" + integrity sha512-H8PjG5WJ4wz0UXAFXeJjWCW1vkvIJ3qUUD+rGRwJ2/hj+xT58Qle2MTql/2MGzkU+1JLAFuR6aJpLAjHwhmwwg== dependencies: "@aws-sdk/types" "^3.1.0" "@aws-sdk/util-utf8-browser" "^3.0.0" tslib "^1.11.1" -"@aws-crypto/util@^2.0.0": +"@aws-crypto/util@^2.0.0", "@aws-crypto/util@^2.0.2": version "2.0.2" - resolved "https://registry.npmjs.org/@aws-crypto/util/-/util-2.0.2.tgz" + resolved "https://registry.yarnpkg.com/@aws-crypto/util/-/util-2.0.2.tgz#adf5ff5dfbc7713082f897f1d01e551ce0edb9c0" + integrity sha512-Lgu5v/0e/BcrZ5m/IWqzPUf3UYFTy/PpeED+uc9SWUR1iZQL8XXbGQg10UfllwwBryO3hFF5dizK+78aoXC1eA== dependencies: "@aws-sdk/types" "^3.110.0" "@aws-sdk/util-utf8-browser" "^3.0.0" @@ -364,34 +413,24 @@ "@aws-sdk/abort-controller@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/abort-controller/-/abort-controller-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/abort-controller/-/abort-controller-3.186.0.tgz#dfaccd296d57136930582e1a19203d6cb60debc7" + integrity sha512-JFvvvtEcbYOvVRRXasi64Dd1VcOz5kJmPvtzsJ+HzMHvPbGGs/aopOJAZQJMJttzJmJwVTay0QL6yag9Kk8nYA== dependencies: "@aws-sdk/types" "3.186.0" tslib "^2.3.1" "@aws-sdk/abort-controller@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/abort-controller/-/abort-controller-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/abort-controller/-/abort-controller-3.6.1.tgz#75812875bbef6ad17e0e3a6d96aab9df636376f9" + integrity sha512-X81XkxX/2Tvv9YNcEto/rcQzPIdKJHFSnl9hBl/qkSdCFV/GaQ2XNWfKm5qFXMLlZNFS0Fn5CnBJ83qnBm47vg== dependencies: "@aws-sdk/types" "3.6.1" tslib "^1.8.0" -"@aws-sdk/chunked-blob-reader-native@3.6.1": - version "3.6.1" - resolved 
"https://registry.npmjs.org/@aws-sdk/chunked-blob-reader-native/-/chunked-blob-reader-native-3.6.1.tgz" - dependencies: - "@aws-sdk/util-base64-browser" "3.6.1" - tslib "^1.8.0" - -"@aws-sdk/chunked-blob-reader@3.6.1": - version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/chunked-blob-reader/-/chunked-blob-reader-3.6.1.tgz" - dependencies: - tslib "^1.8.0" - "@aws-sdk/client-cloudwatch-logs@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/client-cloudwatch-logs/-/client-cloudwatch-logs-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-cloudwatch-logs/-/client-cloudwatch-logs-3.6.1.tgz#5e8dba495a2ba9a901b0a1a2d53edef8bd452398" + integrity sha512-QOxIDnlVTpnwJ26Gap6RGz61cDLH6TKrIp30VqwdMeT1pCGy8mn9rWln6XA+ymkofHy/08RfpGp+VN4axwd4Lw== dependencies: "@aws-crypto/sha256-browser" "^1.0.0" "@aws-crypto/sha256-js" "^1.0.0" @@ -427,7 +466,8 @@ "@aws-sdk/client-comprehend@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/client-comprehend/-/client-comprehend-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-comprehend/-/client-comprehend-3.6.1.tgz#d640d510b49feafa94ac252cdd7942cbe5537249" + integrity sha512-Y2ixlSTjjAp2HJhkUArtYqC/X+zG5Qqu3Bl+Ez22u4u4YnG8HsNFD6FE1axuWSdSa5AFtWTEt+Cz2Ghj/tDySA== dependencies: "@aws-crypto/sha256-browser" "^1.0.0" "@aws-crypto/sha256-js" "^1.0.0" @@ -464,7 +504,8 @@ "@aws-sdk/client-firehose@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/client-firehose/-/client-firehose-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-firehose/-/client-firehose-3.6.1.tgz#87a8ef0c18267907b3ce712e6d3de8f36b0a7c7b" + integrity sha512-KhiKCm+cJmnRFuAEyO3DBpFVDNix1XcVikdxk2lvYbFWkM1oUZoBpudxaJ+fPf2W3stF3CXIAOP+TnGqSZCy9g== dependencies: "@aws-crypto/sha256-browser" "^1.0.0" "@aws-crypto/sha256-js" "^1.0.0" @@ -500,7 +541,8 @@ "@aws-sdk/client-kinesis@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/client-kinesis/-/client-kinesis-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-kinesis/-/client-kinesis-3.6.1.tgz#48583cc854f9108bc8ff6168005d9a05b24bae31" + integrity sha512-Ygo+92LxHeUZmiyhiHT+k7hIOhJd6S7ckCEVUsQs2rfwe9bAygUY/3cCoZSqgWy7exFRRKsjhzStcyV6i6jrVQ== dependencies: "@aws-crypto/sha256-browser" "^1.0.0" "@aws-crypto/sha256-js" "^1.0.0" @@ -538,13 +580,14 @@ "@aws-sdk/util-waiter" "3.6.1" tslib "^2.0.0" -"@aws-sdk/client-lex-runtime-service@3.186.2": - version "3.186.2" - resolved "https://registry.npmjs.org/@aws-sdk/client-lex-runtime-service/-/client-lex-runtime-service-3.186.2.tgz" +"@aws-sdk/client-lex-runtime-service@3.186.3": + version "3.186.3" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-lex-runtime-service/-/client-lex-runtime-service-3.186.3.tgz#cc1130254d50dc1a5b85ac736e6f764b0fa145c3" + integrity sha512-YP+GDY9OxyW4rJDqjreaNpiDBvH1uzO3ShJKl57hT92Kw2auDQxttcMf//J8dQXvrVkW/fVXCLI9TmtxS7XJOQ== dependencies: "@aws-crypto/sha256-browser" "2.0.0" "@aws-crypto/sha256-js" "2.0.0" - "@aws-sdk/client-sts" "3.186.2" + "@aws-sdk/client-sts" "3.186.3" "@aws-sdk/config-resolver" "3.186.0" "@aws-sdk/credential-provider-node" "3.186.0" "@aws-sdk/fetch-http-handler" "3.186.0" @@ -577,13 +620,14 @@ "@aws-sdk/util-utf8-node" "3.186.0" tslib "^2.3.1" -"@aws-sdk/client-lex-runtime-v2@3.186.2": - version "3.186.2" - resolved "https://registry.npmjs.org/@aws-sdk/client-lex-runtime-v2/-/client-lex-runtime-v2-3.186.2.tgz" +"@aws-sdk/client-lex-runtime-v2@3.186.3": + version "3.186.3" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/client-lex-runtime-v2/-/client-lex-runtime-v2-3.186.3.tgz#7baa6772ce3fdd7265fca2daa75eb0e896f27764" + integrity sha512-4MJfSnb+qM8BYW4ToCvg7sDWN0NcEqK738hCZUV89cjp7pIHZ6osJuS/PsmZEommVj+71GviZ4buu5KUCfCGFQ== dependencies: "@aws-crypto/sha256-browser" "2.0.0" "@aws-crypto/sha256-js" "2.0.0" - "@aws-sdk/client-sts" "3.186.2" + "@aws-sdk/client-sts" "3.186.3" "@aws-sdk/config-resolver" "3.186.0" "@aws-sdk/credential-provider-node" "3.186.0" "@aws-sdk/eventstream-handler-node" "3.186.0" @@ -621,13 +665,14 @@ "@aws-sdk/util-utf8-node" "3.186.0" tslib "^2.3.1" -"@aws-sdk/client-location@3.186.2": - version "3.186.2" - resolved "https://registry.npmjs.org/@aws-sdk/client-location/-/client-location-3.186.2.tgz" +"@aws-sdk/client-location@3.186.3": + version "3.186.3" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-location/-/client-location-3.186.3.tgz#c812ae3dabf76153ad046413298a1ab53cadee9a" + integrity sha512-LCMFgoWfvKBnZhhtl93RLhrsHCalM7huaxErHSKoqWDBUDP0i7rOX73qW8E25j/vQ4emEkT0d6ts1rDu4EnlNw== dependencies: "@aws-crypto/sha256-browser" "2.0.0" "@aws-crypto/sha256-js" "2.0.0" - "@aws-sdk/client-sts" "3.186.2" + "@aws-sdk/client-sts" "3.186.3" "@aws-sdk/config-resolver" "3.186.0" "@aws-sdk/credential-provider-node" "3.186.0" "@aws-sdk/fetch-http-handler" "3.186.0" @@ -662,7 +707,8 @@ "@aws-sdk/client-personalize-events@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/client-personalize-events/-/client-personalize-events-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-personalize-events/-/client-personalize-events-3.6.1.tgz#86942bb64108cfc2f6c31a8b54aab6fa7f7be00f" + integrity sha512-x9Jl/7emSQsB6GwBvjyw5BiSO26CnH4uvjNit6n54yNMtJ26q0+oIxkplnUDyjLTfLRe373c/z5/4dQQtDffkw== dependencies: "@aws-crypto/sha256-browser" "^1.0.0" "@aws-crypto/sha256-js" "^1.0.0" @@ -698,7 +744,8 @@ "@aws-sdk/client-polly@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/client-polly/-/client-polly-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-polly/-/client-polly-3.6.1.tgz#869deb186e57fca29737bfa7af094599d7879841" + integrity sha512-y6fxVYndGS7z2KqHViPCqagBEOsZlxBUYUJZuD6WWTiQrI0Pwe5qG02oKJVaa5OmxE20QLf6bRBWj2rQpeF4IQ== dependencies: "@aws-crypto/sha256-browser" "^1.0.0" "@aws-crypto/sha256-js" "^1.0.0" @@ -734,70 +781,22 @@ "@aws-sdk/client-rekognition@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/client-rekognition/-/client-rekognition-3.6.1.tgz" - dependencies: - "@aws-crypto/sha256-browser" "^1.0.0" - "@aws-crypto/sha256-js" "^1.0.0" - "@aws-sdk/config-resolver" "3.6.1" - "@aws-sdk/credential-provider-node" "3.6.1" - "@aws-sdk/fetch-http-handler" "3.6.1" - "@aws-sdk/hash-node" "3.6.1" - "@aws-sdk/invalid-dependency" "3.6.1" - "@aws-sdk/middleware-content-length" "3.6.1" - "@aws-sdk/middleware-host-header" "3.6.1" - "@aws-sdk/middleware-logger" "3.6.1" - "@aws-sdk/middleware-retry" "3.6.1" - "@aws-sdk/middleware-serde" "3.6.1" - "@aws-sdk/middleware-signing" "3.6.1" - "@aws-sdk/middleware-stack" "3.6.1" - "@aws-sdk/middleware-user-agent" "3.6.1" - "@aws-sdk/node-config-provider" "3.6.1" - "@aws-sdk/node-http-handler" "3.6.1" - "@aws-sdk/protocol-http" "3.6.1" - "@aws-sdk/smithy-client" "3.6.1" - "@aws-sdk/types" "3.6.1" - "@aws-sdk/url-parser" "3.6.1" - "@aws-sdk/url-parser-native" "3.6.1" - "@aws-sdk/util-base64-browser" "3.6.1" - "@aws-sdk/util-base64-node" "3.6.1" - "@aws-sdk/util-body-length-browser" "3.6.1" - "@aws-sdk/util-body-length-node" "3.6.1" - 
"@aws-sdk/util-user-agent-browser" "3.6.1" - "@aws-sdk/util-user-agent-node" "3.6.1" - "@aws-sdk/util-utf8-browser" "3.6.1" - "@aws-sdk/util-utf8-node" "3.6.1" - "@aws-sdk/util-waiter" "3.6.1" - tslib "^2.0.0" - -"@aws-sdk/client-s3@3.6.3": - version "3.6.3" - resolved "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.6.3.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-rekognition/-/client-rekognition-3.6.1.tgz#710ba6d4509a2caa417cf0702ba81b5b65aa73eb" + integrity sha512-Ia4FEog9RrI0IoDRbOJO6djwhVAAaEZutxEKrWbjrVz4bgib28L+V+yAio2SUneeirj8pNYXwBKPfoYOUqGHhA== dependencies: "@aws-crypto/sha256-browser" "^1.0.0" "@aws-crypto/sha256-js" "^1.0.0" "@aws-sdk/config-resolver" "3.6.1" "@aws-sdk/credential-provider-node" "3.6.1" - "@aws-sdk/eventstream-serde-browser" "3.6.1" - "@aws-sdk/eventstream-serde-config-resolver" "3.6.1" - "@aws-sdk/eventstream-serde-node" "3.6.1" "@aws-sdk/fetch-http-handler" "3.6.1" - "@aws-sdk/hash-blob-browser" "3.6.1" "@aws-sdk/hash-node" "3.6.1" - "@aws-sdk/hash-stream-node" "3.6.1" "@aws-sdk/invalid-dependency" "3.6.1" - "@aws-sdk/md5-js" "3.6.1" - "@aws-sdk/middleware-apply-body-checksum" "3.6.1" - "@aws-sdk/middleware-bucket-endpoint" "3.6.1" "@aws-sdk/middleware-content-length" "3.6.1" - "@aws-sdk/middleware-expect-continue" "3.6.1" "@aws-sdk/middleware-host-header" "3.6.1" - "@aws-sdk/middleware-location-constraint" "3.6.1" "@aws-sdk/middleware-logger" "3.6.1" "@aws-sdk/middleware-retry" "3.6.1" - "@aws-sdk/middleware-sdk-s3" "3.6.1" "@aws-sdk/middleware-serde" "3.6.1" "@aws-sdk/middleware-signing" "3.6.1" - "@aws-sdk/middleware-ssec" "3.6.1" "@aws-sdk/middleware-stack" "3.6.1" "@aws-sdk/middleware-user-agent" "3.6.1" "@aws-sdk/node-config-provider" "3.6.1" @@ -816,13 +815,12 @@ "@aws-sdk/util-utf8-browser" "3.6.1" "@aws-sdk/util-utf8-node" "3.6.1" "@aws-sdk/util-waiter" "3.6.1" - "@aws-sdk/xml-builder" "3.6.1" - fast-xml-parser "4.2.4" tslib "^2.0.0" "@aws-sdk/client-sso@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso/-/client-sso-3.186.0.tgz#233bdd1312dbf88ef9452f8a62c3c3f1ac580330" + integrity sha512-qwLPomqq+fjvp42izzEpBEtGL2+dIlWH5pUCteV55hTEwHgo+m9LJPIrMWkPeoMBzqbNiu5n6+zihnwYlCIlEA== dependencies: "@aws-crypto/sha256-browser" "2.0.0" "@aws-crypto/sha256-js" "2.0.0" @@ -856,9 +854,10 @@ "@aws-sdk/util-utf8-node" "3.186.0" tslib "^2.3.1" -"@aws-sdk/client-sts@3.186.2": - version "3.186.2" - resolved "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.186.2.tgz" +"@aws-sdk/client-sts@3.186.3": + version "3.186.3" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-sts/-/client-sts-3.186.3.tgz#1c12355cb9d3cadc64ab74c91c3d57515680dfbd" + integrity sha512-mnttdyYBtqO+FkDtOT3F1FGi8qD11fF5/3zYLaNuFFULqKneaIwW2YIsjFlgvPGpmoyo/tNplnZwhQ9xQtT3Sw== dependencies: "@aws-crypto/sha256-browser" "2.0.0" "@aws-crypto/sha256-js" "2.0.0" @@ -894,12 +893,13 @@ "@aws-sdk/util-utf8-browser" "3.186.0" "@aws-sdk/util-utf8-node" "3.186.0" entities "2.2.0" - fast-xml-parser "4.2.4" + fast-xml-parser "4.2.5" tslib "^2.3.1" "@aws-sdk/client-textract@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/client-textract/-/client-textract-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-textract/-/client-textract-3.6.1.tgz#b8972f53f0353222b4c052adc784291e602be6aa" + integrity sha512-nLrBzWDt3ToiGVFF4lW7a/eZpI2zjdvu7lwmOWyXX8iiPzhBVVEfd5oOorRyJYBsGMslp4sqV8TBkU5Ld/a97Q== dependencies: 
"@aws-crypto/sha256-browser" "^1.0.0" "@aws-crypto/sha256-js" "^1.0.0" @@ -935,7 +935,8 @@ "@aws-sdk/client-translate@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/client-translate/-/client-translate-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-translate/-/client-translate-3.6.1.tgz#ce855c9fe7885b930d4039c2e45c869e3c0a6656" + integrity sha512-RIHY+Og1i43B5aWlfUUk0ZFnNfM7j2vzlYUwOqhndawV49GFf96M3pmskR5sKEZI+5TXY77qR9TgZ/r3UxVCRQ== dependencies: "@aws-crypto/sha256-browser" "^1.0.0" "@aws-crypto/sha256-js" "^1.0.0" @@ -972,7 +973,8 @@ "@aws-sdk/config-resolver@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/config-resolver/-/config-resolver-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/config-resolver/-/config-resolver-3.186.0.tgz#68bbf82b572f03ee3ec9ac84d000147e1050149b" + integrity sha512-l8DR7Q4grEn1fgo2/KvtIfIHJS33HGKPQnht8OPxkl0dMzOJ0jxjOw/tMbrIcPnr2T3Fi7LLcj3dY1Fo1poruQ== dependencies: "@aws-sdk/signature-v4" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -982,7 +984,8 @@ "@aws-sdk/config-resolver@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/config-resolver/-/config-resolver-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/config-resolver/-/config-resolver-3.6.1.tgz#3bcc5e6a0ebeedf0981b0540e1f18a72b4dafebf" + integrity sha512-qjP1g3jLIm+XvOIJ4J7VmZRi87vsDmTRzIFePVeG+EFWwYQLxQjTGMdIj3yKTh1WuZ0HByf47mGcpiS4HZLm1Q== dependencies: "@aws-sdk/signature-v4" "3.6.1" "@aws-sdk/types" "3.6.1" @@ -990,7 +993,8 @@ "@aws-sdk/credential-provider-env@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-env/-/credential-provider-env-3.186.0.tgz#55dec9c4c29ebbdff4f3bce72de9e98f7a1f92e1" + integrity sha512-N9LPAqi1lsQWgxzmU4NPvLPnCN5+IQ3Ai1IFf3wM6FFPNoSUd1kIA2c6xaf0BE7j5Kelm0raZOb4LnV3TBAv+g== dependencies: "@aws-sdk/property-provider" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -998,7 +1002,8 @@ "@aws-sdk/credential-provider-env@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-env/-/credential-provider-env-3.6.1.tgz#d8b2dd36836432a9b8ec05a5cf9fe428b04c9964" + integrity sha512-coeFf/HnhpGidcAN1i1NuFgyFB2M6DeN1zNVy4f6s4mAh96ftr9DgWM1CcE3C+cLHEdpNqleVgC/2VQpyzOBLQ== dependencies: "@aws-sdk/property-provider" "3.6.1" "@aws-sdk/types" "3.6.1" @@ -1006,7 +1011,8 @@ "@aws-sdk/credential-provider-imds@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-imds/-/credential-provider-imds-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-imds/-/credential-provider-imds-3.186.0.tgz#73e0f62832726c7734b4f6c50a02ab0d869c00e1" + integrity sha512-iJeC7KrEgPPAuXjCZ3ExYZrRQvzpSdTZopYgUm5TnNZ8S1NU/4nvv5xVy61JvMj3JQAeG8UDYYgC421Foc8wQw== dependencies: "@aws-sdk/node-config-provider" "3.186.0" "@aws-sdk/property-provider" "3.186.0" @@ -1016,7 +1022,8 @@ "@aws-sdk/credential-provider-imds@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-imds/-/credential-provider-imds-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-imds/-/credential-provider-imds-3.6.1.tgz#b5a8b8ef15eac26c58e469451a6c7c34ab3ca875" + integrity 
sha512-bf4LMI418OYcQbyLZRAW8Q5AYM2IKrNqOnIcfrFn2f17ulG7TzoWW3WN/kMOw4TC9+y+vIlCWOv87GxU1yP0Bg== dependencies: "@aws-sdk/property-provider" "3.6.1" "@aws-sdk/types" "3.6.1" @@ -1024,7 +1031,8 @@ "@aws-sdk/credential-provider-ini@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.186.0.tgz#3b3873ccae855ee3f6f15dcd8212c5ca4ec01bf3" + integrity sha512-ecrFh3MoZhAj5P2k/HXo/hMJQ3sfmvlommzXuZ/D1Bj2yMcyWuBhF1A83Fwd2gtYrWRrllsK3IOMM5Jr8UIVZA== dependencies: "@aws-sdk/credential-provider-env" "3.186.0" "@aws-sdk/credential-provider-imds" "3.186.0" @@ -1037,7 +1045,8 @@ "@aws-sdk/credential-provider-ini@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.6.1.tgz#0da6d9341e621f8e0815814ed017b88e268fbc3d" + integrity sha512-3jguW6+ttRNddRZvbrs1yb3F1jrUbqyv0UfRoHuOGthjTt+L9sDpJaJGugYnT3bS9WBu1NydLVE2kDV++mJGVw== dependencies: "@aws-sdk/property-provider" "3.6.1" "@aws-sdk/shared-ini-file-loader" "3.6.1" @@ -1046,7 +1055,8 @@ "@aws-sdk/credential-provider-node@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-node/-/credential-provider-node-3.186.0.tgz#0be58623660b41eed3a349a89b31a01d4cc773ea" + integrity sha512-HIt2XhSRhEvVgRxTveLCzIkd/SzEBQfkQ6xMJhkBtfJw1o3+jeCk+VysXM0idqmXytctL0O3g9cvvTHOsUgxOA== dependencies: "@aws-sdk/credential-provider-env" "3.186.0" "@aws-sdk/credential-provider-imds" "3.186.0" @@ -1061,7 +1071,8 @@ "@aws-sdk/credential-provider-node@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-node/-/credential-provider-node-3.6.1.tgz#0055292a4f0f49d053e8dfcc9174d8d2cf6862bb" + integrity sha512-VAHOcsqkPrF1k/fA62pv9c75lUWe5bHpcbFX83C3EUPd2FXV10Lfkv6bdWhyZPQy0k8T+9/yikHH3c7ZQeFE5A== dependencies: "@aws-sdk/credential-provider-env" "3.6.1" "@aws-sdk/credential-provider-imds" "3.6.1" @@ -1074,7 +1085,8 @@ "@aws-sdk/credential-provider-process@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-process/-/credential-provider-process-3.186.0.tgz#e3be60983261a58c212f5c38b6fb76305bbb8ce7" + integrity sha512-ATRU6gbXvWC1TLnjOEZugC/PBXHBoZgBADid4fDcEQY1vF5e5Ux1kmqkJxyHtV5Wl8sE2uJfwWn+FlpUHRX67g== dependencies: "@aws-sdk/property-provider" "3.186.0" "@aws-sdk/shared-ini-file-loader" "3.186.0" @@ -1083,7 +1095,8 @@ "@aws-sdk/credential-provider-process@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-process/-/credential-provider-process-3.6.1.tgz#5bf851f3ee232c565b8c82608926df0ad28c1958" + integrity sha512-d0/TpMoEV4qMYkdpyyjU2Otse9X2jC1DuxWajHOWZYEw8oejMvXYTZ10hNaXZvAcNM9q214rp+k4mkt6gIcI6g== dependencies: "@aws-sdk/credential-provider-ini" "3.6.1" "@aws-sdk/property-provider" "3.6.1" @@ -1093,7 +1106,8 
@@ "@aws-sdk/credential-provider-sso@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.186.0.tgz#e1aa466543b3b0877d45b885a1c11b329232df22" + integrity sha512-mJ+IZljgXPx99HCmuLgBVDPLepHrwqnEEC/0wigrLCx6uz3SrAWmGZsNbxSEtb2CFSAaczlTHcU/kIl7XZIyeQ== dependencies: "@aws-sdk/client-sso" "3.186.0" "@aws-sdk/property-provider" "3.186.0" @@ -1103,7 +1117,8 @@ "@aws-sdk/credential-provider-web-identity@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.186.0.tgz#db43f37f7827b553490dd865dbaa9a2c45f95494" + integrity sha512-KqzI5eBV72FE+8SuOQAu+r53RXGVHg4AuDJmdXyo7Gc4wS/B9FNElA8jVUjjYgVnf0FSiri+l41VzQ44dCopSA== dependencies: "@aws-sdk/property-provider" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -1111,7 +1126,8 @@ "@aws-sdk/eventstream-codec@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/eventstream-codec/-/eventstream-codec-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-codec/-/eventstream-codec-3.186.0.tgz#9da9608866b38179edf72987f2bc3b865d11db13" + integrity sha512-3kLcJ0/H+zxFlhTlE1SGoFpzd/SitwXOsTSlYVwrwdISKRjooGg0BJpm1CSTkvmWnQIUlYijJvS96TAJ+fCPIA== dependencies: "@aws-crypto/crc32" "2.0.0" "@aws-sdk/types" "3.186.0" @@ -1120,7 +1136,8 @@ "@aws-sdk/eventstream-handler-node@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/eventstream-handler-node/-/eventstream-handler-node-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-handler-node/-/eventstream-handler-node-3.186.0.tgz#d58aec9a8617ed1a9a3800d5526333deb3efebb2" + integrity sha512-S8eAxCHyFAGSH7F6GHKU2ckpiwFPwJUQwMzewISLg3wzLQeu6lmduxBxVaV3/SoEbEMsbNmrgw9EXtw3Vt/odQ== dependencies: "@aws-sdk/eventstream-codec" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -1128,7 +1145,8 @@ "@aws-sdk/eventstream-marshaller@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/eventstream-marshaller/-/eventstream-marshaller-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-marshaller/-/eventstream-marshaller-3.6.1.tgz#6abfbdf3639249d1a77686cbcae5d8e47bcba989" + integrity sha512-ZvN3Nvxn2Gul08L9MOSN123LwSO0E1gF/CqmOGZtEWzPnoSX/PWM9mhPPeXubyw2KdlXylOodYYw3EAATk3OmA== dependencies: "@aws-crypto/crc32" "^1.0.0" "@aws-sdk/types" "3.6.1" @@ -1137,7 +1155,8 @@ "@aws-sdk/eventstream-serde-browser@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/eventstream-serde-browser/-/eventstream-serde-browser-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-browser/-/eventstream-serde-browser-3.186.0.tgz#2a0bd942f977b3e2f1a77822ac091ddebe069475" + integrity sha512-0r2c+yugBdkP5bglGhGOgztjeHdHTKqu2u6bvTByM0nJShNO9YyqWygqPqDUOE5axcYQE1D0aFDGzDtP3mGJhw== dependencies: "@aws-sdk/eventstream-serde-universal" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -1145,7 +1164,8 @@ "@aws-sdk/eventstream-serde-browser@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/eventstream-serde-browser/-/eventstream-serde-browser-3.6.1.tgz" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-browser/-/eventstream-serde-browser-3.6.1.tgz#1253bd5215745f79d534fc9bc6bd006ee7a0f239" + integrity sha512-J8B30d+YUfkBtgWRr7+9AfYiPnbG28zjMlFGsJf8Wxr/hDCfff+Z8NzlBYFEbS7McXXhRiIN8DHUvCtolJtWJQ== dependencies: "@aws-sdk/eventstream-marshaller" "3.6.1" "@aws-sdk/eventstream-serde-universal" "3.6.1" @@ -1154,21 +1174,24 @@ "@aws-sdk/eventstream-serde-config-resolver@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-3.186.0.tgz#6c277058bb0fa14752f0b6d7043576e0b5f13da4" + integrity sha512-xhwCqYrAX5c7fg9COXVw6r7Sa3BO5cCfQMSR5S1QisE7do8K1GDKEHvUCheOx+RLon+P3glLjuNBMdD0HfCVNA== dependencies: "@aws-sdk/types" "3.186.0" tslib "^2.3.1" "@aws-sdk/eventstream-serde-config-resolver@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-3.6.1.tgz#ebb5c1614f55d0ebb225defac1f76c420e188086" + integrity sha512-72pCzcT/KeD4gPgRVBSQzEzz4JBim8bNwPwZCGaIYdYAsAI8YMlvp0JNdis3Ov9DFURc87YilWKQlAfw7CDJxA== dependencies: "@aws-sdk/types" "3.6.1" tslib "^1.8.0" "@aws-sdk/eventstream-serde-node@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/eventstream-serde-node/-/eventstream-serde-node-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-node/-/eventstream-serde-node-3.186.0.tgz#dabeab714f447790c5dd31d401c5a3822b795109" + integrity sha512-9p/gdukJYfmA+OEYd6MfIuufxrrfdt15lBDM3FODuc9j09LSYSRHSxthkIhiM5XYYaaUM+4R0ZlSMdaC3vFDFQ== dependencies: "@aws-sdk/eventstream-serde-universal" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -1176,7 +1199,8 @@ "@aws-sdk/eventstream-serde-node@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/eventstream-serde-node/-/eventstream-serde-node-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-node/-/eventstream-serde-node-3.6.1.tgz#705e12bea185905a198d7812af10e3a679dfc841" + integrity sha512-rjBbJFjCrEcm2NxZctp+eJmyPxKYayG3tQZo8PEAQSViIlK5QexQI3fgqNAeCtK7l/SFAAvnOMRZF6Z3NdUY6A== dependencies: "@aws-sdk/eventstream-marshaller" "3.6.1" "@aws-sdk/eventstream-serde-universal" "3.6.1" @@ -1185,7 +1209,8 @@ "@aws-sdk/eventstream-serde-universal@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/eventstream-serde-universal/-/eventstream-serde-universal-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-universal/-/eventstream-serde-universal-3.186.0.tgz#85a88a2cd5c336b1271976fa8db70654ec90fbf4" + integrity sha512-rIgPmwUxn2tzainBoh+cxAF+b7o01CcW+17yloXmawsi0kiR7QK7v9m/JTGQPWKtHSsPOrtRzuiWQNX57SlcsQ== dependencies: "@aws-sdk/eventstream-codec" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -1193,7 +1218,8 @@ "@aws-sdk/eventstream-serde-universal@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/eventstream-serde-universal/-/eventstream-serde-universal-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/eventstream-serde-universal/-/eventstream-serde-universal-3.6.1.tgz#5be6865adb55436cbc90557df3a3c49b53553470" + integrity sha512-rpRu97yAGHr9GQLWMzcGICR2PxNu1dHU/MYc9Kb6UgGeZd4fod4o1zjhAJuj98cXn2xwHNFM4wMKua6B4zKrZg== 
dependencies: "@aws-sdk/eventstream-marshaller" "3.6.1" "@aws-sdk/types" "3.6.1" @@ -1201,7 +1227,8 @@ "@aws-sdk/fetch-http-handler@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/fetch-http-handler/-/fetch-http-handler-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/fetch-http-handler/-/fetch-http-handler-3.186.0.tgz#c1adc5f741e1ba9ad9d3fb13c9c2afdc88530a85" + integrity sha512-k2v4AAHRD76WnLg7arH94EvIclClo/YfuqO7NoQ6/KwOxjRhs4G6TgIsAZ9E0xmqoJoV81Xqy8H8ldfy9F8LEw== dependencies: "@aws-sdk/protocol-http" "3.186.0" "@aws-sdk/querystring-builder" "3.186.0" @@ -1211,7 +1238,8 @@ "@aws-sdk/fetch-http-handler@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/fetch-http-handler/-/fetch-http-handler-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/fetch-http-handler/-/fetch-http-handler-3.6.1.tgz#c5fb4a4ee158161fca52b220d2c11dddcda9b092" + integrity sha512-N8l6ZbwhINuWG5hsl625lmIQmVjzsqRPmlgh061jm5D90IhsM5/3A3wUxpB/k0av1dmuMRw/m0YtBU5w4LOwvw== dependencies: "@aws-sdk/protocol-http" "3.6.1" "@aws-sdk/querystring-builder" "3.6.1" @@ -1219,18 +1247,10 @@ "@aws-sdk/util-base64-browser" "3.6.1" tslib "^1.8.0" -"@aws-sdk/hash-blob-browser@3.6.1": - version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/hash-blob-browser/-/hash-blob-browser-3.6.1.tgz" - dependencies: - "@aws-sdk/chunked-blob-reader" "3.6.1" - "@aws-sdk/chunked-blob-reader-native" "3.6.1" - "@aws-sdk/types" "3.6.1" - tslib "^1.8.0" - "@aws-sdk/hash-node@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/hash-node/-/hash-node-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/hash-node/-/hash-node-3.186.0.tgz#8cb13aae8f46eb360fed76baf5062f66f27dfb70" + integrity sha512-G3zuK8/3KExDTxqrGqko+opOMLRF0BwcwekV/wm3GKIM/NnLhHblBs2zd/yi7VsEoWmuzibfp6uzxgFpEoJ87w== dependencies: "@aws-sdk/types" "3.186.0" "@aws-sdk/util-buffer-from" "3.186.0" @@ -1238,74 +1258,56 @@ "@aws-sdk/hash-node@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/hash-node/-/hash-node-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/hash-node/-/hash-node-3.6.1.tgz#72d75ec3b9c7e7f9b0c498805364f1f897165ce9" + integrity sha512-iKEpzpyaG9PYCnaOGwTIf0lffsF/TpsXrzAfnBlfeOU/3FbgniW2z/yq5xBbtMDtLobtOYC09kUFwDnDvuveSA== dependencies: "@aws-sdk/types" "3.6.1" "@aws-sdk/util-buffer-from" "3.6.1" tslib "^1.8.0" -"@aws-sdk/hash-stream-node@3.6.1": - version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/hash-stream-node/-/hash-stream-node-3.6.1.tgz" - dependencies: - "@aws-sdk/types" "3.6.1" - tslib "^1.8.0" - "@aws-sdk/invalid-dependency@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/invalid-dependency/-/invalid-dependency-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/invalid-dependency/-/invalid-dependency-3.186.0.tgz#aa6331ccf404cb659ec38483116080e4b82b0663" + integrity sha512-hjeZKqORhG2DPWYZ776lQ9YO3gjw166vZHZCZU/43kEYaCZHsF4mexHwHzreAY6RfS25cH60Um7dUh1aeVIpkw== dependencies: "@aws-sdk/types" "3.186.0" tslib "^2.3.1" "@aws-sdk/invalid-dependency@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/invalid-dependency/-/invalid-dependency-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/invalid-dependency/-/invalid-dependency-3.6.1.tgz#fd2519f5482c6d6113d38a73b7143fd8d5b5b670" + integrity sha512-d0RLqK7yeDCZJKopnGmGXo2rYkQNE7sGKVmBHQD1j1kKZ9lWwRoJeWqo834JNPZzY5XRvZG5SuIjJ1kFy8LpyQ== dependencies: "@aws-sdk/types" "3.6.1" tslib "^1.8.0" 
"@aws-sdk/is-array-buffer@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/is-array-buffer/-/is-array-buffer-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/is-array-buffer/-/is-array-buffer-3.186.0.tgz#7700e36f29d416c2677f4bf8816120f96d87f1b7" + integrity sha512-fObm+P6mjWYzxoFY4y2STHBmSdgKbIAXez0xope563mox62I8I4hhVPUCaDVydXvDpJv8tbedJMk0meJl22+xA== dependencies: tslib "^2.3.1" "@aws-sdk/is-array-buffer@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/is-array-buffer/-/is-array-buffer-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/is-array-buffer/-/is-array-buffer-3.6.1.tgz#96df5d64b2d599947f81b164d5d92623f85c659c" + integrity sha512-qm2iDJmCrxlQE2dsFG+TujPe7jw4DF+4RTrsFMhk/e3lOl3MAzQ6Fc2kXtgeUcVrZVFTL8fQvXE1ByYyI6WbCw== dependencies: tslib "^1.8.0" "@aws-sdk/md5-js@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/md5-js/-/md5-js-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/md5-js/-/md5-js-3.6.1.tgz#bffe21106fba0174d73ccc2c29ca1c5364d2af2d" + integrity sha512-lzCqkZF1sbzGFDyq1dI+lR3AmlE33rbC/JhZ5fzw3hJZvfZ6Beq3Su7YwDo65IWEu0zOKYaNywTeOloXP/CkxQ== dependencies: "@aws-sdk/types" "3.6.1" "@aws-sdk/util-utf8-browser" "3.6.1" tslib "^1.8.0" -"@aws-sdk/middleware-apply-body-checksum@3.6.1": - version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-apply-body-checksum/-/middleware-apply-body-checksum-3.6.1.tgz" - dependencies: - "@aws-sdk/is-array-buffer" "3.6.1" - "@aws-sdk/protocol-http" "3.6.1" - "@aws-sdk/types" "3.6.1" - tslib "^1.8.0" - -"@aws-sdk/middleware-bucket-endpoint@3.6.1": - version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.6.1.tgz" - dependencies: - "@aws-sdk/protocol-http" "3.6.1" - "@aws-sdk/types" "3.6.1" - "@aws-sdk/util-arn-parser" "3.6.1" - tslib "^1.8.0" - "@aws-sdk/middleware-content-length@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-content-length/-/middleware-content-length-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-content-length/-/middleware-content-length-3.186.0.tgz#8cc7aeec527738c46fdaf4a48b17c5cbfdc7ce58" + integrity sha512-Ol3c1ks3IK1s+Okc/rHIX7w2WpXofuQdoAEme37gHeml+8FtUlWH/881h62xfMdf+0YZpRuYv/eM7lBmJBPNJw== dependencies: "@aws-sdk/protocol-http" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -1313,7 +1315,8 @@ "@aws-sdk/middleware-content-length@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-content-length/-/middleware-content-length-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-content-length/-/middleware-content-length-3.6.1.tgz#f9c00a4045b2b56c1ff8bcbb3dec9c3d42332992" + integrity sha512-QRcocG9f5YjYzbjs2HjKla6ZIjvx8Y8tm1ZSFOPey81m18CLif1O7M3AtJXvxn+0zeSck9StFdhz5gfjVNYtDg== dependencies: "@aws-sdk/protocol-http" "3.6.1" "@aws-sdk/types" "3.6.1" @@ -1321,32 +1324,17 @@ "@aws-sdk/middleware-eventstream@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-eventstream/-/middleware-eventstream-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-eventstream/-/middleware-eventstream-3.186.0.tgz#64a66102ed2e182182473948f131f23dda84e729" + integrity sha512-7yjFiitTGgfKL6cHK3u3HYFnld26IW5aUAFuEd6ocR/FjliysfBd8g0g1bw3bRfIMgCDD8OIOkXK8iCk2iYGWQ== dependencies: "@aws-sdk/protocol-http" "3.186.0" "@aws-sdk/types" "3.186.0" tslib "^2.3.1" -"@aws-sdk/middleware-expect-continue@3.6.1": 
- version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.6.1.tgz" - dependencies: - "@aws-sdk/middleware-header-default" "3.6.1" - "@aws-sdk/protocol-http" "3.6.1" - "@aws-sdk/types" "3.6.1" - tslib "^1.8.0" - -"@aws-sdk/middleware-header-default@3.6.1": - version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-header-default/-/middleware-header-default-3.6.1.tgz" - dependencies: - "@aws-sdk/protocol-http" "3.6.1" - "@aws-sdk/types" "3.6.1" - tslib "^1.8.0" - "@aws-sdk/middleware-host-header@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-host-header/-/middleware-host-header-3.186.0.tgz#fce4f1219ce1835e2348c787d8341080b0024e34" + integrity sha512-5bTzrRzP2IGwyF3QCyMGtSXpOOud537x32htZf344IvVjrqZF/P8CDfGTkHkeBCIH+wnJxjK+l/QBb3ypAMIqQ== dependencies: "@aws-sdk/protocol-http" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -1354,36 +1342,33 @@ "@aws-sdk/middleware-host-header@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-host-header/-/middleware-host-header-3.6.1.tgz#6e1b4b95c5bfea5a4416fa32f11d8fa2e6edaeff" + integrity sha512-nwq8R2fGBRZQE0Fr/jiOgqfppfiTQCUoD8hyX3qSS7Qc2uqpsDOt2TnnoZl56mpQYkF/344IvMAkp+ew6wR73w== dependencies: "@aws-sdk/protocol-http" "3.6.1" "@aws-sdk/types" "3.6.1" tslib "^1.8.0" -"@aws-sdk/middleware-location-constraint@3.6.1": - version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.6.1.tgz" - dependencies: - "@aws-sdk/types" "3.6.1" - tslib "^1.8.0" - "@aws-sdk/middleware-logger@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-logger/-/middleware-logger-3.186.0.tgz#8a027fbbb1b8098ccc888bce51f34b000c0a0550" + integrity sha512-/1gGBImQT8xYh80pB7QtyzA799TqXtLZYQUohWAsFReYB7fdh5o+mu2rX0FNzZnrLIh2zBUNs4yaWGsnab4uXg== dependencies: "@aws-sdk/types" "3.186.0" tslib "^2.3.1" "@aws-sdk/middleware-logger@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-logger/-/middleware-logger-3.6.1.tgz#78b3732cf188d5e4df13488db6418f7f98a77d6d" + integrity sha512-zxaSLpwKlja7JvK20UsDTxPqBZUo3rbDA1uv3VWwpxzOrEWSlVZYx/KLuyGWGkx9V71ZEkf6oOWWJIstS0wyQQ== dependencies: "@aws-sdk/types" "3.6.1" tslib "^1.8.0" "@aws-sdk/middleware-recursion-detection@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.186.0.tgz#9d9d3212e9a954b557840bb80415987f4484487e" + integrity sha512-Za7k26Kovb4LuV5tmC6wcVILDCt0kwztwSlB991xk4vwNTja8kKxSt53WsYG8Q2wSaW6UOIbSoguZVyxbIY07Q== dependencies: "@aws-sdk/protocol-http" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -1391,7 +1376,8 @@ "@aws-sdk/middleware-retry@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-retry/-/middleware-retry-3.186.0.tgz" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/middleware-retry/-/middleware-retry-3.186.0.tgz#0ff9af58d73855863683991a809b40b93c753ad1" + integrity sha512-/VI9emEKhhDzlNv9lQMmkyxx3GjJ8yPfXH3HuAeOgM1wx1BjCTLRYEWnTbQwq7BDzVENdneleCsGAp7yaj80Aw== dependencies: "@aws-sdk/protocol-http" "3.186.0" "@aws-sdk/service-error-classification" "3.186.0" @@ -1402,7 +1388,8 @@ "@aws-sdk/middleware-retry@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-retry/-/middleware-retry-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-retry/-/middleware-retry-3.6.1.tgz#202aadb1a3bf0e1ceabcd8319a5fa308b32db247" + integrity sha512-WHeo4d2jsXxBP+cec2SeLb0btYXwYXuE56WLmNt0RvJYmiBzytUeGJeRa9HuwV574kgigAuHGCeHlPO36G4Y0Q== dependencies: "@aws-sdk/protocol-http" "3.6.1" "@aws-sdk/service-error-classification" "3.6.1" @@ -1411,18 +1398,10 @@ tslib "^1.8.0" uuid "^3.0.0" -"@aws-sdk/middleware-sdk-s3@3.6.1": - version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.6.1.tgz" - dependencies: - "@aws-sdk/protocol-http" "3.6.1" - "@aws-sdk/types" "3.6.1" - "@aws-sdk/util-arn-parser" "3.6.1" - tslib "^1.8.0" - "@aws-sdk/middleware-sdk-sts@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-sdk-sts/-/middleware-sdk-sts-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-sdk-sts/-/middleware-sdk-sts-3.186.0.tgz#18f3d6b7b42c1345b5733ac3e3119d370a403e94" + integrity sha512-GDcK0O8rjtnd+XRGnxzheq1V2jk4Sj4HtjrxW/ROyhzLOAOyyxutBt+/zOpDD6Gba3qxc69wE+Cf/qngOkEkDw== dependencies: "@aws-sdk/middleware-signing" "3.186.0" "@aws-sdk/property-provider" "3.186.0" @@ -1433,21 +1412,24 @@ "@aws-sdk/middleware-serde@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-serde/-/middleware-serde-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-serde/-/middleware-serde-3.186.0.tgz#f7944241ad5fb31cb15cd250c9e92147942b9ec6" + integrity sha512-6FEAz70RNf18fKL5O7CepPSwTKJEIoyG9zU6p17GzKMgPeFsxS5xO94Hcq5tV2/CqeHliebjqhKY7yi+Pgok7g== dependencies: "@aws-sdk/types" "3.186.0" tslib "^2.3.1" "@aws-sdk/middleware-serde@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-serde/-/middleware-serde-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-serde/-/middleware-serde-3.6.1.tgz#734c7d16c2aa9ccc01f6cca5e2f6aa2993b6739d" + integrity sha512-EdQCFZRERfP3uDuWcPNuaa2WUR3qL1WFDXafhcx+7ywQxagdYqBUWKFJlLYi6njbkOKXFM+eHBzoXGF0OV3MJA== dependencies: "@aws-sdk/types" "3.6.1" tslib "^1.8.0" "@aws-sdk/middleware-signing@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-signing/-/middleware-signing-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-signing/-/middleware-signing-3.186.0.tgz#37633bf855667b4841464e0044492d0aec5778b9" + integrity sha512-riCJYG/LlF/rkgVbHkr4xJscc0/sECzDivzTaUmfb9kJhAwGxCyNqnTvg0q6UO00kxSdEB9zNZI2/iJYVBijBQ== dependencies: "@aws-sdk/property-provider" "3.186.0" "@aws-sdk/protocol-http" "3.186.0" @@ -1458,35 +1440,32 @@ "@aws-sdk/middleware-signing@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-signing/-/middleware-signing-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-signing/-/middleware-signing-3.6.1.tgz#e70a2f35d85d70e33c9fddfb54b9520f6382db16" + integrity sha512-1woKq+1sU3eausdl8BNdAMRZMkSYuy4mxhLsF0/qAUuLwo1eJLLUCOQp477tICawgu4O4q2OAyUHk7wMqYnQCg== dependencies: 
"@aws-sdk/protocol-http" "3.6.1" "@aws-sdk/signature-v4" "3.6.1" "@aws-sdk/types" "3.6.1" tslib "^1.8.0" -"@aws-sdk/middleware-ssec@3.6.1": - version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.6.1.tgz" - dependencies: - "@aws-sdk/types" "3.6.1" - tslib "^1.8.0" - "@aws-sdk/middleware-stack@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-stack/-/middleware-stack-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-stack/-/middleware-stack-3.186.0.tgz#da3445fe74b867ee6d7eec4f0dde28aaca1125d6" + integrity sha512-fENMoo0pW7UBrbuycPf+3WZ+fcUgP9PnQ0jcOK3WWZlZ9d2ewh4HNxLh4EE3NkNYj4VIUFXtTUuVNHlG8trXjQ== dependencies: tslib "^2.3.1" "@aws-sdk/middleware-stack@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-stack/-/middleware-stack-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-stack/-/middleware-stack-3.6.1.tgz#d7483201706bb5935a62884e9b60f425f1c6434f" + integrity sha512-EPsIxMi8LtCt7YwTFpWGlVGYJc0q4kwFbOssY02qfqdCnyqi2y5wo089dH7OdxUooQ0D7CPsXM1zTTuzvm+9Fw== dependencies: tslib "^1.8.0" "@aws-sdk/middleware-user-agent@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.186.0.tgz#6d881e9cea5fe7517e220f3a47c2f3557c7f27fc" + integrity sha512-fb+F2PF9DLKOVMgmhkr+ltN8ZhNJavTla9aqmbd01846OLEaN1n5xEnV7p8q5+EznVBWDF38Oz9Ae5BMt3Hs7w== dependencies: "@aws-sdk/protocol-http" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -1494,7 +1473,8 @@ "@aws-sdk/middleware-user-agent@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.6.1.tgz#6845dfb3bc6187897f348c2c87dec833e6a65c99" + integrity sha512-YvXvwllNDVvxQ30vIqLsx+P6jjnfFEQUmhlv64n98gOme6h2BqoyQDcC3yHRGctuxRZEsR7W/H1ASTKC+iabbQ== dependencies: "@aws-sdk/protocol-http" "3.6.1" "@aws-sdk/types" "3.6.1" @@ -1502,7 +1482,8 @@ "@aws-sdk/node-config-provider@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/node-config-provider/-/node-config-provider-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/node-config-provider/-/node-config-provider-3.186.0.tgz#64259429d39f2ef5a76663162bf2e8db6032a322" + integrity sha512-De93mgmtuUUeoiKXU8pVHXWKPBfJQlS/lh1k2H9T2Pd9Tzi0l7p5ttddx4BsEx4gk+Pc5flNz+DeptiSjZpa4A== dependencies: "@aws-sdk/property-provider" "3.186.0" "@aws-sdk/shared-ini-file-loader" "3.186.0" @@ -1511,7 +1492,8 @@ "@aws-sdk/node-config-provider@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/node-config-provider/-/node-config-provider-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/node-config-provider/-/node-config-provider-3.6.1.tgz#cb85d06329347fde566f08426f8714b1f65d2fb7" + integrity sha512-x2Z7lm0ZhHYqMybvkaI5hDKfBkaLaXhTDfgrLl9TmBZ3QHO4fIHgeL82VZ90Paol+OS+jdq2AheLmzbSxv3HrA== dependencies: "@aws-sdk/property-provider" "3.6.1" "@aws-sdk/shared-ini-file-loader" "3.6.1" @@ -1520,7 +1502,8 @@ "@aws-sdk/node-http-handler@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/node-http-handler/-/node-http-handler-3.186.0.tgz" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/node-http-handler/-/node-http-handler-3.186.0.tgz#8be1598a9187637a767dc337bf22fe01461e86eb" + integrity sha512-CbkbDuPZT9UNJ4dAZJWB3BV+Z65wFy7OduqGkzNNrKq6ZYMUfehthhUOTk8vU6RMe/0FkN+J0fFXlBx/bs/cHw== dependencies: "@aws-sdk/abort-controller" "3.186.0" "@aws-sdk/protocol-http" "3.186.0" @@ -1530,7 +1513,8 @@ "@aws-sdk/node-http-handler@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/node-http-handler/-/node-http-handler-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/node-http-handler/-/node-http-handler-3.6.1.tgz#4b65c4dcc0cf46ba44cb6c3bf29c5f817bb8d9a7" + integrity sha512-6XSaoqbm9ZF6T4UdBCcs/Gn2XclwBotkdjj46AxO+9vRAgZDP+lH/8WwZsvfqJhhRhS0qxWrks98WGJwmaTG8g== dependencies: "@aws-sdk/abort-controller" "3.6.1" "@aws-sdk/protocol-http" "3.6.1" @@ -1540,35 +1524,40 @@ "@aws-sdk/property-provider@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/property-provider/-/property-provider-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/property-provider/-/property-provider-3.186.0.tgz#af41e615662a2749d3ff7da78c41f79f4be95b3b" + integrity sha512-nWKqt36UW3xV23RlHUmat+yevw9up+T+953nfjcmCBKtgWlCWu/aUzewTRhKj3VRscbN+Wer95SBw9Lr/MMOlQ== dependencies: "@aws-sdk/types" "3.186.0" tslib "^2.3.1" "@aws-sdk/property-provider@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/property-provider/-/property-provider-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/property-provider/-/property-provider-3.6.1.tgz#d973fc87d199d32c44d947e17f2ee2dd140a9593" + integrity sha512-2gR2DzDySXKFoj9iXLm1TZBVSvFIikEPJsbRmAZx5RBY+tp1IXWqZM6PESjaLdLg/ZtR0QhW2ZcRn0fyq2JfnQ== dependencies: "@aws-sdk/types" "3.6.1" tslib "^1.8.0" "@aws-sdk/protocol-http@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/protocol-http/-/protocol-http-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/protocol-http/-/protocol-http-3.186.0.tgz#99115870846312dd4202b5e2cc68fe39324b9bfa" + integrity sha512-l/KYr/UBDUU5ginqTgHtFfHR3X6ljf/1J1ThIiUg3C3kVC/Zwztm7BEOw8hHRWnWQGU/jYasGYcrcPLdQqFZyQ== dependencies: "@aws-sdk/types" "3.186.0" tslib "^2.3.1" "@aws-sdk/protocol-http@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/protocol-http/-/protocol-http-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/protocol-http/-/protocol-http-3.6.1.tgz#d3d276846bec19ddb339d06bbc48116d17bbc656" + integrity sha512-WkQz7ncVYTLvCidDfXWouDzqxgSNPZDz3Bql+7VhZeITnzAEcr4hNMyEqMAVYBVugGmkG2W6YiUqNNs1goOcDA== dependencies: "@aws-sdk/types" "3.6.1" tslib "^1.8.0" "@aws-sdk/querystring-builder@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/querystring-builder/-/querystring-builder-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/querystring-builder/-/querystring-builder-3.186.0.tgz#a380db0e1c71004932d9e2f3e6dc6761d1165c47" + integrity sha512-mweCpuLufImxfq/rRBTEpjGuB4xhQvbokA+otjnUxlPdIobytLqEs7pCGQfLzQ7+1ZMo8LBXt70RH4A2nSX/JQ== dependencies: "@aws-sdk/types" "3.186.0" "@aws-sdk/util-uri-escape" "3.186.0" @@ -1576,7 +1565,8 @@ "@aws-sdk/querystring-builder@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/querystring-builder/-/querystring-builder-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/querystring-builder/-/querystring-builder-3.6.1.tgz#4c769829a3760ef065d0d3801f297a7f0cd324d4" + integrity 
sha512-ESe255Yl6vB1AMNqaGSQow3TBYYnpw0AFjE40q2VyiNrkbaqKmW2EzjeCy3wEmB1IfJDHy3O12ZOMUMOnjFT8g== dependencies: "@aws-sdk/types" "3.6.1" "@aws-sdk/util-uri-escape" "3.6.1" @@ -1584,54 +1574,49 @@ "@aws-sdk/querystring-parser@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/querystring-parser/-/querystring-parser-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/querystring-parser/-/querystring-parser-3.186.0.tgz#4db6d31ad4df0d45baa2a35e371fbaa23e45ddd2" + integrity sha512-0iYfEloghzPVXJjmnzHamNx1F1jIiTW9Svy5ZF9LVqyr/uHZcQuiWYsuhWloBMLs8mfWarkZM02WfxZ8buAuhg== dependencies: "@aws-sdk/types" "3.186.0" tslib "^2.3.1" "@aws-sdk/querystring-parser@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/querystring-parser/-/querystring-parser-3.6.1.tgz" - dependencies: - "@aws-sdk/types" "3.6.1" - tslib "^1.8.0" - -"@aws-sdk/s3-request-presigner@3.6.1": - version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/s3-request-presigner/-/s3-request-presigner-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/querystring-parser/-/querystring-parser-3.6.1.tgz#e3fa5a710429c7dd411e802a0b82beb48012cce2" + integrity sha512-hh6dhqamKrWWaDSuO2YULci0RGwJWygoy8hpCRxs/FpzzHIcbm6Cl6Jhrn5eKBzOBv+PhCcYwbfad0kIZZovcQ== dependencies: - "@aws-sdk/protocol-http" "3.6.1" - "@aws-sdk/signature-v4" "3.6.1" - "@aws-sdk/smithy-client" "3.6.1" "@aws-sdk/types" "3.6.1" - "@aws-sdk/util-create-request" "3.6.1" - "@aws-sdk/util-format-url" "3.6.1" tslib "^1.8.0" "@aws-sdk/service-error-classification@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/service-error-classification/-/service-error-classification-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/service-error-classification/-/service-error-classification-3.186.0.tgz#6e4e1d4b53d68bd28c28d9cf0b3b4cb6a6a59dbb" + integrity sha512-DRl3ORk4tF+jmH5uvftlfaq0IeKKpt0UPAOAFQ/JFWe+TjOcQd/K+VC0iiIG97YFp3aeFmH1JbEgsNxd+8fdxw== "@aws-sdk/service-error-classification@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/service-error-classification/-/service-error-classification-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/service-error-classification/-/service-error-classification-3.6.1.tgz#296fe62ac61338341e8a009c9a2dab013a791903" + integrity sha512-kZ7ZhbrN1f+vrSRkTJvXsu7BlOyZgym058nPA745+1RZ1Rtv4Ax8oknf2RvJyj/1qRUi8LBaAREjzQ3C8tmLBA== "@aws-sdk/shared-ini-file-loader@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/shared-ini-file-loader/-/shared-ini-file-loader-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/shared-ini-file-loader/-/shared-ini-file-loader-3.186.0.tgz#a2d285bb3c4f8d69f7bfbde7a5868740cd3f7795" + integrity sha512-2FZqxmICtwN9CYid4dwfJSz/gGFHyStFQ3HCOQ8DsJUf2yREMSBsVmKqsyWgOrYcQ98gPcD5GIa7QO5yl3XF6A== dependencies: "@aws-sdk/types" "3.186.0" tslib "^2.3.1" "@aws-sdk/shared-ini-file-loader@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/shared-ini-file-loader/-/shared-ini-file-loader-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/shared-ini-file-loader/-/shared-ini-file-loader-3.6.1.tgz#2b7182cbb0d632ad7c9712bebffdeee24a6f7eb6" + integrity sha512-BnLHtsNLOoow6rPV+QVi6jnovU5g1m0YzoUG0BQYZ1ALyVlWVr0VvlUX30gMDfdYoPMp+DHvF8GXdMuGINq6kQ== dependencies: tslib "^1.8.0" "@aws-sdk/signature-v4@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/signature-v4/-/signature-v4-3.186.0.tgz" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/signature-v4/-/signature-v4-3.186.0.tgz#bbd56e71af95548abaeec6307ea1dfe7bd26b4e4" + integrity sha512-18i96P5c4suMqwSNhnEOqhq4doqqyjH4fn0YV3F8TkekHPIWP4mtIJ0PWAN4eievqdtcKgD/GqVO6FaJG9texw== dependencies: "@aws-sdk/is-array-buffer" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -1642,7 +1627,8 @@ "@aws-sdk/signature-v4@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/signature-v4/-/signature-v4-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/signature-v4/-/signature-v4-3.6.1.tgz#b20a3cf3e891131f83b012651f7d4af2bf240611" + integrity sha512-EAR0qGVL4AgzodZv4t+BSuBfyOXhTNxDxom50IFI1MqidR9vI6avNZKcPHhgXbm7XVcsDGThZKbzQ2q7MZ2NTA== dependencies: "@aws-sdk/is-array-buffer" "3.6.1" "@aws-sdk/types" "3.6.1" @@ -1652,7 +1638,8 @@ "@aws-sdk/smithy-client@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/smithy-client/-/smithy-client-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/smithy-client/-/smithy-client-3.186.0.tgz#67514544fb55d7eff46300e1e73311625cf6f916" + integrity sha512-rdAxSFGSnrSprVJ6i1BXi65r4X14cuya6fYe8dSdgmFSa+U2ZevT97lb3tSINCUxBGeMXhENIzbVGkRZuMh+DQ== dependencies: "@aws-sdk/middleware-stack" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -1660,7 +1647,8 @@ "@aws-sdk/smithy-client@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/smithy-client/-/smithy-client-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/smithy-client/-/smithy-client-3.6.1.tgz#683fef89802e318922f8529a5433592d71a7ce9d" + integrity sha512-AVpRK4/iUxNeDdAm8UqP0ZgtgJMQeWcagTylijwelhWXyXzHUReY1sgILsWcdWnoy6gq845W7K2VBhBleni8+w== dependencies: "@aws-sdk/middleware-stack" "3.6.1" "@aws-sdk/types" "3.6.1" @@ -1668,21 +1656,26 @@ "@aws-sdk/types@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/types/-/types-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.186.0.tgz#f6fb6997b6a364f399288bfd5cd494bc680ac922" + integrity sha512-NatmSU37U+XauMFJCdFI6nougC20JUFZar+ump5wVv0i54H+2Refg1YbFDxSs0FY28TSB9jfhWIpfFBmXgL5MQ== -"@aws-sdk/types@3.6.1", "@aws-sdk/types@^3.1.0": +"@aws-sdk/types@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/types/-/types-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.6.1.tgz#00686db69e998b521fcd4a5f81ef0960980f80c4" + integrity sha512-4Dx3eRTrUHLxhFdLJL8zdNGzVsJfAxtxPYYGmIddUkO2Gj3WA1TGjdfG4XN/ClI6e1XonCHafQX3UYO/mgnH3g== -"@aws-sdk/types@^3.110.0": - version "3.347.0" - resolved "https://registry.npmjs.org/@aws-sdk/types/-/types-3.347.0.tgz" +"@aws-sdk/types@^3.1.0", "@aws-sdk/types@^3.110.0": + version "3.391.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.391.0.tgz#d49b0130943f0c60fd9bc99b2a47ec9720e2dd07" + integrity sha512-QpYVFKMOnzHz/JMj/b8wb18qxiT92U/5r5MmtRz2R3LOH6ooTO96k4ozXCrYr0qNed1PAnOj73rPrrH2wnCJKQ== dependencies: + "@smithy/types" "^2.2.0" tslib "^2.5.0" "@aws-sdk/url-parser-native@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/url-parser-native/-/url-parser-native-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/url-parser-native/-/url-parser-native-3.6.1.tgz#a5e787f98aafa777e73007f9490df334ef3389a2" + integrity sha512-3O+ktsrJoE8YQCho9L41YXO8EWILXrSeES7amUaV3mgIV5w4S3SB/r4RkmylpqRpQF7Ry8LFiAnMqH1wa4WBPA== dependencies: "@aws-sdk/querystring-parser" "3.6.1" "@aws-sdk/types" "3.6.1" @@ -1691,7 +1684,8 @@ "@aws-sdk/url-parser@3.186.0": version "3.186.0" - resolved 
"https://registry.npmjs.org/@aws-sdk/url-parser/-/url-parser-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/url-parser/-/url-parser-3.186.0.tgz#e42f845cd405c1920fdbdcc796a350d4ace16ae9" + integrity sha512-jfdJkKqJZp8qjjwEjIGDqbqTuajBsddw02f86WiL8bPqD8W13/hdqbG4Fpwc+Bm6GwR6/4MY6xWXFnk8jDUKeA== dependencies: "@aws-sdk/querystring-parser" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -1699,100 +1693,98 @@ "@aws-sdk/url-parser@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/url-parser/-/url-parser-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/url-parser/-/url-parser-3.6.1.tgz#f5d89fb21680469a61cb9fe08a7da3ef887884dd" + integrity sha512-pWFIePDx0PMCleQRsQDWoDl17YiijOLj0ZobN39rQt+wv5PhLSZDz9PgJsqS48nZ6hqsKgipRcjiBMhn5NtFcQ== dependencies: "@aws-sdk/querystring-parser" "3.6.1" "@aws-sdk/types" "3.6.1" tslib "^1.8.0" -"@aws-sdk/util-arn-parser@3.6.1": - version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.6.1.tgz" - dependencies: - tslib "^1.8.0" - "@aws-sdk/util-base64-browser@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/util-base64-browser/-/util-base64-browser-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-base64-browser/-/util-base64-browser-3.186.0.tgz#0310482752163fa819718ce9ea9250836b20346d" + integrity sha512-TpQL8opoFfzTwUDxKeon/vuc83kGXpYqjl6hR8WzmHoQgmFfdFlV+0KXZOohra1001OP3FhqvMqaYbO8p9vXVQ== dependencies: tslib "^2.3.1" "@aws-sdk/util-base64-browser@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/util-base64-browser/-/util-base64-browser-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-base64-browser/-/util-base64-browser-3.6.1.tgz#eddea1311b41037fc3fddd889d3e0a9882363215" + integrity sha512-+DHAIgt0AFARDVC7J0Z9FkSmJhBMlkYdOPeAAgO0WaQoKj7rtsLQJ7P3v3aS1paKN5/sk5xNY7ziVB6uHtOvHA== dependencies: tslib "^1.8.0" "@aws-sdk/util-base64-node@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/util-base64-node/-/util-base64-node-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-base64-node/-/util-base64-node-3.186.0.tgz#500bd04b1ef7a6a5c0a2d11c0957a415922e05c7" + integrity sha512-wH5Y/EQNBfGS4VkkmiMyZXU+Ak6VCoFM1GKWopV+sj03zR2D4FHexi4SxWwEBMpZCd6foMtihhbNBuPA5fnh6w== dependencies: "@aws-sdk/util-buffer-from" "3.186.0" tslib "^2.3.1" "@aws-sdk/util-base64-node@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/util-base64-node/-/util-base64-node-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-base64-node/-/util-base64-node-3.6.1.tgz#a79c233861e50d3a30728c72b736afdee07d4009" + integrity sha512-oiqzpsvtTSS92+cL3ykhGd7t3qBJKeHvrgOwUyEf1wFWHQ2DPJR+dIMy5rMFRXWLKCl3w7IddY2rJCkLYMjaqQ== dependencies: "@aws-sdk/util-buffer-from" "3.6.1" tslib "^1.8.0" "@aws-sdk/util-body-length-browser@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/util-body-length-browser/-/util-body-length-browser-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-body-length-browser/-/util-body-length-browser-3.186.0.tgz#a898eda9f874f6974a9c5c60fcc76bcb6beac820" + integrity sha512-zKtjkI/dkj9oGkjo+7fIz+I9KuHrVt1ROAeL4OmDESS8UZi3/O8uMDFMuCp8jft6H+WFuYH6qRVWAVwXMiasXw== dependencies: tslib "^2.3.1" "@aws-sdk/util-body-length-browser@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/util-body-length-browser/-/util-body-length-browser-3.6.1.tgz" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/util-body-length-browser/-/util-body-length-browser-3.6.1.tgz#2e8088f2d9a5a8258b4f56079a8890f538c2797e" + integrity sha512-IdWwE3rm/CFDk2F+IwTZOFTnnNW5SB8y1lWiQ54cfc7y03hO6jmXNnpZGZ5goHhT+vf1oheNQt1J47m0pM/Irw== dependencies: tslib "^1.8.0" "@aws-sdk/util-body-length-node@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/util-body-length-node/-/util-body-length-node-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-body-length-node/-/util-body-length-node-3.186.0.tgz#95efbacbd13cb739b942c126c5d16ecf6712d4db" + integrity sha512-U7Ii8u8Wvu9EnBWKKeuwkdrWto3c0j7LG677Spe6vtwWkvY70n9WGfiKHTgBpVeLNv8jvfcx5+H0UOPQK1o9SQ== dependencies: tslib "^2.3.1" "@aws-sdk/util-body-length-node@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/util-body-length-node/-/util-body-length-node-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-body-length-node/-/util-body-length-node-3.6.1.tgz#6e4f2eae46c5a7b0417a12ca7f4b54c390d4cacd" + integrity sha512-CUG3gc18bSOsqViQhB3M4AlLpAWV47RE6yWJ6rLD0J6/rSuzbwbjzxM39q0YTAVuSo/ivdbij+G9c3QCirC+QQ== dependencies: tslib "^1.8.0" "@aws-sdk/util-buffer-from@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/util-buffer-from/-/util-buffer-from-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-buffer-from/-/util-buffer-from-3.186.0.tgz#01f7edb683d2f40374d0ca8ef2d16346dc8040a1" + integrity sha512-be2GCk2lsLWg/2V5Y+S4/9pOMXhOQo4DR4dIqBdR2R+jrMMHN9Xsr5QrkT6chcqLaJ/SBlwiAEEi3StMRmCOXA== dependencies: "@aws-sdk/is-array-buffer" "3.186.0" tslib "^2.3.1" "@aws-sdk/util-buffer-from@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/util-buffer-from/-/util-buffer-from-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-buffer-from/-/util-buffer-from-3.6.1.tgz#24184ce74512f764d84002201b7f5101565e26f9" + integrity sha512-OGUh2B5NY4h7iRabqeZ+EgsrzE1LUmNFzMyhoZv0tO4NExyfQjxIYXLQQvydeOq9DJUbCw+yrRZrj8vXNDQG+g== dependencies: "@aws-sdk/is-array-buffer" "3.6.1" tslib "^1.8.0" "@aws-sdk/util-config-provider@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/util-config-provider/-/util-config-provider-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-config-provider/-/util-config-provider-3.186.0.tgz#52ce3711edceadfac1b75fccc7c615e90c33fb2f" + integrity sha512-71Qwu/PN02XsRLApyxG0EUy/NxWh/CXxtl2C7qY14t+KTiRapwbDkdJ1cMsqYqghYP4BwJoj1M+EFMQSSlkZQQ== dependencies: tslib "^2.3.1" -"@aws-sdk/util-create-request@3.6.1": - version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/util-create-request/-/util-create-request-3.6.1.tgz" - dependencies: - "@aws-sdk/middleware-stack" "3.6.1" - "@aws-sdk/smithy-client" "3.6.1" - "@aws-sdk/types" "3.6.1" - tslib "^1.8.0" - "@aws-sdk/util-defaults-mode-browser@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/util-defaults-mode-browser/-/util-defaults-mode-browser-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-defaults-mode-browser/-/util-defaults-mode-browser-3.186.0.tgz#d30b2f572e273d7d98287274c37c9ee00b493507" + integrity sha512-U8GOfIdQ0dZ7RRVpPynGteAHx4URtEh+JfWHHVfS6xLPthPHWTbyRhkQX++K/F8Jk+T5U8Anrrqlea4TlcO2DA== dependencies: "@aws-sdk/property-provider" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -1801,7 +1793,8 @@ "@aws-sdk/util-defaults-mode-node@3.186.0": version "3.186.0" - resolved 
"https://registry.npmjs.org/@aws-sdk/util-defaults-mode-node/-/util-defaults-mode-node-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-defaults-mode-node/-/util-defaults-mode-node-3.186.0.tgz#8572453ba910fd2ab08d2cfee130ce5a0db83ba7" + integrity sha512-N6O5bpwCiE4z8y7SPHd7KYlszmNOYREa+mMgtOIXRU3VXSEHVKVWTZsHKvNTTHpW0qMqtgIvjvXCo3vsch5l3A== dependencies: "@aws-sdk/config-resolver" "3.186.0" "@aws-sdk/credential-provider-imds" "3.186.0" @@ -1810,53 +1803,52 @@ "@aws-sdk/types" "3.186.0" tslib "^2.3.1" -"@aws-sdk/util-format-url@3.6.1": - version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/util-format-url/-/util-format-url-3.6.1.tgz" - dependencies: - "@aws-sdk/querystring-builder" "3.6.1" - "@aws-sdk/types" "3.6.1" - tslib "^1.8.0" - "@aws-sdk/util-hex-encoding@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/util-hex-encoding/-/util-hex-encoding-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-hex-encoding/-/util-hex-encoding-3.186.0.tgz#7ed58b923997c6265f4dce60c8704237edb98895" + integrity sha512-UL9rdgIZz1E/jpAfaKH8QgUxNK9VP5JPgoR0bSiaefMjnsoBh0x/VVMsfUyziOoJCMLebhJzFowtwrSKEGsxNg== dependencies: tslib "^2.3.1" "@aws-sdk/util-hex-encoding@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/util-hex-encoding/-/util-hex-encoding-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-hex-encoding/-/util-hex-encoding-3.6.1.tgz#84954fcc47b74ffbd2911ba5113e93bd9b1c6510" + integrity sha512-pzsGOHtU2eGca4NJgFg94lLaeXDOg8pcS9sVt4f9LmtUGbrqRveeyBv0XlkHeZW2n0IZBssPHipVYQFlk7iaRA== dependencies: tslib "^1.8.0" "@aws-sdk/util-locate-window@^3.0.0": version "3.310.0" - resolved "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.310.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-locate-window/-/util-locate-window-3.310.0.tgz#b071baf050301adee89051032bd4139bba32cc40" + integrity sha512-qo2t/vBTnoXpjKxlsC2e1gBrRm80M3bId27r0BRB2VniSSe7bL1mmzM+/HFtujm0iAxtPM+aLEflLJlJeDPg0w== dependencies: tslib "^2.5.0" "@aws-sdk/util-middleware@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/util-middleware/-/util-middleware-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-middleware/-/util-middleware-3.186.0.tgz#ba2e286b206cbead306b6d2564f9d0495f384b40" + integrity sha512-fddwDgXtnHyL9mEZ4s1tBBsKnVQHqTUmFbZKUUKPrg9CxOh0Y/zZxEa5Olg/8dS/LzM1tvg0ATkcyd4/kEHIhg== dependencies: tslib "^2.3.1" "@aws-sdk/util-uri-escape@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/util-uri-escape/-/util-uri-escape-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-uri-escape/-/util-uri-escape-3.186.0.tgz#1752a93dfe58ec88196edb6929806807fd8986da" + integrity sha512-imtOrJFpIZAipAg8VmRqYwv1G/x4xzyoxOJ48ZSn1/ZGnKEEnB6n6E9gwYRebi4mlRuMSVeZwCPLq0ey5hReeQ== dependencies: tslib "^2.3.1" "@aws-sdk/util-uri-escape@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/util-uri-escape/-/util-uri-escape-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-uri-escape/-/util-uri-escape-3.6.1.tgz#433e87458bb510d0e457a86c0acf12b046a5068c" + integrity sha512-tgABiT71r0ScRJZ1pMX0xO0QPMMiISCtumph50IU5VDyZWYgeIxqkMhIcrL1lX0QbNCMgX0n6rZxGrrbjDNavA== dependencies: tslib "^1.8.0" "@aws-sdk/util-user-agent-browser@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.186.0.tgz" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.186.0.tgz#02e214887d30a69176c6a6c2d6903ce774b013b4" + integrity sha512-fbRcTTutMk4YXY3A2LePI4jWSIeHOT8DaYavpc/9Xshz/WH9RTGMmokeVOcClRNBeDSi5cELPJJ7gx6SFD3ZlQ== dependencies: "@aws-sdk/types" "3.186.0" bowser "^2.11.0" @@ -1864,7 +1856,8 @@ "@aws-sdk/util-user-agent-browser@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.6.1.tgz#11b9cc8743392761adb304460f4b54ec8acc2ee6" + integrity sha512-KhJ4VED4QpuBVPXoTjb5LqspX1xHWJTuL8hbPrKfxj+cAaRRW2CNEe7PPy2CfuHtPzP3dU3urtGTachbwNb0jg== dependencies: "@aws-sdk/types" "3.6.1" bowser "^2.11.0" @@ -1872,7 +1865,8 @@ "@aws-sdk/util-user-agent-node@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.186.0.tgz#1ef74973442c8650c7b64ff2fd15cf3c09d8c004" + integrity sha512-oWZR7hN6NtOgnT6fUvHaafgbipQc2xJCRB93XHiF9aZGptGNLJzznIOP7uURdn0bTnF73ejbUXWLQIm8/6ue6w== dependencies: "@aws-sdk/node-config-provider" "3.186.0" "@aws-sdk/types" "3.186.0" @@ -1880,7 +1874,8 @@ "@aws-sdk/util-user-agent-node@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.6.1.tgz#98384095fa67d098ae7dd26f3ccaad028e8aebb6" + integrity sha512-PWwL5EDRwhkXX40m5jjgttlBmLA7vDhHBen1Jcle0RPIDFRVPSE7GgvLF3y4r3SNH0WD6hxqadT50bHQynXW6w== dependencies: "@aws-sdk/node-config-provider" "3.6.1" "@aws-sdk/types" "3.6.1" @@ -1888,87 +1883,99 @@ "@aws-sdk/util-utf8-browser@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.186.0.tgz#5fee6385cfc3effa2be704edc2998abfd6633082" + integrity sha512-n+IdFYF/4qT2WxhMOCeig8LndDggaYHw3BJJtfIBZRiS16lgwcGYvOUmhCkn0aSlG1f/eyg9YZHQG0iz9eLdHQ== dependencies: tslib "^2.3.1" -"@aws-sdk/util-utf8-browser@3.6.1", "@aws-sdk/util-utf8-browser@^3.0.0": +"@aws-sdk/util-utf8-browser@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.6.1.tgz#97a8770cae9d29218adc0f32c7798350261377c7" + integrity sha512-gZPySY6JU5gswnw3nGOEHl3tYE7vPKvtXGYoS2NRabfDKRejFvu+4/nNW6SSpoOxk6LSXsrWB39NO51k+G4PVA== dependencies: tslib "^1.8.0" +"@aws-sdk/util-utf8-browser@^3.0.0": + version "3.259.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.259.0.tgz#3275a6f5eb334f96ca76635b961d3c50259fd9ff" + integrity sha512-UvFa/vR+e19XookZF8RzFZBrw2EUkQWxiBW0yYQAhvk3C+QVGl0H3ouca8LDBlBfQKXwmW3huo/59H8rwb1wJw== + dependencies: + tslib "^2.3.1" + "@aws-sdk/util-utf8-node@3.186.0": version "3.186.0" - resolved "https://registry.npmjs.org/@aws-sdk/util-utf8-node/-/util-utf8-node-3.186.0.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-utf8-node/-/util-utf8-node-3.186.0.tgz#722d9b0f5675ae2e9d79cf67322126d9c9d8d3d8" + integrity sha512-7qlE0dOVdjuRbZTb7HFywnHHCrsN7AeQiTnsWT63mjXGDbPeUWQQw3TrdI20um3cxZXnKoeudGq8K6zbXyQ4iA== 
dependencies: "@aws-sdk/util-buffer-from" "3.186.0" tslib "^2.3.1" "@aws-sdk/util-utf8-node@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/util-utf8-node/-/util-utf8-node-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-utf8-node/-/util-utf8-node-3.6.1.tgz#18534c2069b61f5739ee4cdc70060c9f4b4c4c4f" + integrity sha512-4s0vYfMUn74XLn13rUUhNsmuPMh0j1d4rF58wXtjlVUU78THxonnN8mbCLC48fI3fKDHTmDDkeEqy7+IWP9VyA== dependencies: "@aws-sdk/util-buffer-from" "3.6.1" tslib "^1.8.0" "@aws-sdk/util-waiter@3.6.1": version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/util-waiter/-/util-waiter-3.6.1.tgz" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-waiter/-/util-waiter-3.6.1.tgz#5c66c2da33ff98468726fefddc2ca7ac3352c17d" + integrity sha512-CQMRteoxW1XZSzPBVrTsOTnfzsEGs8N/xZ8BuBnXLBjoIQmRKVxIH9lgphm1ohCtVHoSWf28XH/KoOPFULQ4Tg== dependencies: "@aws-sdk/abort-controller" "3.6.1" "@aws-sdk/types" "3.6.1" tslib "^1.8.0" -"@aws-sdk/xml-builder@3.6.1": - version "3.6.1" - resolved "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.6.1.tgz" +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.22.10", "@babel/code-frame@^7.22.5", "@babel/code-frame@^7.8.3": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.10.tgz#1c20e612b768fefa75f6e90d6ecb86329247f0a3" + integrity sha512-/KKIMG4UEL35WmI9OlvMhurwtytjvXoFcGNrOvyG9zIzA8YmPjVtIZUf7b05+TPO7G7/GEmLHDaoCgACHl9hhA== dependencies: - tslib "^1.8.0" + "@babel/highlight" "^7.22.10" + chalk "^2.4.2" -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.22.5", "@babel/code-frame@^7.8.3": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.5.tgz#234d98e1551960604f1246e6475891a570ad5658" - dependencies: - "@babel/highlight" "^7.22.5" - -"@babel/compat-data@^7.22.5", "@babel/compat-data@^7.22.6": - version "7.22.6" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.6.tgz#15606a20341de59ba02cd2fcc5086fcbe73bf544" +"@babel/compat-data@^7.22.5", "@babel/compat-data@^7.22.6", "@babel/compat-data@^7.22.9": + version "7.22.9" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.9.tgz#71cdb00a1ce3a329ce4cbec3a44f9fef35669730" + integrity sha512-5UamI7xkUcJ3i9qVDS+KFDEK8/7oJ55/sJMB1Ge7IEapr7KfdfV/HErR+koZwOfd+SgtFKOKRhRakdg++DcJpQ== "@babel/core@^7.1.0", "@babel/core@^7.11.1", "@babel/core@^7.12.3", "@babel/core@^7.16.0", "@babel/core@^7.7.2", "@babel/core@^7.8.0": - version "7.22.8" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.22.8.tgz#386470abe884302db9c82e8e5e87be9e46c86785" + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.22.10.tgz#aad442c7bcd1582252cb4576747ace35bc122f35" + integrity sha512-fTmqbbUBAwCcre6zPzNngvsI0aNrPZe77AeqvDxWM9Nm+04RrJ3CAmGHA9f7lJQY6ZMhRztNemy4uslDxTX4Qw== dependencies: "@ampproject/remapping" "^2.2.0" - "@babel/code-frame" "^7.22.5" - "@babel/generator" "^7.22.7" - "@babel/helper-compilation-targets" "^7.22.6" - "@babel/helper-module-transforms" "^7.22.5" - "@babel/helpers" "^7.22.6" - "@babel/parser" "^7.22.7" + "@babel/code-frame" "^7.22.10" + "@babel/generator" "^7.22.10" + "@babel/helper-compilation-targets" "^7.22.10" + "@babel/helper-module-transforms" "^7.22.9" + "@babel/helpers" "^7.22.10" + "@babel/parser" "^7.22.10" 
"@babel/template" "^7.22.5" - "@babel/traverse" "^7.22.8" - "@babel/types" "^7.22.5" - "@nicolo-ribaudo/semver-v6" "^6.3.3" + "@babel/traverse" "^7.22.10" + "@babel/types" "^7.22.10" convert-source-map "^1.7.0" debug "^4.1.0" gensync "^1.0.0-beta.2" json5 "^2.2.2" + semver "^6.3.1" "@babel/eslint-parser@^7.16.3": - version "7.22.7" - resolved "https://registry.yarnpkg.com/@babel/eslint-parser/-/eslint-parser-7.22.7.tgz#d2807fbd1fa4376162716da63dfd3c69a2249fed" + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/eslint-parser/-/eslint-parser-7.22.10.tgz#bfdf3d1b32ad573fe7c1c3447e0b485e3a41fd09" + integrity sha512-0J8DNPRXQRLeR9rPaUMM3fA+RbixjnVLe/MRMYCkp3hzgsSuxCHQ8NN8xQG1wIHKJ4a1DTROTvFJdW+B5/eOsg== dependencies: "@nicolo-ribaudo/eslint-scope-5-internals" "5.1.1-v1" - "@nicolo-ribaudo/semver-v6" "^6.3.3" eslint-visitor-keys "^2.1.0" + semver "^6.3.1" -"@babel/generator@^7.22.7", "@babel/generator@^7.7.2": - version "7.22.7" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.22.7.tgz#a6b8152d5a621893f2c9dacf9a4e286d520633d5" +"@babel/generator@^7.22.10", "@babel/generator@^7.7.2": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.22.10.tgz#c92254361f398e160645ac58831069707382b722" + integrity sha512-79KIf7YiWjjdZ81JnLujDRApWtl7BxTqWD88+FFdQEIOG8LJ0etDOM7CXuIgGJa55sGOwZVwuEsaLEm0PJ5/+A== dependencies: - "@babel/types" "^7.22.5" + "@babel/types" "^7.22.10" "@jridgewell/gen-mapping" "^0.3.2" "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" @@ -1976,50 +1983,56 @@ "@babel/helper-annotate-as-pure@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz#e7f06737b197d580a01edf75d97e2c8be99d3882" + integrity sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg== dependencies: "@babel/types" "^7.22.5" "@babel/helper-builder-binary-assignment-operator-visitor@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.22.5.tgz#a3f4758efdd0190d8927fcffd261755937c71878" + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.22.10.tgz#573e735937e99ea75ea30788b57eb52fab7468c9" + integrity sha512-Av0qubwDQxC56DoUReVDeLfMEjYYSN1nZrTUrWkXd7hpU73ymRANkbuDm3yni9npkn+RXy9nNbEJZEzXr7xrfQ== dependencies: - "@babel/types" "^7.22.5" + "@babel/types" "^7.22.10" -"@babel/helper-compilation-targets@^7.22.5", "@babel/helper-compilation-targets@^7.22.6": - version "7.22.6" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.6.tgz#e30d61abe9480aa5a83232eb31c111be922d2e52" +"@babel/helper-compilation-targets@^7.22.10", "@babel/helper-compilation-targets@^7.22.5", "@babel/helper-compilation-targets@^7.22.6": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.10.tgz#01d648bbc25dd88f513d862ee0df27b7d4e67024" + integrity sha512-JMSwHD4J7SLod0idLq5PKgI+6g/hLD/iuWBq08ZX49xE14VpVEojJ5rHWptpirV2j020MvypRLAXAO50igCJ5Q== dependencies: - "@babel/compat-data" "^7.22.6" + "@babel/compat-data" "^7.22.9" "@babel/helper-validator-option" "^7.22.5" - "@nicolo-ribaudo/semver-v6" "^6.3.3" browserslist "^4.21.9" lru-cache "^5.1.1" + semver "^6.3.1" 
-"@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.22.5", "@babel/helper-create-class-features-plugin@^7.22.6": - version "7.22.6" - resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.22.6.tgz#58564873c889a6fea05a538e23f9f6d201f10950" +"@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.22.10", "@babel/helper-create-class-features-plugin@^7.22.5": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.22.10.tgz#dd2612d59eac45588021ac3d6fa976d08f4e95a3" + integrity sha512-5IBb77txKYQPpOEdUdIhBx8VrZyDCQ+H82H0+5dX1TmuscP5vJKEE3cKurjtIw/vFwzbVH48VweE78kVDBrqjA== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" "@babel/helper-environment-visitor" "^7.22.5" "@babel/helper-function-name" "^7.22.5" "@babel/helper-member-expression-to-functions" "^7.22.5" "@babel/helper-optimise-call-expression" "^7.22.5" - "@babel/helper-replace-supers" "^7.22.5" + "@babel/helper-replace-supers" "^7.22.9" "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" "@babel/helper-split-export-declaration" "^7.22.6" - "@nicolo-ribaudo/semver-v6" "^6.3.3" + semver "^6.3.1" "@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.22.5": - version "7.22.6" - resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.22.6.tgz#87afd63012688ad792de430ceb3b6dc28e4e7a40" + version "7.22.9" + resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.22.9.tgz#9d8e61a8d9366fe66198f57c40565663de0825f6" + integrity sha512-+svjVa/tFwsNSG4NEy1h85+HQ5imbT92Q5/bgtS7P0GTQlP8WuFdqsiABmQouhiFGyV66oGxZFpeYHza1rNsKw== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" - "@nicolo-ribaudo/semver-v6" "^6.3.3" regexpu-core "^5.3.1" + semver "^6.3.1" -"@babel/helper-define-polyfill-provider@^0.4.1": - version "0.4.1" - resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.4.1.tgz#af1429c4a83ac316a6a8c2cc8ff45cb5d2998d3a" +"@babel/helper-define-polyfill-provider@^0.4.2": + version "0.4.2" + resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.4.2.tgz#82c825cadeeeee7aad237618ebbe8fa1710015d7" + integrity sha512-k0qnnOqHn5dK9pZpfD5XXZ9SojAITdCKRn2Lp6rnDGzIbaP0rHyMPk/4wsSxVBVz4RfN0q6VpXWP2pDGIoQ7hw== dependencies: "@babel/helper-compilation-targets" "^7.22.6" "@babel/helper-plugin-utils" "^7.22.5" @@ -2030,10 +2043,12 @@ "@babel/helper-environment-visitor@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz#f06dd41b7c1f44e1f8da6c4055b41ab3a09a7e98" + integrity sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q== "@babel/helper-function-name@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.22.5.tgz#ede300828905bb15e582c037162f99d5183af1be" + integrity sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ== dependencies: "@babel/template" "^7.22.5" "@babel/types" "^7.22.5" @@ -2041,132 +2056,144 @@ "@babel/helper-hoist-variables@^7.22.5": version 
"7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" + integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== dependencies: "@babel/types" "^7.22.5" "@babel/helper-member-expression-to-functions@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.22.5.tgz#0a7c56117cad3372fbf8d2fb4bf8f8d64a1e76b2" + integrity sha512-aBiH1NKMG0H2cGZqspNvsaBe6wNGjbJjuLy29aU+eDZjSbbN53BaxlpB02xm9v34pLTZ1nIQPFYn2qMZoa5BQQ== dependencies: "@babel/types" "^7.22.5" "@babel/helper-module-imports@^7.0.0", "@babel/helper-module-imports@^7.10.4", "@babel/helper-module-imports@^7.16.7", "@babel/helper-module-imports@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz#1a8f4c9f4027d23f520bd76b364d44434a72660c" + integrity sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg== dependencies: "@babel/types" "^7.22.5" -"@babel/helper-module-transforms@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.22.5.tgz#0f65daa0716961b6e96b164034e737f60a80d2ef" +"@babel/helper-module-transforms@^7.22.5", "@babel/helper-module-transforms@^7.22.9": + version "7.22.9" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.22.9.tgz#92dfcb1fbbb2bc62529024f72d942a8c97142129" + integrity sha512-t+WA2Xn5K+rTeGtC8jCsdAH52bjggG5TKRuRrAGNM/mjIbO4GxvlLMFOEz9wXY5I2XQ60PMFsAG2WIcG82dQMQ== dependencies: "@babel/helper-environment-visitor" "^7.22.5" "@babel/helper-module-imports" "^7.22.5" "@babel/helper-simple-access" "^7.22.5" - "@babel/helper-split-export-declaration" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" "@babel/helper-validator-identifier" "^7.22.5" - "@babel/template" "^7.22.5" - "@babel/traverse" "^7.22.5" - "@babel/types" "^7.22.5" "@babel/helper-optimise-call-expression@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz#f21531a9ccbff644fdd156b4077c16ff0c3f609e" + integrity sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw== dependencies: "@babel/types" "^7.22.5" "@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.20.2", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": version "7.22.5" - resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz#dd7ee3735e8a313b9f7b05a773d892e88e6d7295" + integrity sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg== -"@babel/helper-remap-async-to-generator@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.22.5.tgz#14a38141a7bf2165ad38da61d61cf27b43015da2" +"@babel/helper-remap-async-to-generator@^7.22.5", 
"@babel/helper-remap-async-to-generator@^7.22.9": + version "7.22.9" + resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.22.9.tgz#53a25b7484e722d7efb9c350c75c032d4628de82" + integrity sha512-8WWC4oR4Px+tr+Fp0X3RHDVfINGpF3ad1HIbrc8A77epiR6eMMc6jsgozkzT2uDiOOdoS9cLIQ+XD2XvI2WSmQ== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" "@babel/helper-environment-visitor" "^7.22.5" - "@babel/helper-wrap-function" "^7.22.5" - "@babel/types" "^7.22.5" + "@babel/helper-wrap-function" "^7.22.9" -"@babel/helper-replace-supers@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.22.5.tgz#71bc5fb348856dea9fdc4eafd7e2e49f585145dc" +"@babel/helper-replace-supers@^7.22.5", "@babel/helper-replace-supers@^7.22.9": + version "7.22.9" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.22.9.tgz#cbdc27d6d8d18cd22c81ae4293765a5d9afd0779" + integrity sha512-LJIKvvpgPOPUThdYqcX6IXRuIcTkcAub0IaDRGCZH0p5GPUp7PhRU9QVgFcDDd51BaPkk77ZjqFwh6DZTAEmGg== dependencies: "@babel/helper-environment-visitor" "^7.22.5" "@babel/helper-member-expression-to-functions" "^7.22.5" "@babel/helper-optimise-call-expression" "^7.22.5" - "@babel/template" "^7.22.5" - "@babel/traverse" "^7.22.5" - "@babel/types" "^7.22.5" "@babel/helper-simple-access@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz#4938357dc7d782b80ed6dbb03a0fba3d22b1d5de" + integrity sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w== dependencies: "@babel/types" "^7.22.5" "@babel/helper-skip-transparent-expression-wrappers@^7.20.0", "@babel/helper-skip-transparent-expression-wrappers@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.22.5.tgz#007f15240b5751c537c40e77abb4e89eeaaa8847" + integrity sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q== dependencies: "@babel/types" "^7.22.5" -"@babel/helper-split-export-declaration@^7.22.5", "@babel/helper-split-export-declaration@^7.22.6": +"@babel/helper-split-export-declaration@^7.22.6": version "7.22.6" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" + integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== dependencies: "@babel/types" "^7.22.5" "@babel/helper-string-parser@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" + integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== "@babel/helper-validator-identifier@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" + integrity sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ== "@babel/helper-validator-option@^7.22.5": version "7.22.5" - resolved "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz" + resolved 
"https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz#de52000a15a177413c8234fa3a8af4ee8102d0ac" + integrity sha512-R3oB6xlIVKUnxNUxbmgq7pKjxpru24zlimpE8WK47fACIlM0II/Hm1RS8IaOI7NgCr6LNS+jl5l75m20npAziw== -"@babel/helper-wrap-function@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.22.5.tgz#44d205af19ed8d872b4eefb0d2fa65f45eb34f06" +"@babel/helper-wrap-function@^7.22.9": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.22.10.tgz#d845e043880ed0b8c18bd194a12005cb16d2f614" + integrity sha512-OnMhjWjuGYtdoO3FmsEFWvBStBAe2QOgwOLsLNDjN+aaiMD8InJk1/O3HSD8lkqTjCgg5YI34Tz15KNNA3p+nQ== dependencies: "@babel/helper-function-name" "^7.22.5" "@babel/template" "^7.22.5" - "@babel/traverse" "^7.22.5" - "@babel/types" "^7.22.5" + "@babel/types" "^7.22.10" -"@babel/helpers@^7.22.6": - version "7.22.6" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.22.6.tgz#8e61d3395a4f0c5a8060f309fb008200969b5ecd" +"@babel/helpers@^7.22.10": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.22.10.tgz#ae6005c539dfbcb5cd71fb51bfc8a52ba63bc37a" + integrity sha512-a41J4NW8HyZa1I1vAndrraTlPZ/eZoga2ZgS7fEr0tZJGVU4xqdE80CEm0CcNjha5EZ8fTBYLKHF0kqDUuAwQw== dependencies: "@babel/template" "^7.22.5" - "@babel/traverse" "^7.22.6" - "@babel/types" "^7.22.5" + "@babel/traverse" "^7.22.10" + "@babel/types" "^7.22.10" -"@babel/highlight@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.5.tgz#aa6c05c5407a67ebce408162b7ede789b4d22031" +"@babel/highlight@^7.22.10": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.10.tgz#02a3f6d8c1cb4521b2fd0ab0da8f4739936137d7" + integrity sha512-78aUtVcT7MUscr0K5mIEnkwxPE0MaxkR5RxRwuHaQ+JuU5AmTPhY+do2mdzVTnIJJpyBglql2pehuBIWHug+WQ== dependencies: "@babel/helper-validator-identifier" "^7.22.5" - chalk "^2.0.0" + chalk "^2.4.2" js-tokens "^4.0.0" -"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.5", "@babel/parser@^7.22.7": - version "7.22.7" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.7.tgz#df8cf085ce92ddbdbf668a7f186ce848c9036cae" +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.10", "@babel/parser@^7.22.5": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.10.tgz#e37634f9a12a1716136c44624ef54283cabd3f55" + integrity sha512-lNbdGsQb9ekfsnjFGhEiF4hfFqGgfOP3H3d27re3n+CGhNuTSUEQdfWk556sTLNTloczcdM5TYF2LhzmDQKyvQ== "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.22.5.tgz#87245a21cd69a73b0b81bcda98d443d6df08f05e" + integrity sha512-NP1M5Rf+u2Gw9qfSO4ihjcTGW5zXTi36ITLd4/EoAcEhIZ0yjMqmftDNl3QC19CX7olhrjpyU454g/2W7X0jvQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.22.5.tgz#fef09f9499b1f1c930da8a0c419db42167d792ca" + 
integrity sha512-31Bb65aZaUwqCbWMnZPduIZxCBngHFlzyN6Dq6KAJjtx+lx6ohKHubc61OomYi7XwVD4Ol0XCVz4h+pYFR048g== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" @@ -2174,31 +2201,35 @@ "@babel/plugin-proposal-class-properties@^7.16.0": version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3" + integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== dependencies: "@babel/helper-create-class-features-plugin" "^7.18.6" "@babel/helper-plugin-utils" "^7.18.6" "@babel/plugin-proposal-decorators@^7.16.4": - version "7.22.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.22.7.tgz#9b5b73c2e404f0869ef8a8a53765f8203c5467a7" + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.22.10.tgz#d6a8c3a9018e1b13e6647f869c5ea56ff2b585d4" + integrity sha512-KxN6TqZzcFi4uD3UifqXElBTBNLAEH1l3vzMQj6JwJZbL2sZlThxSViOKCYY+4Ah4V4JhQ95IVB7s/Y6SJSlMQ== dependencies: - "@babel/helper-create-class-features-plugin" "^7.22.6" + "@babel/helper-create-class-features-plugin" "^7.22.10" "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-replace-supers" "^7.22.5" + "@babel/helper-replace-supers" "^7.22.9" "@babel/helper-split-export-declaration" "^7.22.6" - "@babel/plugin-syntax-decorators" "^7.22.5" + "@babel/plugin-syntax-decorators" "^7.22.10" "@babel/plugin-proposal-nullish-coalescing-operator@^7.16.0": version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz#fdd940a99a740e577d6c753ab6fbb43fdb9467e1" + integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== dependencies: "@babel/helper-plugin-utils" "^7.18.6" "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" "@babel/plugin-proposal-numeric-separator@^7.16.0": version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz#899b14fbafe87f053d2c5ff05b36029c62e13c75" + integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== dependencies: "@babel/helper-plugin-utils" "^7.18.6" "@babel/plugin-syntax-numeric-separator" "^7.10.4" @@ -2206,6 +2237,7 @@ "@babel/plugin-proposal-optional-chaining@^7.16.0": version "7.21.0" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.21.0.tgz#886f5c8978deb7d30f678b2e24346b287234d3ea" + integrity sha512-p4zeefM72gpmEe2fkUr/OnOXpWEf8nAgk7ZYVqqfFiyIG7oFfVZcCrU64hWn5xp4tQ9LkV4bTIa5rD0KANpKNA== dependencies: "@babel/helper-plugin-utils" "^7.20.2" "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" @@ -2213,7 +2245,8 @@ "@babel/plugin-proposal-private-methods@^7.16.0": version "7.18.6" - resolved 
"https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz#5209de7d213457548a98436fa2882f52f4be6bea" + integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA== dependencies: "@babel/helper-create-class-features-plugin" "^7.18.6" "@babel/helper-plugin-utils" "^7.18.6" @@ -2221,149 +2254,166 @@ "@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2": version "7.21.0-placeholder-for-preset-env.2" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz#7844f9289546efa9febac2de4cfe358a050bd703" - -"@babel/plugin-proposal-unicode-property-regex@^7.4.4": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz" - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" + integrity sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w== "@babel/plugin-syntax-async-generators@^7.8.4": version "7.8.4" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-bigint@^7.8.3": version "7.8.3" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-class-properties@^7.12.13", "@babel/plugin-syntax-class-properties@^7.8.3": version "7.12.13" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== dependencies: "@babel/helper-plugin-utils" "^7.12.13" "@babel/plugin-syntax-class-static-block@^7.14.5": version "7.14.5" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" + integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== dependencies: "@babel/helper-plugin-utils" "^7.14.5" -"@babel/plugin-syntax-decorators@^7.22.5": - version "7.22.5" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.22.5.tgz#329fe2907c73de184033775637dbbc507f09116a" +"@babel/plugin-syntax-decorators@^7.22.10": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.22.10.tgz#7d83ea04d893c442b78ebf4c3cbac59a7211deff" + integrity sha512-z1KTVemBjnz+kSEilAsI4lbkPOl5TvJH7YDSY1CTIzvLWJ+KHXp+mRe8VPmfnyvqOPqar1V2gid2PleKzRUstQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-dynamic-import@^7.8.3": version "7.8.3" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" + integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-export-namespace-from@^7.8.3": version "7.8.3" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" + integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== dependencies: "@babel/helper-plugin-utils" "^7.8.3" "@babel/plugin-syntax-flow@^7.22.5": version "7.22.5" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.22.5.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.22.5.tgz#163b820b9e7696ce134df3ee716d9c0c98035859" + integrity sha512-9RdCl0i+q0QExayk2nOS7853w08yLucnnPML6EN9S8fgMPVtdLDCdx/cOQ/i44Lb9UeQX9A35yaqBBOMMZxPxQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-import-assertions@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.22.5.tgz#07d252e2aa0bc6125567f742cd58619cb14dce98" + integrity sha512-rdV97N7KqsRzeNGoWUOK6yUsWarLjE5Su/Snk9IYPU9CwkWHs4t+rTGOvffTR8XGkJMTAdLfO0xVnXm8wugIJg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-import-attributes@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.22.5.tgz#ab840248d834410b829f569f5262b9e517555ecb" + integrity sha512-KwvoWDeNKPETmozyFE0P2rOLqh39EoQHNjqizrI5B8Vt0ZNS7M56s7dAiAqbYfiAYOuIzIh96z3iR2ktgu3tEg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-import-meta@^7.10.4", "@babel/plugin-syntax-import-meta@^7.8.3": version "7.10.4" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== dependencies: "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-json-strings@^7.8.3": version "7.8.3" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-jsx@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.22.5.tgz#a6b68e84fb76e759fc3b93e901876ffabbe1d918" + integrity sha512-gvyP4hZrgrs/wWMaocvxZ44Hw0b3W8Pe+cMxc8V1ULQ07oh8VNbIRaoD1LRZVTvD+0nieDKjfgKg89sD7rrKrg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-logical-assignment-operators@^7.10.4", "@babel/plugin-syntax-logical-assignment-operators@^7.8.3": version "7.10.4" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== dependencies: "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": version "7.8.3" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-numeric-separator@^7.10.4", "@babel/plugin-syntax-numeric-separator@^7.8.3": version "7.10.4" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== dependencies: "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-object-rest-spread@^7.8.3": version "7.8.3" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-optional-catch-binding@^7.8.3": version "7.8.3" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-optional-chaining@^7.8.3": version "7.8.3" - 
resolved "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-private-property-in-object@^7.14.5": version "7.14.5" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" + integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== dependencies: "@babel/helper-plugin-utils" "^7.14.5" "@babel/plugin-syntax-top-level-await@^7.14.5", "@babel/plugin-syntax-top-level-await@^7.8.3": version "7.14.5" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== dependencies: "@babel/helper-plugin-utils" "^7.14.5" "@babel/plugin-syntax-typescript@^7.22.5", "@babel/plugin-syntax-typescript@^7.7.2": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.22.5.tgz#aac8d383b062c5072c647a31ef990c1d0af90272" + integrity sha512-1mS2o03i7t1c6VzH6fdQ3OA8tcEIxwG18zIPRp+UY1Ihv6W+XZzBCVxExF9upussPXJ0xE9XRHwMoNs1ep/nRQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-unicode-sets-regex@^7.18.6": version "7.18.6" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz#d49a3b3e6b52e5be6740022317580234a6a47357" + integrity sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.18.6" "@babel/helper-plugin-utils" "^7.18.6" @@ -2371,21 +2421,24 @@ "@babel/plugin-transform-arrow-functions@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.22.5.tgz#e5ba566d0c58a5b2ba2a8b795450641950b71958" + integrity sha512-26lTNXoVRdAnsaDXPpvCNUq+OVWEVC6bx7Vvz9rC53F2bagUWW4u4ii2+h8Fejfh7RYqPxn+libeFBBck9muEw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-async-generator-functions@^7.22.7": - version "7.22.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.22.7.tgz#053e76c0a903b72b573cb1ab7d6882174d460a1b" +"@babel/plugin-transform-async-generator-functions@^7.22.10": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.22.10.tgz#45946cd17f915b10e65c29b8ed18a0a50fc648c8" + integrity sha512-eueE8lvKVzq5wIObKK/7dvoeKJ+xc6TvRn6aysIjS6pSCeLy7S/eVi7pEQknZqyqvzaNKdDtem8nUNTBgDVR2g== dependencies: "@babel/helper-environment-visitor" 
"^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-remap-async-to-generator" "^7.22.5" + "@babel/helper-remap-async-to-generator" "^7.22.9" "@babel/plugin-syntax-async-generators" "^7.8.4" "@babel/plugin-transform-async-to-generator@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.22.5.tgz#c7a85f44e46f8952f6d27fe57c2ed3cc084c3775" + integrity sha512-b1A8D8ZzE/VhNDoV1MSJTnpKkCG5bJo+19R4o4oy03zM7ws8yEMK755j61Dc3EyvdysbqH5BOOTquJ7ZX9C6vQ== dependencies: "@babel/helper-module-imports" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" @@ -2394,18 +2447,21 @@ "@babel/plugin-transform-block-scoped-functions@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.22.5.tgz#27978075bfaeb9fa586d3cb63a3d30c1de580024" + integrity sha512-tdXZ2UdknEKQWKJP1KMNmuF5Lx3MymtMN/pvA+p/VEkhK8jVcQ1fzSy8KM9qRYhAf2/lV33hoMPKI/xaI9sADA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-block-scoping@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.22.5.tgz#8bfc793b3a4b2742c0983fadc1480d843ecea31b" +"@babel/plugin-transform-block-scoping@^7.22.10": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.22.10.tgz#88a1dccc3383899eb5e660534a76a22ecee64faa" + integrity sha512-1+kVpGAOOI1Albt6Vse7c8pHzcZQdQKW+wJH+g8mCaszOdDVwRXa/slHPqIw+oJAJANTKDMuM2cBdV0Dg618Vg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-class-properties@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.22.5.tgz#97a56e31ad8c9dc06a0b3710ce7803d5a48cca77" + integrity sha512-nDkQ0NfkOhPTq8YCLiWNxp1+f9fCobEjCb0n8WdbNUBc4IB5V7P1QnX9IjpSoquKrXF5SKojHleVNs2vGeHCHQ== dependencies: "@babel/helper-create-class-features-plugin" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" @@ -2413,6 +2469,7 @@ "@babel/plugin-transform-class-static-block@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.22.5.tgz#3e40c46f048403472d6f4183116d5e46b1bff5ba" + integrity sha512-SPToJ5eYZLxlnp1UzdARpOGeC2GbHvr9d/UV0EukuVx8atktg194oe+C5BqQ8jRTkgLRVOPYeXRSBg1IlMoVRA== dependencies: "@babel/helper-create-class-features-plugin" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" @@ -2421,6 +2478,7 @@ "@babel/plugin-transform-classes@^7.22.6": version "7.22.6" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.22.6.tgz#e04d7d804ed5b8501311293d1a0e6d43e94c3363" + integrity sha512-58EgM6nuPNG6Py4Z3zSuu0xWu2VfodiMi72Jt5Kj2FECmaYk1RrTXA45z6KBFsu9tRgwQDwIiY4FXTt+YsSFAQ== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" "@babel/helper-compilation-targets" "^7.22.6" @@ -2435,19 +2493,22 @@ "@babel/plugin-transform-computed-properties@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.22.5.tgz#cd1e994bf9f316bd1c2dafcd02063ec261bb3869" + integrity sha512-4GHWBgRf0krxPX+AaPtgBAlTgTeZmqDynokHOX7aqqAB4tHs3U2Y02zH6ETFdLZGcg9UQSD1WCmkVrE9ErHeOg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/template" 
"^7.22.5" -"@babel/plugin-transform-destructuring@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.22.5.tgz#d3aca7438f6c26c78cdd0b0ba920a336001b27cc" +"@babel/plugin-transform-destructuring@^7.22.10": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.22.10.tgz#38e2273814a58c810b6c34ea293be4973c4eb5e2" + integrity sha512-dPJrL0VOyxqLM9sritNbMSGx/teueHF/htMKrPT7DNxccXxRDPYqlgPFFdr8u+F+qUZOkZoXue/6rL5O5GduEw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-dotall-regex@^7.22.5", "@babel/plugin-transform-dotall-regex@^7.4.4": +"@babel/plugin-transform-dotall-regex@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.22.5.tgz#dbb4f0e45766eb544e193fb00e65a1dd3b2a4165" + integrity sha512-5/Yk9QxCQCl+sOIB1WelKnVRxTJDSAIxtJLL2/pqL14ZVlbH0fUQUZa/T5/UnQtBNgghR7mfB8ERBKyKPCi7Vw== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" @@ -2455,12 +2516,14 @@ "@babel/plugin-transform-duplicate-keys@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.22.5.tgz#b6e6428d9416f5f0bba19c70d1e6e7e0b88ab285" + integrity sha512-dEnYD+9BBgld5VBXHnF/DbYGp3fqGMsyxKbtD1mDyIA7AkTSpKXFhCVuj/oQVOoALfBs77DudA0BE4d5mcpmqw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-dynamic-import@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.22.5.tgz#d6908a8916a810468c4edff73b5b75bda6ad393e" + integrity sha512-0MC3ppTB1AMxd8fXjSrbPa7LT9hrImt+/fcj+Pg5YMD7UQyWp/02+JWpdnCymmsXwIx5Z+sYn1bwCn4ZJNvhqQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-dynamic-import" "^7.8.3" @@ -2468,6 +2531,7 @@ "@babel/plugin-transform-exponentiation-operator@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.22.5.tgz#402432ad544a1f9a480da865fda26be653e48f6a" + integrity sha512-vIpJFNM/FjZ4rh1myqIya9jXwrwwgFRHPjT3DkUA9ZLHuzox8jiXkOLvwm1H+PQIP3CqfC++WPKeuDi0Sjdj1g== dependencies: "@babel/helper-builder-binary-assignment-operator-visitor" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" @@ -2475,13 +2539,15 @@ "@babel/plugin-transform-export-namespace-from@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.22.5.tgz#57c41cb1d0613d22f548fddd8b288eedb9973a5b" + integrity sha512-X4hhm7FRnPgd4nDA4b/5V280xCx6oL7Oob5+9qVS5C13Zq4bh1qq7LU0GgRU6b5dBWBvhGaXYVB4AcN6+ol6vg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-export-namespace-from" "^7.8.3" "@babel/plugin-transform-flow-strip-types@^7.16.0": version "7.22.5" - resolved "https://registry.npmjs.org/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.22.5.tgz" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.22.5.tgz#0bb17110c7bf5b35a60754b2f00c58302381dee2" + integrity sha512-tujNbZdxdG0/54g/oua8ISToaXTFBf8EnSb5PgQSciIXWOWKX3S4+JR7ZE9ol8FZwf9kxitzkGQ+QWeov/mCiA== dependencies: 
"@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-flow" "^7.22.5" @@ -2489,12 +2555,14 @@ "@babel/plugin-transform-for-of@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.22.5.tgz#ab1b8a200a8f990137aff9a084f8de4099ab173f" + integrity sha512-3kxQjX1dU9uudwSshyLeEipvrLjBCVthCgeTp6CzE/9JYrlAIaeekVxRpCWsDDfYTfRZRoCeZatCQvwo+wvK8A== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-function-name@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.22.5.tgz#935189af68b01898e0d6d99658db6b164205c143" + integrity sha512-UIzQNMS0p0HHiQm3oelztj+ECwFnj+ZRV4KnguvlsD2of1whUeM6o7wGNj6oLwcDoAXQ8gEqfgC24D+VdIcevg== dependencies: "@babel/helper-compilation-targets" "^7.22.5" "@babel/helper-function-name" "^7.22.5" @@ -2503,6 +2571,7 @@ "@babel/plugin-transform-json-strings@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.22.5.tgz#14b64352fdf7e1f737eed68de1a1468bd2a77ec0" + integrity sha512-DuCRB7fu8MyTLbEQd1ew3R85nx/88yMoqo2uPSjevMj3yoN7CDM8jkgrY0wmVxfJZyJ/B9fE1iq7EQppWQmR5A== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-json-strings" "^7.8.3" @@ -2510,12 +2579,14 @@ "@babel/plugin-transform-literals@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.22.5.tgz#e9341f4b5a167952576e23db8d435849b1dd7920" + integrity sha512-fTLj4D79M+mepcw3dgFBTIDYpbcB9Sm0bpm4ppXPaO+U+PKFFyV9MGRvS0gvGw62sd10kT5lRMKXAADb9pWy8g== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-logical-assignment-operators@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.22.5.tgz#66ae5f068fd5a9a5dc570df16f56c2a8462a9d6c" + integrity sha512-MQQOUW1KL8X0cDWfbwYP+TbVbZm16QmQXJQ+vndPtH/BoO0lOKpVoEDMI7+PskYxH+IiE0tS8xZye0qr1lGzSA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" @@ -2523,12 +2594,14 @@ "@babel/plugin-transform-member-expression-literals@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.22.5.tgz#4fcc9050eded981a468347dd374539ed3e058def" + integrity sha512-RZEdkNtzzYCFl9SE9ATaUMTj2hqMb4StarOJLrZRbqqU4HSBE7UlBw9WBWQiDzrJZJdUWiMTVDI6Gv/8DPvfew== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-modules-amd@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.22.5.tgz#4e045f55dcf98afd00f85691a68fc0780704f526" + integrity sha512-R+PTfLTcYEmb1+kK7FNkhQ1gP4KgjpSO6HfH9+f8/yfp2Nt3ggBjiVpRwmwTlfqZLafYKJACy36yDXlEmI9HjQ== dependencies: "@babel/helper-module-transforms" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" @@ -2536,6 +2609,7 @@ "@babel/plugin-transform-modules-commonjs@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.22.5.tgz#7d9875908d19b8c0536085af7b053fd5bd651bfa" + integrity sha512-B4pzOXj+ONRmuaQTg05b3y/4DuFz3WcCNAXPLb2Q0GT0TrGKGxNKV4jwsXts+StaM0LQczZbOpj8o1DLPDJIiA== dependencies: "@babel/helper-module-transforms" 
"^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" @@ -2544,6 +2618,7 @@ "@babel/plugin-transform-modules-systemjs@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.22.5.tgz#18c31410b5e579a0092638f95c896c2a98a5d496" + integrity sha512-emtEpoaTMsOs6Tzz+nbmcePl6AKVtS1yC4YNAeMun9U8YCsgadPNxnOPQ8GhHFB2qdx+LZu9LgoC0Lthuu05DQ== dependencies: "@babel/helper-hoist-variables" "^7.22.5" "@babel/helper-module-transforms" "^7.22.5" @@ -2553,6 +2628,7 @@ "@babel/plugin-transform-modules-umd@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.22.5.tgz#4694ae40a87b1745e3775b6a7fe96400315d4f98" + integrity sha512-+S6kzefN/E1vkSsKx8kmQuqeQsvCKCd1fraCM7zXm4SFoggI099Tr4G8U81+5gtMdUeMQ4ipdQffbKLX0/7dBQ== dependencies: "@babel/helper-module-transforms" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" @@ -2560,6 +2636,7 @@ "@babel/plugin-transform-named-capturing-groups-regex@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.22.5.tgz#67fe18ee8ce02d57c855185e27e3dc959b2e991f" + integrity sha512-YgLLKmS3aUBhHaxp5hi1WJTgOUb/NCuDHzGT9z9WTt3YG+CPRhJs6nprbStx6DnWM4dh6gt7SU3sZodbZ08adQ== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" @@ -2567,12 +2644,14 @@ "@babel/plugin-transform-new-target@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.22.5.tgz#1b248acea54ce44ea06dfd37247ba089fcf9758d" + integrity sha512-AsF7K0Fx/cNKVyk3a+DW0JLo+Ua598/NxMRvxDnkpCIGFh43+h/v2xyhRUYf6oD8gE4QtL83C7zZVghMjHd+iw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-nullish-coalescing-operator@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.22.5.tgz#f8872c65776e0b552e0849d7596cddd416c3e381" + integrity sha512-6CF8g6z1dNYZ/VXok5uYkkBBICHZPiGEl7oDnAx2Mt1hlHVHOSIKWJaXHjQJA5VB43KZnXZDIexMchY4y2PGdA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" @@ -2580,6 +2659,7 @@ "@babel/plugin-transform-numeric-separator@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.22.5.tgz#57226a2ed9e512b9b446517ab6fa2d17abb83f58" + integrity sha512-NbslED1/6M+sXiwwtcAB/nieypGw02Ejf4KtDeMkCEpP6gWFMX1wI9WKYua+4oBneCCEmulOkRpwywypVZzs/g== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-numeric-separator" "^7.10.4" @@ -2587,6 +2667,7 @@ "@babel/plugin-transform-object-rest-spread@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.22.5.tgz#9686dc3447df4753b0b2a2fae7e8bc33cdc1f2e1" + integrity sha512-Kk3lyDmEslH9DnvCDA1s1kkd3YWQITiBOHngOtDL9Pt6BZjzqb6hiOlb8VfjiiQJ2unmegBqZu0rx5RxJb5vmQ== dependencies: "@babel/compat-data" "^7.22.5" "@babel/helper-compilation-targets" "^7.22.5" @@ -2597,6 +2678,7 @@ "@babel/plugin-transform-object-super@^7.22.5": version "7.22.5" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.22.5.tgz#794a8d2fcb5d0835af722173c1a9d704f44e218c" + integrity sha512-klXqyaT9trSjIUrcsYIfETAzmOEZL3cBYqOYLJxBHfMFFggmXOv+NYSX/Jbs9mzMVESw/WycLFPRx8ba/b2Ipw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-replace-supers" "^7.22.5" @@ -2604,13 +2686,15 @@ "@babel/plugin-transform-optional-catch-binding@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.22.5.tgz#842080be3076703be0eaf32ead6ac8174edee333" + integrity sha512-pH8orJahy+hzZje5b8e2QIlBWQvGpelS76C63Z+jhZKsmzfNaPQ+LaW6dcJ9bxTpo1mtXbgHwy765Ro3jftmUg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" -"@babel/plugin-transform-optional-chaining@^7.22.5", "@babel/plugin-transform-optional-chaining@^7.22.6": - version "7.22.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.22.6.tgz#4bacfe37001fe1901117672875e931d439811564" +"@babel/plugin-transform-optional-chaining@^7.22.10", "@babel/plugin-transform-optional-chaining@^7.22.5": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.22.10.tgz#076d28a7e074392e840d4ae587d83445bac0372a" + integrity sha512-MMkQqZAZ+MGj+jGTG3OTuhKeBpNcO+0oCEbrGNEaOmiEn+1MzRyQlYsruGiU8RTK3zV6XwrVJTmwiDOyYK6J9g== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" @@ -2619,12 +2703,14 @@ "@babel/plugin-transform-parameters@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.22.5.tgz#c3542dd3c39b42c8069936e48717a8d179d63a18" + integrity sha512-AVkFUBurORBREOmHRKo06FjHYgjrabpdqRSwq6+C7R5iTCZOsM4QbcB27St0a4U6fffyAOqh3s/qEfybAhfivg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-private-methods@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.22.5.tgz#21c8af791f76674420a147ae62e9935d790f8722" + integrity sha512-PPjh4gyrQnGe97JTalgRGMuU4icsZFnWkzicB/fUtzlKUqvsWBKEpPPfr5a2JiyirZkHxnAqkQMO5Z5B2kK3fA== dependencies: "@babel/helper-create-class-features-plugin" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" @@ -2632,6 +2718,7 @@ "@babel/plugin-transform-private-property-in-object@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.22.5.tgz#07a77f28cbb251546a43d175a1dda4cf3ef83e32" + integrity sha512-/9xnaTTJcVoBtSSmrVyhtSvO3kbqS2ODoh2juEU72c3aYonNF0OMGiaz2gjukyKM2wBBYJP38S4JiE0Wfb5VMQ== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" "@babel/helper-create-class-features-plugin" "^7.22.5" @@ -2641,30 +2728,35 @@ "@babel/plugin-transform-property-literals@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.22.5.tgz#b5ddabd73a4f7f26cd0e20f5db48290b88732766" + integrity sha512-TiOArgddK3mK/x1Qwf5hay2pxI6wCZnvQqrFSqbtg1GLl2JcNMitVH/YnqjP+M31pLUeTfzY1HAXFDnUBV30rQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-react-constant-elements@^7.12.1": version "7.22.5" resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.22.5.tgz#6dfa7c1c37f7d7279e417ceddf5a04abb8bb9c29" + integrity sha512-BF5SXoO+nX3h5OhlN78XbbDrBOffv+AxPP2ENaJOVqjWCgBDeOY3WcaUcddutGSfoap+5NEQ/q/4I3WZIvgkXA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-react-display-name@^7.16.0", "@babel/plugin-transform-react-display-name@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.22.5.tgz#3c4326f9fce31c7968d6cb9debcaf32d9e279a2b" + integrity sha512-PVk3WPYudRF5z4GKMEYUrLjPl38fJSKNaEOkFuoprioowGuWN6w2RKznuFNSlJx7pzzXXStPUnNSOEO0jL5EVw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-react-jsx-development@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.22.5.tgz#e716b6edbef972a92165cd69d92f1255f7e73e87" + integrity sha512-bDhuzwWMuInwCYeDeMzyi7TaBgRQei6DqxhbyniL7/VG4RSS7HtSL2QbY4eESy1KJqlWt8g3xeEBGPuo+XqC8A== dependencies: "@babel/plugin-transform-react-jsx" "^7.22.5" "@babel/plugin-transform-react-jsx@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.22.5.tgz#932c291eb6dd1153359e2a90cb5e557dcf068416" + integrity sha512-rog5gZaVbUip5iWDMTYbVM15XQq+RkUKhET/IHR6oizR+JEoN6CAfTTuHcK4vwUyzca30qqHqEpzBOnaRMWYMA== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" "@babel/helper-module-imports" "^7.22.5" @@ -2675,43 +2767,49 @@ "@babel/plugin-transform-react-pure-annotations@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.22.5.tgz#1f58363eef6626d6fa517b95ac66fe94685e32c0" + integrity sha512-gP4k85wx09q+brArVinTXhWiyzLl9UpmGva0+mWyKxk6JZequ05x3eUcIUE+FyttPKJFRRVtAvQaJ6YF9h1ZpA== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-regenerator@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.22.5.tgz#cd8a68b228a5f75fa01420e8cc2fc400f0fc32aa" +"@babel/plugin-transform-regenerator@^7.22.10": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.22.10.tgz#8ceef3bd7375c4db7652878b0241b2be5d0c3cca" + integrity sha512-F28b1mDt8KcT5bUyJc/U9nwzw6cV+UmTeRlXYIl2TNqMMJif0Jeey9/RQ3C4NOd2zp0/TRsDns9ttj2L523rsw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" - regenerator-transform "^0.15.1" + regenerator-transform "^0.15.2" "@babel/plugin-transform-reserved-words@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.22.5.tgz#832cd35b81c287c4bcd09ce03e22199641f964fb" + integrity sha512-DTtGKFRQUDm8svigJzZHzb/2xatPc6TzNvAIJ5GqOKDsGFYgAskjRulbR/vGsPKq3OPqtexnz327qYpP57RFyA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-runtime@^7.16.4": - version "7.22.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.22.7.tgz#eb9094b5fb756cc2d98d398b2c88aeefa9205de9" + version "7.22.10" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.22.10.tgz#89eda6daf1d3af6f36fb368766553054c8d7cd46" + integrity sha512-RchI7HePu1eu0CYNKHHHQdfenZcM4nz8rew5B1VWqeRKdcwW5aQ5HeG9eTUbWiAS1UrmHVLmoxTWHt3iLD/NhA== dependencies: "@babel/helper-module-imports" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" - "@nicolo-ribaudo/semver-v6" "^6.3.3" - babel-plugin-polyfill-corejs2 "^0.4.4" - babel-plugin-polyfill-corejs3 "^0.8.2" - babel-plugin-polyfill-regenerator "^0.5.1" + babel-plugin-polyfill-corejs2 "^0.4.5" + babel-plugin-polyfill-corejs3 "^0.8.3" + babel-plugin-polyfill-regenerator "^0.5.2" + semver "^6.3.1" "@babel/plugin-transform-shorthand-properties@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.22.5.tgz#6e277654be82b5559fc4b9f58088507c24f0c624" + integrity sha512-vM4fq9IXHscXVKzDv5itkO1X52SmdFBFcMIBZ2FRn2nqVYqw6dBexUgMvAjHW+KXpPPViD/Yo3GrDEBaRC0QYA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-spread@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.22.5.tgz#6487fd29f229c95e284ba6c98d65eafb893fea6b" + integrity sha512-5ZzDQIGyvN4w8+dMmpohL6MBo+l2G7tfC/O2Dg7/hjpgeWvUx8FzfeOKxGog9IimPa4YekaQ9PlDqTLOljkcxg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" @@ -2719,39 +2817,45 @@ "@babel/plugin-transform-sticky-regex@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.22.5.tgz#295aba1595bfc8197abd02eae5fc288c0deb26aa" + integrity sha512-zf7LuNpHG0iEeiyCNwX4j3gDg1jgt1k3ZdXBKbZSoA3BbGQGvMiSvfbZRR3Dr3aeJe3ooWFZxOOG3IRStYp2Bw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-template-literals@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.22.5.tgz#8f38cf291e5f7a8e60e9f733193f0bcc10909bff" + integrity sha512-5ciOehRNf+EyUeewo8NkbQiUs4d6ZxiHo6BcBcnFlgiJfu16q0bQUw9Jvo0b0gBKFG1SMhDSjeKXSYuJLeFSMA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-typeof-symbol@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.22.5.tgz#5e2ba478da4b603af8673ff7c54f75a97b716b34" + integrity sha512-bYkI5lMzL4kPii4HHEEChkD0rkc+nvnlR6+o/qdqR6zrm0Sv/nodmyLhlq2DO0YKLUNd2VePmPRjJXSBh9OIdA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-typescript@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.22.5.tgz#5c0f7adfc1b5f38c4dbc8f79b1f0f8074134bd7d" + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.22.10.tgz#aadd98fab871f0bb5717bcc24c31aaaa455af923" + integrity sha512-7++c8I/ymsDo4QQBAgbraXLzIM6jmfao11KgIBEYZRReWzNWH9NtNgJcyrZiXsOPh523FQm6LfpLyy/U5fn46A== dependencies: "@babel/helper-annotate-as-pure" "^7.22.5" - "@babel/helper-create-class-features-plugin" "^7.22.5" + "@babel/helper-create-class-features-plugin" "^7.22.10" "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-typescript" "^7.22.5" -"@babel/plugin-transform-unicode-escapes@^7.22.5": - version "7.22.5" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.22.5.tgz#ce0c248522b1cb22c7c992d88301a5ead70e806c" +"@babel/plugin-transform-unicode-escapes@^7.22.10": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.22.10.tgz#c723f380f40a2b2f57a62df24c9005834c8616d9" + integrity sha512-lRfaRKGZCBqDlRU3UIFovdp9c9mEvlylmpod0/OatICsSfuQ9YFthRo1tpTkGsklEefZdqlEFdY4A2dwTb6ohg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-unicode-property-regex@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.22.5.tgz#098898f74d5c1e86660dc112057b2d11227f1c81" + integrity sha512-HCCIb+CbJIAE6sXn5CjFQXMwkCClcOfPCzTlilJ8cUatfzwHlWQkbtV0zD338u9dZskwvuOYTuuaMaA8J5EI5A== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" @@ -2759,6 +2863,7 @@ "@babel/plugin-transform-unicode-regex@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.22.5.tgz#ce7e7bb3ef208c4ff67e02a22816656256d7a183" + integrity sha512-028laaOKptN5vHJf9/Arr/HiJekMd41hOEZYvNsrsXqJ7YPYuX2bQxh31fkZzGmq3YqHRJzYFFAVYvKfMPKqyg== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" @@ -2766,16 +2871,18 @@ "@babel/plugin-transform-unicode-sets-regex@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.22.5.tgz#77788060e511b708ffc7d42fdfbc5b37c3004e91" + integrity sha512-lhMfi4FC15j13eKrh3DnYHjpGj6UKQHtNKTbtc1igvAhRy4+kLhV07OpLcsN0VgDEw/MjAvJO4BdMJsHwMhzCg== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" "@babel/preset-env@^7.11.0", "@babel/preset-env@^7.12.1", "@babel/preset-env@^7.16.4": - version "7.22.7" - resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.22.7.tgz#a1ef34b64a80653c22ce4d9c25603cfa76fc168a" + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.22.10.tgz#3263b9fe2c8823d191d28e61eac60a79f9ce8a0f" + integrity sha512-riHpLb1drNkpLlocmSyEg4oYJIQFeXAK/d7rI6mbD0XsvoTOOweXDmQPG/ErxsEhWk3rl3Q/3F6RFQlVFS8m0A== dependencies: - "@babel/compat-data" "^7.22.6" - "@babel/helper-compilation-targets" "^7.22.6" + "@babel/compat-data" "^7.22.9" + "@babel/helper-compilation-targets" "^7.22.10" "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-validator-option" "^7.22.5" "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.22.5" @@ -2800,15 +2907,15 @@ "@babel/plugin-syntax-top-level-await" "^7.14.5" "@babel/plugin-syntax-unicode-sets-regex" "^7.18.6" "@babel/plugin-transform-arrow-functions" "^7.22.5" - "@babel/plugin-transform-async-generator-functions" "^7.22.7" + "@babel/plugin-transform-async-generator-functions" "^7.22.10" "@babel/plugin-transform-async-to-generator" "^7.22.5" "@babel/plugin-transform-block-scoped-functions" "^7.22.5" - "@babel/plugin-transform-block-scoping" "^7.22.5" + "@babel/plugin-transform-block-scoping" "^7.22.10" "@babel/plugin-transform-class-properties" "^7.22.5" "@babel/plugin-transform-class-static-block" "^7.22.5" "@babel/plugin-transform-classes" "^7.22.6" "@babel/plugin-transform-computed-properties" "^7.22.5" 
- "@babel/plugin-transform-destructuring" "^7.22.5" + "@babel/plugin-transform-destructuring" "^7.22.10" "@babel/plugin-transform-dotall-regex" "^7.22.5" "@babel/plugin-transform-duplicate-keys" "^7.22.5" "@babel/plugin-transform-dynamic-import" "^7.22.5" @@ -2831,43 +2938,43 @@ "@babel/plugin-transform-object-rest-spread" "^7.22.5" "@babel/plugin-transform-object-super" "^7.22.5" "@babel/plugin-transform-optional-catch-binding" "^7.22.5" - "@babel/plugin-transform-optional-chaining" "^7.22.6" + "@babel/plugin-transform-optional-chaining" "^7.22.10" "@babel/plugin-transform-parameters" "^7.22.5" "@babel/plugin-transform-private-methods" "^7.22.5" "@babel/plugin-transform-private-property-in-object" "^7.22.5" "@babel/plugin-transform-property-literals" "^7.22.5" - "@babel/plugin-transform-regenerator" "^7.22.5" + "@babel/plugin-transform-regenerator" "^7.22.10" "@babel/plugin-transform-reserved-words" "^7.22.5" "@babel/plugin-transform-shorthand-properties" "^7.22.5" "@babel/plugin-transform-spread" "^7.22.5" "@babel/plugin-transform-sticky-regex" "^7.22.5" "@babel/plugin-transform-template-literals" "^7.22.5" "@babel/plugin-transform-typeof-symbol" "^7.22.5" - "@babel/plugin-transform-unicode-escapes" "^7.22.5" + "@babel/plugin-transform-unicode-escapes" "^7.22.10" "@babel/plugin-transform-unicode-property-regex" "^7.22.5" "@babel/plugin-transform-unicode-regex" "^7.22.5" "@babel/plugin-transform-unicode-sets-regex" "^7.22.5" - "@babel/preset-modules" "^0.1.5" - "@babel/types" "^7.22.5" - "@nicolo-ribaudo/semver-v6" "^6.3.3" - babel-plugin-polyfill-corejs2 "^0.4.4" - babel-plugin-polyfill-corejs3 "^0.8.2" - babel-plugin-polyfill-regenerator "^0.5.1" + "@babel/preset-modules" "0.1.6-no-external-plugins" + "@babel/types" "^7.22.10" + babel-plugin-polyfill-corejs2 "^0.4.5" + babel-plugin-polyfill-corejs3 "^0.8.3" + babel-plugin-polyfill-regenerator "^0.5.2" core-js-compat "^3.31.0" + semver "^6.3.1" -"@babel/preset-modules@^0.1.5": - version "0.1.5" - resolved "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.5.tgz" +"@babel/preset-modules@0.1.6-no-external-plugins": + version "0.1.6-no-external-plugins" + resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz#ccb88a2c49c817236861fee7826080573b8a923a" + integrity sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" - "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" - "@babel/plugin-transform-dotall-regex" "^7.4.4" "@babel/types" "^7.4.4" esutils "^2.0.2" "@babel/preset-react@^7.12.5", "@babel/preset-react@^7.16.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.22.5.tgz#c4d6058fbf80bccad02dd8c313a9aaa67e3c3dd6" + integrity sha512-M+Is3WikOpEJHgR385HbuCITPTaPRaNkibTEa9oiofmJvIsrceb4yp9RL9Kb+TE8LznmeyZqpP+Lopwcx59xPQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-validator-option" "^7.22.5" @@ -2879,6 +2986,7 @@ "@babel/preset-typescript@^7.16.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.22.5.tgz#16367d8b01d640e9a507577ed4ee54e0101e51c8" + integrity sha512-YbPaal9LxztSGhmndR46FmAbkJ/1fAsw293tSU+I5E5h+cnJ3d4GTwyUgGYmOXJYdGA+uNePle4qbaRzj2NISQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-validator-option" "^7.22.5" @@ -2889,45 +2997,44 @@ "@babel/regjsgen@^0.8.0": version "0.8.0" resolved 
"https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310" + integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA== -"@babel/runtime@^7.11.2", "@babel/runtime@^7.12.5", "@babel/runtime@^7.15.4", "@babel/runtime@^7.16.3", "@babel/runtime@^7.18.3", "@babel/runtime@^7.18.9", "@babel/runtime@^7.21.0", "@babel/runtime@^7.3.1", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.2", "@babel/runtime@^7.7.6", "@babel/runtime@^7.8.3", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2": - version "7.22.5" - resolved "https://registry.npmjs.org/@babel/runtime/-/runtime-7.22.5.tgz" - dependencies: - regenerator-runtime "^0.13.11" - -"@babel/runtime@^7.20.7", "@babel/runtime@^7.22.5": - version "7.22.6" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.22.6.tgz#57d64b9ae3cff1d67eb067ae117dac087f5bd438" +"@babel/runtime@^7.11.2", "@babel/runtime@^7.12.5", "@babel/runtime@^7.15.4", "@babel/runtime@^7.16.3", "@babel/runtime@^7.18.3", "@babel/runtime@^7.18.9", "@babel/runtime@^7.20.7", "@babel/runtime@^7.21.0", "@babel/runtime@^7.22.6", "@babel/runtime@^7.3.1", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.2", "@babel/runtime@^7.7.6", "@babel/runtime@^7.8.3", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.22.10.tgz#ae3e9631fd947cb7e3610d3e9d8fef5f76696682" + integrity sha512-21t/fkKLMZI4pqP2wlmsQAWnYW1PDyKyyUV4vCi+B25ydmdaYTKXPwCj0BzSUnZf4seIiYvSA3jcZ3gdsMFkLQ== dependencies: - regenerator-runtime "^0.13.11" + regenerator-runtime "^0.14.0" "@babel/template@^7.22.5", "@babel/template@^7.3.3": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.5.tgz#0c8c4d944509875849bd0344ff0050756eefc6ec" + integrity sha512-X7yV7eiwAxdj9k94NEylvbVHLiVG1nvzCV2EAowhxLTwODV1jl9UzZ48leOC0sH7OnuHrIkllaBgneUykIcZaw== dependencies: "@babel/code-frame" "^7.22.5" "@babel/parser" "^7.22.5" "@babel/types" "^7.22.5" -"@babel/traverse@^7.22.5", "@babel/traverse@^7.22.6", "@babel/traverse@^7.22.8", "@babel/traverse@^7.7.2": - version "7.22.8" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.22.8.tgz#4d4451d31bc34efeae01eac222b514a77aa4000e" +"@babel/traverse@^7.22.10", "@babel/traverse@^7.7.2": + version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.22.10.tgz#20252acb240e746d27c2e82b4484f199cf8141aa" + integrity sha512-Q/urqV4pRByiNNpb/f5OSv28ZlGJiFiiTh+GAHktbIrkPhPbl90+uW6SmpoLyZqutrg9AEaEf3Q/ZBRHBXgxig== dependencies: - "@babel/code-frame" "^7.22.5" - "@babel/generator" "^7.22.7" + "@babel/code-frame" "^7.22.10" + "@babel/generator" "^7.22.10" "@babel/helper-environment-visitor" "^7.22.5" "@babel/helper-function-name" "^7.22.5" "@babel/helper-hoist-variables" "^7.22.5" "@babel/helper-split-export-declaration" "^7.22.6" - "@babel/parser" "^7.22.7" - "@babel/types" "^7.22.5" + "@babel/parser" "^7.22.10" + "@babel/types" "^7.22.10" debug "^4.1.0" globals "^11.1.0" -"@babel/types@^7.0.0", "@babel/types@^7.12.6", "@babel/types@^7.20.7", "@babel/types@^7.22.5", "@babel/types@^7.3.3", "@babel/types@^7.4.4": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.22.5.tgz#cd93eeaab025880a3a47ec881f4b096a5b786fbe" +"@babel/types@^7.0.0", "@babel/types@^7.12.6", "@babel/types@^7.20.7", "@babel/types@^7.22.10", "@babel/types@^7.22.5", "@babel/types@^7.3.3", "@babel/types@^7.4.4": + 
version "7.22.10" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.22.10.tgz#4a9e76446048f2c66982d1a989dd12b8a2d2dc03" + integrity sha512-obaoigiLrlDZ7TUQln/8m4mSqIW2QFeOrCQc9r+xsaHGNoplVNYlRVpsfE8Vj35GEm2ZH4ZhrNYogs/3fj85kg== dependencies: "@babel/helper-string-parser" "^7.22.5" "@babel/helper-validator-identifier" "^7.22.5" @@ -2935,134 +3042,157 @@ "@bcoe/v8-coverage@^0.2.3": version "0.2.3" - resolved "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz" + resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== "@csstools/normalize.css@*": version "12.0.0" - resolved "https://registry.npmjs.org/@csstools/normalize.css/-/normalize.css-12.0.0.tgz" + resolved "https://registry.yarnpkg.com/@csstools/normalize.css/-/normalize.css-12.0.0.tgz#a9583a75c3f150667771f30b60d9f059473e62c4" + integrity sha512-M0qqxAcwCsIVfpFQSlGN5XjXWu8l5JDZN+fPt1LeW5SZexQTgnaEvgXAY+CeygRw0EeppWHi12JxESWiWrB0Sg== "@csstools/postcss-cascade-layers@^1.1.1": version "1.1.1" - resolved "https://registry.npmjs.org/@csstools/postcss-cascade-layers/-/postcss-cascade-layers-1.1.1.tgz" + resolved "https://registry.yarnpkg.com/@csstools/postcss-cascade-layers/-/postcss-cascade-layers-1.1.1.tgz#8a997edf97d34071dd2e37ea6022447dd9e795ad" + integrity sha512-+KdYrpKC5TgomQr2DlZF4lDEpHcoxnj5IGddYYfBWJAKfj1JtuHUIqMa+E1pJJ+z3kvDViWMqyqPlG4Ja7amQA== dependencies: "@csstools/selector-specificity" "^2.0.2" postcss-selector-parser "^6.0.10" "@csstools/postcss-color-function@^1.1.1": version "1.1.1" - resolved "https://registry.npmjs.org/@csstools/postcss-color-function/-/postcss-color-function-1.1.1.tgz" + resolved "https://registry.yarnpkg.com/@csstools/postcss-color-function/-/postcss-color-function-1.1.1.tgz#2bd36ab34f82d0497cfacdc9b18d34b5e6f64b6b" + integrity sha512-Bc0f62WmHdtRDjf5f3e2STwRAl89N2CLb+9iAwzrv4L2hncrbDwnQD9PCq0gtAt7pOI2leIV08HIBUd4jxD8cw== dependencies: "@csstools/postcss-progressive-custom-properties" "^1.1.0" postcss-value-parser "^4.2.0" "@csstools/postcss-font-format-keywords@^1.0.1": version "1.0.1" - resolved "https://registry.npmjs.org/@csstools/postcss-font-format-keywords/-/postcss-font-format-keywords-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/@csstools/postcss-font-format-keywords/-/postcss-font-format-keywords-1.0.1.tgz#677b34e9e88ae997a67283311657973150e8b16a" + integrity sha512-ZgrlzuUAjXIOc2JueK0X5sZDjCtgimVp/O5CEqTcs5ShWBa6smhWYbS0x5cVc/+rycTDbjjzoP0KTDnUneZGOg== dependencies: postcss-value-parser "^4.2.0" "@csstools/postcss-hwb-function@^1.0.2": version "1.0.2" - resolved "https://registry.npmjs.org/@csstools/postcss-hwb-function/-/postcss-hwb-function-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/@csstools/postcss-hwb-function/-/postcss-hwb-function-1.0.2.tgz#ab54a9fce0ac102c754854769962f2422ae8aa8b" + integrity sha512-YHdEru4o3Rsbjmu6vHy4UKOXZD+Rn2zmkAmLRfPet6+Jz4Ojw8cbWxe1n42VaXQhD3CQUXXTooIy8OkVbUcL+w== dependencies: postcss-value-parser "^4.2.0" "@csstools/postcss-ic-unit@^1.0.1": version "1.0.1" - resolved "https://registry.npmjs.org/@csstools/postcss-ic-unit/-/postcss-ic-unit-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/@csstools/postcss-ic-unit/-/postcss-ic-unit-1.0.1.tgz#28237d812a124d1a16a5acc5c3832b040b303e58" + integrity sha512-Ot1rcwRAaRHNKC9tAqoqNZhjdYBzKk1POgWfhN4uCOE47ebGcLRqXjKkApVDpjifL6u2/55ekkpnFcp+s/OZUw== dependencies: 
"@csstools/postcss-progressive-custom-properties" "^1.1.0" postcss-value-parser "^4.2.0" "@csstools/postcss-is-pseudo-class@^2.0.7": version "2.0.7" - resolved "https://registry.npmjs.org/@csstools/postcss-is-pseudo-class/-/postcss-is-pseudo-class-2.0.7.tgz" + resolved "https://registry.yarnpkg.com/@csstools/postcss-is-pseudo-class/-/postcss-is-pseudo-class-2.0.7.tgz#846ae6c0d5a1eaa878fce352c544f9c295509cd1" + integrity sha512-7JPeVVZHd+jxYdULl87lvjgvWldYu+Bc62s9vD/ED6/QTGjy0jy0US/f6BG53sVMTBJ1lzKZFpYmofBN9eaRiA== dependencies: "@csstools/selector-specificity" "^2.0.0" postcss-selector-parser "^6.0.10" "@csstools/postcss-nested-calc@^1.0.0": version "1.0.0" - resolved "https://registry.npmjs.org/@csstools/postcss-nested-calc/-/postcss-nested-calc-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/@csstools/postcss-nested-calc/-/postcss-nested-calc-1.0.0.tgz#d7e9d1d0d3d15cf5ac891b16028af2a1044d0c26" + integrity sha512-JCsQsw1wjYwv1bJmgjKSoZNvf7R6+wuHDAbi5f/7MbFhl2d/+v+TvBTU4BJH3G1X1H87dHl0mh6TfYogbT/dJQ== dependencies: postcss-value-parser "^4.2.0" "@csstools/postcss-normalize-display-values@^1.0.1": version "1.0.1" - resolved "https://registry.npmjs.org/@csstools/postcss-normalize-display-values/-/postcss-normalize-display-values-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/@csstools/postcss-normalize-display-values/-/postcss-normalize-display-values-1.0.1.tgz#15da54a36e867b3ac5163ee12c1d7f82d4d612c3" + integrity sha512-jcOanIbv55OFKQ3sYeFD/T0Ti7AMXc9nM1hZWu8m/2722gOTxFg7xYu4RDLJLeZmPUVQlGzo4jhzvTUq3x4ZUw== dependencies: postcss-value-parser "^4.2.0" "@csstools/postcss-oklab-function@^1.1.1": version "1.1.1" - resolved "https://registry.npmjs.org/@csstools/postcss-oklab-function/-/postcss-oklab-function-1.1.1.tgz" + resolved "https://registry.yarnpkg.com/@csstools/postcss-oklab-function/-/postcss-oklab-function-1.1.1.tgz#88cee0fbc8d6df27079ebd2fa016ee261eecf844" + integrity sha512-nJpJgsdA3dA9y5pgyb/UfEzE7W5Ka7u0CX0/HIMVBNWzWemdcTH3XwANECU6anWv/ao4vVNLTMxhiPNZsTK6iA== dependencies: "@csstools/postcss-progressive-custom-properties" "^1.1.0" postcss-value-parser "^4.2.0" "@csstools/postcss-progressive-custom-properties@^1.1.0", "@csstools/postcss-progressive-custom-properties@^1.3.0": version "1.3.0" - resolved "https://registry.npmjs.org/@csstools/postcss-progressive-custom-properties/-/postcss-progressive-custom-properties-1.3.0.tgz" + resolved "https://registry.yarnpkg.com/@csstools/postcss-progressive-custom-properties/-/postcss-progressive-custom-properties-1.3.0.tgz#542292558384361776b45c85226b9a3a34f276fa" + integrity sha512-ASA9W1aIy5ygskZYuWams4BzafD12ULvSypmaLJT2jvQ8G0M3I8PRQhC0h7mG0Z3LI05+agZjqSR9+K9yaQQjA== dependencies: postcss-value-parser "^4.2.0" "@csstools/postcss-stepped-value-functions@^1.0.1": version "1.0.1" - resolved "https://registry.npmjs.org/@csstools/postcss-stepped-value-functions/-/postcss-stepped-value-functions-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/@csstools/postcss-stepped-value-functions/-/postcss-stepped-value-functions-1.0.1.tgz#f8772c3681cc2befed695e2b0b1d68e22f08c4f4" + integrity sha512-dz0LNoo3ijpTOQqEJLY8nyaapl6umbmDcgj4AD0lgVQ572b2eqA1iGZYTTWhrcrHztWDDRAX2DGYyw2VBjvCvQ== dependencies: postcss-value-parser "^4.2.0" "@csstools/postcss-text-decoration-shorthand@^1.0.0": version "1.0.0" - resolved "https://registry.npmjs.org/@csstools/postcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-1.0.0.tgz" + resolved 
"https://registry.yarnpkg.com/@csstools/postcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-1.0.0.tgz#ea96cfbc87d921eca914d3ad29340d9bcc4c953f" + integrity sha512-c1XwKJ2eMIWrzQenN0XbcfzckOLLJiczqy+YvfGmzoVXd7pT9FfObiSEfzs84bpE/VqfpEuAZ9tCRbZkZxxbdw== dependencies: postcss-value-parser "^4.2.0" "@csstools/postcss-trigonometric-functions@^1.0.2": version "1.0.2" - resolved "https://registry.npmjs.org/@csstools/postcss-trigonometric-functions/-/postcss-trigonometric-functions-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/@csstools/postcss-trigonometric-functions/-/postcss-trigonometric-functions-1.0.2.tgz#94d3e4774c36d35dcdc88ce091336cb770d32756" + integrity sha512-woKaLO///4bb+zZC2s80l+7cm07M7268MsyG3M0ActXXEFi6SuhvriQYcb58iiKGbjwwIU7n45iRLEHypB47Og== dependencies: postcss-value-parser "^4.2.0" "@csstools/postcss-unset-value@^1.0.2": version "1.0.2" - resolved "https://registry.npmjs.org/@csstools/postcss-unset-value/-/postcss-unset-value-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/@csstools/postcss-unset-value/-/postcss-unset-value-1.0.2.tgz#c99bb70e2cdc7312948d1eb41df2412330b81f77" + integrity sha512-c8J4roPBILnelAsdLr4XOAR/GsTm0GJi4XpcfvoWk3U6KiTCqiFYc63KhRMQQX35jYMp4Ao8Ij9+IZRgMfJp1g== "@csstools/selector-specificity@^2.0.0", "@csstools/selector-specificity@^2.0.2": version "2.2.0" resolved "https://registry.yarnpkg.com/@csstools/selector-specificity/-/selector-specificity-2.2.0.tgz#2cbcf822bf3764c9658c4d2e568bd0c0cb748016" + integrity sha512-+OJ9konv95ClSTOJCmMZqpd5+YGsB2S+x6w3E1oaM8UuR5j8nTNHYSz8c9BEPGDOCMQYIEEGlVPj/VY64iTbGw== -"@date-io/core@^2.15.0", "@date-io/core@^2.16.0": - version "2.16.0" - resolved "https://registry.npmjs.org/@date-io/core/-/core-2.16.0.tgz" +"@date-io/core@^2.15.0", "@date-io/core@^2.17.0": + version "2.17.0" + resolved "https://registry.yarnpkg.com/@date-io/core/-/core-2.17.0.tgz#360a4d0641f069776ed22e457876e8a8a58c205e" + integrity sha512-+EQE8xZhRM/hsY0CDTVyayMDDY5ihc4MqXCrPxooKw19yAzUIC6uUqsZeaOFNL9YKTNxYKrJP5DFgE8o5xRCOw== "@date-io/date-fns@^2.15.0": - version "2.16.0" - resolved "https://registry.npmjs.org/@date-io/date-fns/-/date-fns-2.16.0.tgz" + version "2.17.0" + resolved "https://registry.yarnpkg.com/@date-io/date-fns/-/date-fns-2.17.0.tgz#1d9d0a02e0137524331819c9576a4e8e19a6142b" + integrity sha512-L0hWZ/mTpy3Gx/xXJ5tq5CzHo0L7ry6KEO9/w/JWiFWFLZgiNVo3ex92gOl3zmzjHqY/3Ev+5sehAr8UnGLEng== dependencies: - "@date-io/core" "^2.16.0" + "@date-io/core" "^2.17.0" "@date-io/dayjs@^2.15.0": - version "2.16.0" - resolved "https://registry.npmjs.org/@date-io/dayjs/-/dayjs-2.16.0.tgz" + version "2.17.0" + resolved "https://registry.yarnpkg.com/@date-io/dayjs/-/dayjs-2.17.0.tgz#ec3e2384136c028971ca2f78800a6877b9fdbe62" + integrity sha512-Iq1wjY5XzBh0lheFA0it6Dsyv94e8mTiNR8vuTai+KopxDkreL3YjwTmZHxkgB7/vd0RMIACStzVgWvPATnDCA== dependencies: - "@date-io/core" "^2.16.0" + "@date-io/core" "^2.17.0" "@date-io/luxon@^2.15.0": - version "2.16.1" - resolved "https://registry.npmjs.org/@date-io/luxon/-/luxon-2.16.1.tgz" + version "2.17.0" + resolved "https://registry.yarnpkg.com/@date-io/luxon/-/luxon-2.17.0.tgz#76e1f001aaa38fe7f0049f010fe356db1bb517d2" + integrity sha512-l712Vdm/uTddD2XWt9TlQloZUiTiRQtY5TCOG45MQ/8u0tu8M17BD6QYHar/3OrnkGybALAMPzCy1r5D7+0HBg== dependencies: - "@date-io/core" "^2.16.0" + "@date-io/core" "^2.17.0" "@date-io/moment@^2.15.0": - version "2.16.1" - resolved "https://registry.npmjs.org/@date-io/moment/-/moment-2.16.1.tgz" + version "2.17.0" + resolved 
"https://registry.yarnpkg.com/@date-io/moment/-/moment-2.17.0.tgz#04d2487d9d15d468b2e7903b87268fa1c89b56cb" + integrity sha512-e4nb4CDZU4k0WRVhz1Wvl7d+hFsedObSauDHKtZwU9kt7gdYEAzKgnrSCTHsEaXrDumdrkCYTeZ0Tmyk7uV4tw== dependencies: - "@date-io/core" "^2.16.0" + "@date-io/core" "^2.17.0" "@emotion/babel-plugin@^11.11.0": version "11.11.0" - resolved "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.11.0.tgz" + resolved "https://registry.yarnpkg.com/@emotion/babel-plugin/-/babel-plugin-11.11.0.tgz#c2d872b6a7767a9d176d007f5b31f7d504bb5d6c" + integrity sha512-m4HEDZleaaCH+XgDDsPF15Ht6wTLsgDTeR3WYj9Q/k76JtWhrJjcP4+/XlG8LGT/Rol9qUfOIztXeA84ATpqPQ== dependencies: "@babel/helper-module-imports" "^7.16.7" "@babel/runtime" "^7.18.3" @@ -3078,7 +3208,8 @@ "@emotion/cache@^10.0.27": version "10.0.29" - resolved "https://registry.npmjs.org/@emotion/cache/-/cache-10.0.29.tgz" + resolved "https://registry.yarnpkg.com/@emotion/cache/-/cache-10.0.29.tgz#87e7e64f412c060102d589fe7c6dc042e6f9d1e0" + integrity sha512-fU2VtSVlHiF27empSbxi1O2JFdNWZO+2NFHfwO0pxgTep6Xa3uGb+3pVKfLww2l/IBGLNEZl5Xf/++A4wAYDYQ== dependencies: "@emotion/sheet" "0.9.4" "@emotion/stylis" "0.8.5" @@ -3087,7 +3218,8 @@ "@emotion/cache@^11.11.0": version "11.11.0" - resolved "https://registry.npmjs.org/@emotion/cache/-/cache-11.11.0.tgz" + resolved "https://registry.yarnpkg.com/@emotion/cache/-/cache-11.11.0.tgz#809b33ee6b1cb1a625fef7a45bc568ccd9b8f3ff" + integrity sha512-P34z9ssTCBi3e9EI1ZsWpNHcfY1r09ZO0rZbRO2ob3ZQMnFI35jB536qoXbkdesr5EUhYi22anuEJuyxifaqAQ== dependencies: "@emotion/memoize" "^0.8.1" "@emotion/sheet" "^1.2.2" @@ -3097,7 +3229,8 @@ "@emotion/core@^10.0.28": version "10.3.1" - resolved "https://registry.npmjs.org/@emotion/core/-/core-10.3.1.tgz" + resolved "https://registry.yarnpkg.com/@emotion/core/-/core-10.3.1.tgz#4021b6d8b33b3304d48b0bb478485e7d7421c69d" + integrity sha512-447aUEjPIm0MnE6QYIaFz9VQOHSXf4Iu6EWOIqq11EAPqinkSZmfymPTmlOE3QjLv846lH4JVZBUOtwGbuQoww== dependencies: "@babel/runtime" "^7.5.5" "@emotion/cache" "^10.0.27" @@ -3108,7 +3241,8 @@ "@emotion/css@^10.0.27": version "10.0.27" - resolved "https://registry.npmjs.org/@emotion/css/-/css-10.0.27.tgz" + resolved "https://registry.yarnpkg.com/@emotion/css/-/css-10.0.27.tgz#3a7458198fbbebb53b01b2b87f64e5e21241e14c" + integrity sha512-6wZjsvYeBhyZQYNrGoR5yPMYbMBNEnanDrqmsqS1mzDm1cOTu12shvl2j4QHNS36UaTE0USIJawCH9C8oW34Zw== dependencies: "@emotion/serialize" "^0.11.15" "@emotion/utils" "0.11.3" @@ -3116,35 +3250,42 @@ "@emotion/hash@0.8.0": version "0.8.0" - resolved "https://registry.npmjs.org/@emotion/hash/-/hash-0.8.0.tgz" + resolved "https://registry.yarnpkg.com/@emotion/hash/-/hash-0.8.0.tgz#bbbff68978fefdbe68ccb533bc8cbe1d1afb5413" + integrity sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow== "@emotion/hash@^0.9.1": version "0.9.1" - resolved "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.1.tgz" + resolved "https://registry.yarnpkg.com/@emotion/hash/-/hash-0.9.1.tgz#4ffb0055f7ef676ebc3a5a91fb621393294e2f43" + integrity sha512-gJB6HLm5rYwSLI6PQa+X1t5CFGrv1J1TWG+sOyMCeKz2ojaj6Fnl/rZEspogG+cvqbt4AE/2eIyD2QfLKTBNlQ== "@emotion/is-prop-valid@0.8.8": version "0.8.8" - resolved "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz" + resolved "https://registry.yarnpkg.com/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz#db28b1c4368a259b60a97311d6a952d4fd01ac1a" + integrity sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA== 
dependencies: "@emotion/memoize" "0.7.4" "@emotion/is-prop-valid@^1.2.1": version "1.2.1" resolved "https://registry.yarnpkg.com/@emotion/is-prop-valid/-/is-prop-valid-1.2.1.tgz#23116cf1ed18bfeac910ec6436561ecb1a3885cc" + integrity sha512-61Mf7Ufx4aDxx1xlDeOm8aFFigGHE4z+0sKCa+IHCeZKiyP9RLD0Mmx7m8b9/Cf37f7NAvQOOJAbQQGVr5uERw== dependencies: "@emotion/memoize" "^0.8.1" "@emotion/memoize@0.7.4": version "0.7.4" - resolved "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.4.tgz" + resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.7.4.tgz#19bf0f5af19149111c40d98bb0cf82119f5d9eeb" + integrity sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw== "@emotion/memoize@^0.8.1": version "0.8.1" - resolved "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.8.1.tgz" + resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.8.1.tgz#c1ddb040429c6d21d38cc945fe75c818cfb68e17" + integrity sha512-W2P2c/VRW1/1tLox0mVUalvnWXxavmv/Oum2aPsRcoDJuob75FC3Y8FbpfLwUegRcxINtGUMPq0tFCvYNTBXNA== "@emotion/react@^11.8.2": version "11.11.1" - resolved "https://registry.npmjs.org/@emotion/react/-/react-11.11.1.tgz" + resolved "https://registry.yarnpkg.com/@emotion/react/-/react-11.11.1.tgz#b2c36afac95b184f73b08da8c214fdf861fa4157" + integrity sha512-5mlW1DquU5HaxjLkfkGN1GA/fvVGdyHURRiX/0FHl2cfIfRxSOfmxEH5YS43edp0OldZrZ+dkBKbngxcNCdZvA== dependencies: "@babel/runtime" "^7.18.3" "@emotion/babel-plugin" "^11.11.0" @@ -3157,7 +3298,8 @@ "@emotion/serialize@^0.11.15", "@emotion/serialize@^0.11.16": version "0.11.16" - resolved "https://registry.npmjs.org/@emotion/serialize/-/serialize-0.11.16.tgz" + resolved "https://registry.yarnpkg.com/@emotion/serialize/-/serialize-0.11.16.tgz#dee05f9e96ad2fb25a5206b6d759b2d1ed3379ad" + integrity sha512-G3J4o8by0VRrO+PFeSc3js2myYNOXVJ3Ya+RGVxnshRYgsvErfAOglKAiy1Eo1vhzxqtUvjCyS5gtewzkmvSSg== dependencies: "@emotion/hash" "0.8.0" "@emotion/memoize" "0.7.4" @@ -3167,7 +3309,8 @@ "@emotion/serialize@^1.1.2": version "1.1.2" - resolved "https://registry.npmjs.org/@emotion/serialize/-/serialize-1.1.2.tgz" + resolved "https://registry.yarnpkg.com/@emotion/serialize/-/serialize-1.1.2.tgz#017a6e4c9b8a803bd576ff3d52a0ea6fa5a62b51" + integrity sha512-zR6a/fkFP4EAcCMQtLOhIgpprZOwNmCldtpaISpvz348+DP4Mz8ZoKaGGCQpbzepNIUWbq4w6hNZkwDyKoS+HA== dependencies: "@emotion/hash" "^0.9.1" "@emotion/memoize" "^0.8.1" @@ -3177,15 +3320,18 @@ "@emotion/sheet@0.9.4": version "0.9.4" - resolved "https://registry.npmjs.org/@emotion/sheet/-/sheet-0.9.4.tgz" + resolved "https://registry.yarnpkg.com/@emotion/sheet/-/sheet-0.9.4.tgz#894374bea39ec30f489bbfc3438192b9774d32e5" + integrity sha512-zM9PFmgVSqBw4zL101Q0HrBVTGmpAxFZH/pYx/cjJT5advXguvcgjHFTCaIO3enL/xr89vK2bh0Mfyj9aa0ANA== "@emotion/sheet@^1.2.2": version "1.2.2" - resolved "https://registry.npmjs.org/@emotion/sheet/-/sheet-1.2.2.tgz" + resolved "https://registry.yarnpkg.com/@emotion/sheet/-/sheet-1.2.2.tgz#d58e788ee27267a14342303e1abb3d508b6d0fec" + integrity sha512-0QBtGvaqtWi+nx6doRwDdBIzhNdZrXUppvTM4dtZZWEGTXL/XE/yJxLMGlDT1Gt+UHH5IX1n+jkXyytE/av7OA== "@emotion/styled-base@^10.3.0": version "10.3.0" - resolved "https://registry.npmjs.org/@emotion/styled-base/-/styled-base-10.3.0.tgz" + resolved "https://registry.yarnpkg.com/@emotion/styled-base/-/styled-base-10.3.0.tgz#9aa2c946100f78b47316e4bc6048321afa6d4e36" + integrity sha512-PBRqsVKR7QRNkmfH78hTSSwHWcwDpecH9W6heujWAcyp2wdz/64PP73s7fWS1dIPm8/Exc8JAzYS8dEWXjv60w== dependencies: "@babel/runtime" "^7.5.5" "@emotion/is-prop-valid" 
"0.8.8" @@ -3194,7 +3340,8 @@ "@emotion/styled@^10.0.27": version "10.3.0" - resolved "https://registry.npmjs.org/@emotion/styled/-/styled-10.3.0.tgz" + resolved "https://registry.yarnpkg.com/@emotion/styled/-/styled-10.3.0.tgz#8ee959bf75730789abb5f67f7c3ded0c30aec876" + integrity sha512-GgcUpXBBEU5ido+/p/mCT2/Xx+Oqmp9JzQRuC+a4lYM4i4LBBn/dWvc0rQ19N9ObA8/T4NWMrPNe79kMBDJqoQ== dependencies: "@emotion/styled-base" "^10.3.0" babel-plugin-emotion "^10.0.27" @@ -3202,6 +3349,7 @@ "@emotion/styled@^11.8.1": version "11.11.0" resolved "https://registry.yarnpkg.com/@emotion/styled/-/styled-11.11.0.tgz#26b75e1b5a1b7a629d7c0a8b708fbf5a9cdce346" + integrity sha512-hM5Nnvu9P3midq5aaXj4I+lnSfNi7Pmd4EWk1fOZ3pxookaQTNew6bp4JaCBYM4HVFZF9g7UjJmsUmC2JlxOng== dependencies: "@babel/runtime" "^7.18.3" "@emotion/babel-plugin" "^11.11.0" @@ -3212,49 +3360,60 @@ "@emotion/stylis@0.8.5": version "0.8.5" - resolved "https://registry.npmjs.org/@emotion/stylis/-/stylis-0.8.5.tgz" + resolved "https://registry.yarnpkg.com/@emotion/stylis/-/stylis-0.8.5.tgz#deacb389bd6ee77d1e7fcaccce9e16c5c7e78e04" + integrity sha512-h6KtPihKFn3T9fuIrwvXXUOwlx3rfUvfZIcP5a6rh8Y7zjE3O06hT5Ss4S/YI1AYhuZ1kjaE/5EaOOI2NqSylQ== "@emotion/unitless@0.7.5": version "0.7.5" - resolved "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.7.5.tgz" + resolved "https://registry.yarnpkg.com/@emotion/unitless/-/unitless-0.7.5.tgz#77211291c1900a700b8a78cfafda3160d76949ed" + integrity sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg== "@emotion/unitless@^0.8.1": version "0.8.1" - resolved "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.8.1.tgz" + resolved "https://registry.yarnpkg.com/@emotion/unitless/-/unitless-0.8.1.tgz#182b5a4704ef8ad91bde93f7a860a88fd92c79a3" + integrity sha512-KOEGMu6dmJZtpadb476IsZBclKvILjopjUii3V+7MnXIQCYh8W3NgNcgwo21n9LXZX6EDIKvqfjYxXebDwxKmQ== "@emotion/use-insertion-effect-with-fallbacks@^1.0.1": version "1.0.1" - resolved "https://registry.npmjs.org/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.0.1.tgz#08de79f54eb3406f9daaf77c76e35313da963963" + integrity sha512-jT/qyKZ9rzLErtrjGgdkMBn2OP8wl0G3sQlBb3YPryvKHsjvINUhVaPFfP+fpBcOkmrVOVEEHQFJ7nbj2TH2gw== "@emotion/utils@0.11.3": version "0.11.3" - resolved "https://registry.npmjs.org/@emotion/utils/-/utils-0.11.3.tgz" + resolved "https://registry.yarnpkg.com/@emotion/utils/-/utils-0.11.3.tgz#a759863867befa7e583400d322652a3f44820924" + integrity sha512-0o4l6pZC+hI88+bzuaX/6BgOvQVhbt2PfmxauVaYOGgbsAw14wdKyvMCZXnsnsHys94iadcF+RG/wZyx6+ZZBw== "@emotion/utils@^1.2.1": version "1.2.1" - resolved "https://registry.npmjs.org/@emotion/utils/-/utils-1.2.1.tgz" + resolved "https://registry.yarnpkg.com/@emotion/utils/-/utils-1.2.1.tgz#bbab58465738d31ae4cb3dbb6fc00a5991f755e4" + integrity sha512-Y2tGf3I+XVnajdItskUCn6LX+VUDmP6lTL4fcqsXAv43dnlbZiuW4MWQW38rW/BVWSE7Q/7+XQocmpnRYILUmg== "@emotion/weak-memoize@0.2.5": version "0.2.5" - resolved "https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.2.5.tgz" + resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.2.5.tgz#8eed982e2ee6f7f4e44c253e12962980791efd46" + integrity sha512-6U71C2Wp7r5XtFtQzYrW5iKFT67OixrSxjI4MptCHzdSVlgabczzqLe0ZSgnub/5Kp4hSbpDB1tMytZY9pwxxA== "@emotion/weak-memoize@^0.3.1": version "0.3.1" - resolved 
"https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.3.1.tgz" + resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.3.1.tgz#d0fce5d07b0620caa282b5131c297bb60f9d87e6" + integrity sha512-EsBwpc7hBUJWAsNPBmJy4hxWx12v6bshQsldrVmjxJoc3isbxhOrF2IcCpaXxfvq03NwkI7sbsOLXbYuqF/8Ww== "@eslint-community/eslint-utils@^4.2.0": version "4.4.0" resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" + integrity sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA== dependencies: eslint-visitor-keys "^3.3.0" -"@eslint-community/regexpp@^4.4.0": - version "4.5.1" - resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.5.1.tgz#cdd35dce4fa1a89a4fd42b1599eb35b3af408884" +"@eslint-community/regexpp@^4.4.0", "@eslint-community/regexpp@^4.6.1": + version "4.6.2" + resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.6.2.tgz#1816b5f6948029c5eaacb0703b850ee0cb37d8f8" + integrity sha512-pPTNuaAG3QMH+buKyBIGJs3g/S5y0caxw0ygM3YyE6yJFySwiGGSzA+mM3KJ8QQvzeLh3blwgSonkFjgQdxzMw== -"@eslint/eslintrc@^2.1.0": - version "2.1.0" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.1.0.tgz#82256f164cc9e0b59669efc19d57f8092706841d" +"@eslint/eslintrc@^2.1.2": + version "2.1.2" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.1.2.tgz#c6936b4b328c64496692f76944e755738be62396" + integrity sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g== dependencies: ajv "^6.12.4" debug "^4.3.2" @@ -3266,17 +3425,20 @@ minimatch "^3.1.2" strip-json-comments "^3.1.1" -"@eslint/js@8.44.0": - version "8.44.0" - resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.44.0.tgz#961a5903c74139390478bdc808bcde3fc45ab7af" +"@eslint/js@^8.47.0": + version "8.47.0" + resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.47.0.tgz#5478fdf443ff8158f9de171c704ae45308696c7d" + integrity sha512-P6omY1zv5MItm93kLM8s2vr1HICJH8v0dvddDhysbIuZ+vcjOHg5Zbkf1mTkcmi2JA9oBG2anOkRnW8WJTS8Og== "@graphql-typed-document-node/core@^3.1.1": version "3.2.0" resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.2.0.tgz#5f3d96ec6b2354ad6d8a28bf216a1d97b5426861" + integrity sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ== "@humanwhocodes/config-array@^0.11.10": version "0.11.10" resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.10.tgz#5a3ffe32cc9306365fb3fd572596cd602d5e12d2" + integrity sha512-KVVjQmNUepDVGXNuoRRdmmEjruj0KfiGSbS8LVc12LMsWDQzRXJ0qdhN8L8uUigKpfEHRhlaQFY0ib1tnUbNeQ== dependencies: "@humanwhocodes/object-schema" "^1.2.1" debug "^4.1.1" @@ -3284,15 +3446,18 @@ "@humanwhocodes/module-importer@^1.0.1": version "1.0.1" - resolved "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== "@humanwhocodes/object-schema@^1.2.1": version "1.2.1" - resolved "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + integrity 
sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" - resolved "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz" + resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== dependencies: camelcase "^5.3.1" find-up "^4.1.0" @@ -3302,11 +3467,13 @@ "@istanbuljs/schema@^0.1.2": version "0.1.3" - resolved "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz" + resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== "@jest/console@^27.5.1": version "27.5.1" - resolved "https://registry.npmjs.org/@jest/console/-/console-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/@jest/console/-/console-27.5.1.tgz#260fe7239602fe5130a94f1aa386eff54b014bba" + integrity sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg== dependencies: "@jest/types" "^27.5.1" "@types/node" "*" @@ -3317,7 +3484,8 @@ "@jest/console@^28.1.3": version "28.1.3" - resolved "https://registry.npmjs.org/@jest/console/-/console-28.1.3.tgz" + resolved "https://registry.yarnpkg.com/@jest/console/-/console-28.1.3.tgz#2030606ec03a18c31803b8a36382762e447655df" + integrity sha512-QPAkP5EwKdK/bxIr6C1I4Vs0rm2nHiANzj/Z5X2JQkrZo6IqvC4ldZ9K95tF0HdidhA8Bo6egxSzUFPYKcEXLw== dependencies: "@jest/types" "^28.1.3" "@types/node" "*" @@ -3328,7 +3496,8 @@ "@jest/core@^27.5.1": version "27.5.1" - resolved "https://registry.npmjs.org/@jest/core/-/core-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/@jest/core/-/core-27.5.1.tgz#267ac5f704e09dc52de2922cbf3af9edcd64b626" + integrity sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ== dependencies: "@jest/console" "^27.5.1" "@jest/reporters" "^27.5.1" @@ -3361,22 +3530,25 @@ "@jest/environment@^27.5.1": version "27.5.1" - resolved "https://registry.npmjs.org/@jest/environment/-/environment-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-27.5.1.tgz#d7425820511fe7158abbecc010140c3fd3be9c74" + integrity sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA== dependencies: "@jest/fake-timers" "^27.5.1" "@jest/types" "^27.5.1" "@types/node" "*" jest-mock "^27.5.1" -"@jest/expect-utils@^29.6.1": - version "29.6.1" - resolved "https://registry.yarnpkg.com/@jest/expect-utils/-/expect-utils-29.6.1.tgz#ab83b27a15cdd203fe5f68230ea22767d5c3acc5" +"@jest/expect-utils@^29.6.2": + version "29.6.2" + resolved "https://registry.yarnpkg.com/@jest/expect-utils/-/expect-utils-29.6.2.tgz#1b97f290d0185d264dd9fdec7567a14a38a90534" + integrity sha512-6zIhM8go3RV2IG4aIZaZbxwpOzz3ZiM23oxAlkquOIole+G6TrbeXnykxWYlqF7kz2HlBjdKtca20x9atkEQYg== dependencies: jest-get-type "^29.4.3" "@jest/fake-timers@^27.5.1": version "27.5.1" - resolved "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-27.5.1.tgz#76979745ce0579c8a94a4678af7a748eda8ada74" + integrity sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ== dependencies: "@jest/types" "^27.5.1" 
"@sinonjs/fake-timers" "^8.0.1" @@ -3387,7 +3559,8 @@ "@jest/globals@^27.5.1": version "27.5.1" - resolved "https://registry.npmjs.org/@jest/globals/-/globals-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-27.5.1.tgz#7ac06ce57ab966566c7963431cef458434601b2b" + integrity sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q== dependencies: "@jest/environment" "^27.5.1" "@jest/types" "^27.5.1" @@ -3395,7 +3568,8 @@ "@jest/reporters@^27.5.1": version "27.5.1" - resolved "https://registry.npmjs.org/@jest/reporters/-/reporters-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-27.5.1.tgz#ceda7be96170b03c923c37987b64015812ffec04" + integrity sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw== dependencies: "@bcoe/v8-coverage" "^0.2.3" "@jest/console" "^27.5.1" @@ -3425,19 +3599,22 @@ "@jest/schemas@^28.1.3": version "28.1.3" - resolved "https://registry.npmjs.org/@jest/schemas/-/schemas-28.1.3.tgz" + resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-28.1.3.tgz#ad8b86a66f11f33619e3d7e1dcddd7f2d40ff905" + integrity sha512-/l/VWsdt/aBXgjshLWOFyFt3IVdYypu5y2Wn2rOO1un6nkqIn8SLXzgIMYXFyYsRWDyF5EthmKJMIdJvk08grg== dependencies: "@sinclair/typebox" "^0.24.1" "@jest/schemas@^29.6.0": version "29.6.0" resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-29.6.0.tgz#0f4cb2c8e3dca80c135507ba5635a4fd755b0040" + integrity sha512-rxLjXyJBTL4LQeJW3aKo0M/+GkCOXsO+8i9Iu7eDb6KwtP65ayoDsitrdPBtujxQ88k4wI2FNYfa6TOGwSn6cQ== dependencies: "@sinclair/typebox" "^0.27.8" "@jest/source-map@^27.5.1": version "27.5.1" - resolved "https://registry.npmjs.org/@jest/source-map/-/source-map-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-27.5.1.tgz#6608391e465add4205eae073b55e7f279e04e8cf" + integrity sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg== dependencies: callsites "^3.0.0" graceful-fs "^4.2.9" @@ -3445,7 +3622,8 @@ "@jest/test-result@^27.5.1": version "27.5.1" - resolved "https://registry.npmjs.org/@jest/test-result/-/test-result-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-27.5.1.tgz#56a6585fa80f7cdab72b8c5fc2e871d03832f5bb" + integrity sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag== dependencies: "@jest/console" "^27.5.1" "@jest/types" "^27.5.1" @@ -3454,7 +3632,8 @@ "@jest/test-result@^28.1.3": version "28.1.3" - resolved "https://registry.npmjs.org/@jest/test-result/-/test-result-28.1.3.tgz" + resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-28.1.3.tgz#5eae945fd9f4b8fcfce74d239e6f725b6bf076c5" + integrity sha512-kZAkxnSE+FqE8YjW8gNuoVkkC9I7S1qmenl8sGcDOLropASP+BkcGKwhXoyqQuGOGeYY0y/ixjrd/iERpEXHNg== dependencies: "@jest/console" "^28.1.3" "@jest/types" "^28.1.3" @@ -3463,7 +3642,8 @@ "@jest/test-sequencer@^27.5.1": version "27.5.1" - resolved "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-27.5.1.tgz#4057e0e9cea4439e544c6353c6affe58d095745b" + integrity sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ== dependencies: "@jest/test-result" "^27.5.1" graceful-fs "^4.2.9" @@ -3472,7 +3652,8 @@ "@jest/transform@^27.5.1": version "27.5.1" - resolved "https://registry.npmjs.org/@jest/transform/-/transform-27.5.1.tgz" + 
resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-27.5.1.tgz#6c3501dcc00c4c08915f292a600ece5ecfe1f409" + integrity sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw== dependencies: "@babel/core" "^7.1.0" "@jest/types" "^27.5.1" @@ -3492,7 +3673,8 @@ "@jest/types@^27.5.1": version "27.5.1" - resolved "https://registry.npmjs.org/@jest/types/-/types-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-27.5.1.tgz#3c79ec4a8ba61c170bf937bcf9e98a9df175ec80" + integrity sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw== dependencies: "@types/istanbul-lib-coverage" "^2.0.0" "@types/istanbul-reports" "^3.0.0" @@ -3502,7 +3684,8 @@ "@jest/types@^28.1.3": version "28.1.3" - resolved "https://registry.npmjs.org/@jest/types/-/types-28.1.3.tgz" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-28.1.3.tgz#b05de80996ff12512bc5ceb1d208285a7d11748b" + integrity sha512-RyjiyMUZrKz/c+zlMFO1pm70DcIlST8AeWTkoUdZevew44wcNZQHsEVOiCVtgVnlFFD82FPaXycys58cf2muVQ== dependencies: "@jest/schemas" "^28.1.3" "@types/istanbul-lib-coverage" "^2.0.0" @@ -3514,6 +3697,7 @@ "@jest/types@^29.6.1": version "29.6.1" resolved "https://registry.yarnpkg.com/@jest/types/-/types-29.6.1.tgz#ae79080278acff0a6af5eb49d063385aaa897bf2" + integrity sha512-tPKQNMPuXgvdOn2/Lg9HNfUvjYVGolt04Hp03f5hAk878uwOLikN+JzeLY0HcVgKgFl9Hs3EIqpu3WX27XNhnw== dependencies: "@jest/schemas" "^29.6.0" "@types/istanbul-lib-coverage" "^2.0.0" @@ -3525,121 +3709,138 @@ "@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": version "0.3.3" resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" + integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== dependencies: "@jridgewell/set-array" "^1.0.1" "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping" "^0.3.9" -"@jridgewell/resolve-uri@3.1.0": - version "3.1.0" - resolved "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz" +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz#c08679063f279615a3326583ba3a90d1d82cc721" + integrity sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA== "@jridgewell/set-array@^1.0.1": version "1.1.2" - resolved "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== "@jridgewell/source-map@^0.3.3": version "0.3.5" resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.5.tgz#a3bb4d5c6825aab0d281268f47f6ad5853431e91" + integrity sha512-UTYAUj/wviwdsMfzoSJspJxbkH5o1snzwX0//0ENX1u/55kkZZkcTZP6u9bwKGkv+dkk9at4m1Cpt0uY80kcpQ== dependencies: "@jridgewell/gen-mapping" "^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" -"@jridgewell/sourcemap-codec@1.4.14", "@jridgewell/sourcemap-codec@^1.4.10": - version "1.4.14" - resolved "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz" +"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14": + version "1.4.15" + resolved 
"https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" + integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== "@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9": - version "0.3.18" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz#25783b2086daf6ff1dcb53c9249ae480e4dd4cd6" + version "0.3.19" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.19.tgz#f8a3249862f91be48d3127c3cfe992f79b4b8811" + integrity sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw== dependencies: - "@jridgewell/resolve-uri" "3.1.0" - "@jridgewell/sourcemap-codec" "1.4.14" + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" "@juggle/resize-observer@^3.3.1": version "3.4.0" resolved "https://registry.yarnpkg.com/@juggle/resize-observer/-/resize-observer-3.4.0.tgz#08d6c5e20cf7e4cc02fd181c4b0c225cd31dbb60" + integrity sha512-dfLbk+PwWvFzSxwk3n5ySL0hfBog779o8h68wK/7/APo/7cgyWp5jcXockbxdk5kFRkbeXWm4Fbi9FrdN381sA== "@leichtgewicht/ip-codec@^2.0.1": version "2.0.4" - resolved "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz" + resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" + integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== "@monaco-editor/loader@^1.3.3": version "1.3.3" resolved "https://registry.yarnpkg.com/@monaco-editor/loader/-/loader-1.3.3.tgz#7f1742bd3cc21c0362a46a4056317f6e5215cfca" + integrity sha512-6KKF4CTzcJiS8BJwtxtfyYt9shBiEv32ateQ9T4UVogwn4HM/uPo9iJd2Dmbkpz8CM6Y0PDUpjnZzCwC+eYo2Q== dependencies: state-local "^1.0.6" "@monaco-editor/react@^4.3.1": version "4.5.1" resolved "https://registry.yarnpkg.com/@monaco-editor/react/-/react-4.5.1.tgz#fbc76c692aee9a33b9ab24ae0c5f219b8f002fdb" + integrity sha512-NNDFdP+2HojtNhCkRfE6/D6ro6pBNihaOzMbGK84lNWzRu+CfBjwzGt4jmnqimLuqp5yE5viHS2vi+QOAnD5FQ== dependencies: "@monaco-editor/loader" "^1.3.3" -"@mui/base@5.0.0-beta.6": - version "5.0.0-beta.6" - resolved "https://registry.yarnpkg.com/@mui/base/-/base-5.0.0-beta.6.tgz#c4537231619f4642ebda714c2cfd0e598aa9f511" +"@mui/base@5.0.0-beta.11": + version "5.0.0-beta.11" + resolved "https://registry.yarnpkg.com/@mui/base/-/base-5.0.0-beta.11.tgz#0124d336f1931c6cd5f0008d015df5bd8fafd3a8" + integrity sha512-FdKZGPd8qmC3ZNke7CNhzcEgToc02M6WYZc9hcBsNQ17bgAd3s9F//1bDDYgMVBYxDM71V0sv/hBHlOY4I1ZVA== dependencies: - "@babel/runtime" "^7.22.5" + "@babel/runtime" "^7.22.6" "@emotion/is-prop-valid" "^1.2.1" "@mui/types" "^7.2.4" - "@mui/utils" "^5.13.7" + "@mui/utils" "^5.14.5" "@popperjs/core" "^2.11.8" - clsx "^1.2.1" + clsx "^2.0.0" prop-types "^15.8.1" react-is "^18.2.0" -"@mui/core-downloads-tracker@^5.13.7": - version "5.13.7" - resolved "https://registry.yarnpkg.com/@mui/core-downloads-tracker/-/core-downloads-tracker-5.13.7.tgz#f4d9af5fe113b80b98b2cb158263d7b8f77e61c7" +"@mui/core-downloads-tracker@^5.14.5": + version "5.14.5" + resolved "https://registry.yarnpkg.com/@mui/core-downloads-tracker/-/core-downloads-tracker-5.14.5.tgz#c5854b89d57520c77253a79b20b784d5c2903fb6" + integrity sha512-+wpGH1USwPcKMFPMvXqYPC6fEvhxM3FzxC8lyDiNK/imLyyJ6y2DPb1Oue7OGIKJWBmYBqrWWtfovrxd1aJHTA== "@mui/icons-material@^5.5.1": - version "5.13.7" - resolved 
"https://registry.yarnpkg.com/@mui/icons-material/-/icons-material-5.13.7.tgz#d83532363196b49d8716987e9a2c12f55b233cc1" + version "5.14.3" + resolved "https://registry.yarnpkg.com/@mui/icons-material/-/icons-material-5.14.3.tgz#26a84d52ab2fceea2856adf7a139527b3a51ae90" + integrity sha512-XkxWPhageu1OPUm2LWjo5XqeQ0t2xfGe8EiLkRW9oz2LHMMZmijvCxulhgquUVTF1DnoSh+3KoDLSsoAFtVNVw== dependencies: - "@babel/runtime" "^7.22.5" + "@babel/runtime" "^7.22.6" "@mui/lab@^5.0.0-alpha.74": - version "5.0.0-alpha.135" - resolved "https://registry.yarnpkg.com/@mui/lab/-/lab-5.0.0-alpha.135.tgz#d4c4145b63d5839cc201136fc4e152fd31690b44" + version "5.0.0-alpha.140" + resolved "https://registry.yarnpkg.com/@mui/lab/-/lab-5.0.0-alpha.140.tgz#07028563fb18715e49da973ee814da7c0976133d" + integrity sha512-k75jos6jklCD8tA20PAK2H4RSCKycTcR4Pbfz7JbdxIkWXJ+y2MRalwMcen1vpB99v0yZHNUo6BtGz6rvs2jlQ== dependencies: - "@babel/runtime" "^7.22.5" - "@mui/base" "5.0.0-beta.6" - "@mui/system" "^5.13.7" + "@babel/runtime" "^7.22.6" + "@mui/base" "5.0.0-beta.11" + "@mui/system" "^5.14.5" "@mui/types" "^7.2.4" - "@mui/utils" "^5.13.7" - clsx "^1.2.1" + "@mui/utils" "^5.14.5" + clsx "^2.0.0" prop-types "^15.8.1" react-is "^18.2.0" "@mui/material@^5.5.2": - version "5.13.7" - resolved "https://registry.yarnpkg.com/@mui/material/-/material-5.13.7.tgz#0a4cef14d2a647eb6b049557a795744ff35df755" - dependencies: - "@babel/runtime" "^7.22.5" - "@mui/base" "5.0.0-beta.6" - "@mui/core-downloads-tracker" "^5.13.7" - "@mui/system" "^5.13.7" + version "5.14.5" + resolved "https://registry.yarnpkg.com/@mui/material/-/material-5.14.5.tgz#4610b381fd159cd208c28e1d1f29c303ea24a518" + integrity sha512-4qa4GMfuZH0Ai3mttk5ccXP8a3sf7aPlAJwyMrUSz6h9hPri6BPou94zeu3rENhhmKLby9S/W1y+pmficy8JKA== + dependencies: + "@babel/runtime" "^7.22.6" + "@mui/base" "5.0.0-beta.11" + "@mui/core-downloads-tracker" "^5.14.5" + "@mui/system" "^5.14.5" "@mui/types" "^7.2.4" - "@mui/utils" "^5.13.7" + "@mui/utils" "^5.14.5" "@types/react-transition-group" "^4.4.6" - clsx "^1.2.1" + clsx "^2.0.0" csstype "^3.1.2" prop-types "^15.8.1" react-is "^18.2.0" react-transition-group "^4.4.5" -"@mui/private-theming@^5.13.7": - version "5.13.7" - resolved "https://registry.yarnpkg.com/@mui/private-theming/-/private-theming-5.13.7.tgz#2f8ef5da066f3c6c6423bd4260d003a28d10b099" +"@mui/private-theming@^5.14.5": + version "5.14.5" + resolved "https://registry.yarnpkg.com/@mui/private-theming/-/private-theming-5.14.5.tgz#834e1569c31e2644665f98d902def79014053017" + integrity sha512-cC4C5RrpXpDaaZyH9QwmPhRLgz+f2SYbOty3cPkk4qPSOSfif2ZEcDD9HTENKDDd9deB+xkPKzzZhi8cxIx8Ig== dependencies: - "@babel/runtime" "^7.22.5" - "@mui/utils" "^5.13.7" + "@babel/runtime" "^7.22.6" + "@mui/utils" "^5.14.5" prop-types "^15.8.1" "@mui/styled-engine@^5.13.2": version "5.13.2" resolved "https://registry.yarnpkg.com/@mui/styled-engine/-/styled-engine-5.13.2.tgz#c87bd61c0ab8086d34828b6defe97c02bcd642ef" + integrity sha512-VCYCU6xVtXOrIN8lcbuPmoG+u7FYuOERG++fpY74hPpEWkyFQG97F+/XfTQVYzlR2m7nPjnwVUgATcTCMEaMvw== dependencies: "@babel/runtime" "^7.21.0" "@emotion/cache" "^11.11.0" @@ -3647,15 +3848,16 @@ prop-types "^15.8.1" "@mui/styles@^5.5.1": - version "5.13.7" - resolved "https://registry.yarnpkg.com/@mui/styles/-/styles-5.13.7.tgz#b3d98741ccfaad32c6341c9b1dc5072578327d66" + version "5.14.5" + resolved "https://registry.yarnpkg.com/@mui/styles/-/styles-5.14.5.tgz#182759e844045b613a3c86266804642860c2fe53" + integrity sha512-yss4BRGae6ib4gq6YpVLnPyhHiuSIENwlDPWrTEqgc1UhTMmDBGZ7ZCdZ15YGdwviJuNDDf5Bcp3GK4rE5wZNQ== 
dependencies: - "@babel/runtime" "^7.22.5" + "@babel/runtime" "^7.22.6" "@emotion/hash" "^0.9.1" - "@mui/private-theming" "^5.13.7" + "@mui/private-theming" "^5.14.5" "@mui/types" "^7.2.4" - "@mui/utils" "^5.13.7" - clsx "^1.2.1" + "@mui/utils" "^5.14.5" + clsx "^2.0.0" csstype "^3.1.2" hoist-non-react-statics "^3.3.2" jss "^10.10.0" @@ -3668,38 +3870,31 @@ jss-plugin-vendor-prefixer "^10.10.0" prop-types "^15.8.1" -"@mui/system@^5.13.7": - version "5.13.7" - resolved "https://registry.yarnpkg.com/@mui/system/-/system-5.13.7.tgz#b02e6284bbaab4201b142546ebbb2012ec0fa63d" +"@mui/system@^5.14.5": + version "5.14.5" + resolved "https://registry.yarnpkg.com/@mui/system/-/system-5.14.5.tgz#614394c4183d90df82c540e0e736ba72c1f95f8e" + integrity sha512-mextXZHDeGcR7E1kx43TRARrVXy+gI4wzpUgNv7MqZs1dvTVXQGVeAT6ydj9d6FUqHBPMNLGV/21vJOrpqsL+w== dependencies: - "@babel/runtime" "^7.22.5" - "@mui/private-theming" "^5.13.7" + "@babel/runtime" "^7.22.6" + "@mui/private-theming" "^5.14.5" "@mui/styled-engine" "^5.13.2" "@mui/types" "^7.2.4" - "@mui/utils" "^5.13.7" - clsx "^1.2.1" + "@mui/utils" "^5.14.5" + clsx "^2.0.0" csstype "^3.1.2" prop-types "^15.8.1" "@mui/types@^7.2.4": version "7.2.4" resolved "https://registry.yarnpkg.com/@mui/types/-/types-7.2.4.tgz#b6fade19323b754c5c6de679a38f068fd50b9328" + integrity sha512-LBcwa8rN84bKF+f5sDyku42w1NTxaPgPyYKODsh01U1fVstTClbUoSA96oyRBnSNyEiAVjKm6Gwx9vjR+xyqHA== -"@mui/utils@^5.10.3": - version "5.13.1" - resolved "https://registry.npmjs.org/@mui/utils/-/utils-5.13.1.tgz" +"@mui/utils@^5.10.3", "@mui/utils@^5.14.5": + version "5.14.5" + resolved "https://registry.yarnpkg.com/@mui/utils/-/utils-5.14.5.tgz#98fb6060610b793a8478e70ffe5e4ed5bd922dba" + integrity sha512-6Hzw63VR9C5xYv+CbjndoRLU6Gntal8rJ5W+GUzkyHrGWIyYPWZPa6AevnyGioySNETATe1H9oXS8f/7qgIHJA== dependencies: - "@babel/runtime" "^7.21.0" - "@types/prop-types" "^15.7.5" - "@types/react-is" "^18.2.0" - prop-types "^15.8.1" - react-is "^18.2.0" - -"@mui/utils@^5.13.7": - version "5.13.7" - resolved "https://registry.yarnpkg.com/@mui/utils/-/utils-5.13.7.tgz#7e6a8336e05eb2642667a5c02eb605351e27ec20" - dependencies: - "@babel/runtime" "^7.22.5" + "@babel/runtime" "^7.22.6" "@types/prop-types" "^15.7.5" "@types/react-is" "^18.2.1" prop-types "^15.8.1" @@ -3708,6 +3903,7 @@ "@mui/x-data-grid@^5.7.0": version "5.17.26" resolved "https://registry.yarnpkg.com/@mui/x-data-grid/-/x-data-grid-5.17.26.tgz#1f7fa73dd3986cf052e2fd2cb56eb4678a7bd913" + integrity sha512-eGJq9J0g9cDGLFfMmugOadZx0mJeOd/yQpHwEa5gUXyONS6qF0OhXSWyDOhDdA3l2TOoQzotMN5dY/T4Wl1KYA== dependencies: "@babel/runtime" "^7.18.9" "@mui/utils" "^5.10.3" @@ -3717,7 +3913,8 @@ "@mui/x-date-pickers@^5.0.0": version "5.0.20" - resolved "https://registry.npmjs.org/@mui/x-date-pickers/-/x-date-pickers-5.0.20.tgz" + resolved "https://registry.yarnpkg.com/@mui/x-date-pickers/-/x-date-pickers-5.0.20.tgz#7b4e5b5a214a8095937ba7d82bb82acd6f270d72" + integrity sha512-ERukSeHIoNLbI1C2XRhF9wRhqfsr+Q4B1SAw2ZlU7CWgcG8UBOxgqRKDEOVAIoSWL+DWT6GRuQjOKvj6UXZceA== dependencies: "@babel/runtime" "^7.18.9" "@date-io/core" "^2.15.0" @@ -3734,35 +3931,36 @@ "@nicolo-ribaudo/eslint-scope-5-internals@5.1.1-v1": version "5.1.1-v1" - resolved "https://registry.npmjs.org/@nicolo-ribaudo/eslint-scope-5-internals/-/eslint-scope-5-internals-5.1.1-v1.tgz" + resolved "https://registry.yarnpkg.com/@nicolo-ribaudo/eslint-scope-5-internals/-/eslint-scope-5-internals-5.1.1-v1.tgz#dbf733a965ca47b1973177dc0bb6c889edcfb129" + integrity 
sha512-54/JRvkLIzzDWshCWfuhadfrfZVPiElY8Fcgmg1HroEly/EDSszzhBAsarCux+D/kOslTRquNzuyGSmUSTTHGg== dependencies: eslint-scope "5.1.1" -"@nicolo-ribaudo/semver-v6@^6.3.3": - version "6.3.3" - resolved "https://registry.yarnpkg.com/@nicolo-ribaudo/semver-v6/-/semver-v6-6.3.3.tgz#ea6d23ade78a325f7a52750aab1526b02b628c29" - "@nodelib/fs.scandir@2.1.5": version "2.1.5" - resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== dependencies: "@nodelib/fs.stat" "2.0.5" run-parallel "^1.1.9" "@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": version "2.0.5" - resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== "@nodelib/fs.walk@^1.2.3", "@nodelib/fs.walk@^1.2.8": version "1.2.8" - resolved "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== dependencies: "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" "@pmmmwh/react-refresh-webpack-plugin@^0.5.3": - version "0.5.10" - resolved "https://registry.npmjs.org/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.10.tgz" + version "0.5.11" + resolved "https://registry.yarnpkg.com/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.11.tgz#7c2268cedaa0644d677e8c4f377bc8fb304f714a" + integrity sha512-7j/6vdTym0+qZ6u4XbSAxrWBGYSdCfTzySkj7WAFgDLmSyWlOrWvpyzxlFh5jtw9dn0oL/jtW+06XfFiisN3JQ== dependencies: ansi-html-community "^0.0.8" common-path-prefix "^3.0.0" @@ -3777,10 +3975,12 @@ "@popperjs/core@^2.11.8": version "2.11.8" resolved "https://registry.yarnpkg.com/@popperjs/core/-/core-2.11.8.tgz#6b79032e760a0899cd4204710beede972a3a185f" + integrity sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A== "@reduxjs/toolkit@^1.8.0": version "1.9.5" resolved "https://registry.yarnpkg.com/@reduxjs/toolkit/-/toolkit-1.9.5.tgz#d3987849c24189ca483baa7aa59386c8e52077c4" + integrity sha512-Rt97jHmfTeaxL4swLRNPD/zV4OxTes4la07Xc4hetpUW/vc75t5m1ANyxG6ymnEQ2FsLQsoMlYB2vV1sO3m8tQ== dependencies: immer "^9.0.21" redux "^4.2.1" @@ -3789,14 +3989,16 @@ "@rollup/plugin-babel@^5.2.0": version "5.3.1" - resolved "https://registry.npmjs.org/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz" + resolved "https://registry.yarnpkg.com/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz#04bc0608f4aa4b2e4b1aebf284344d0f68fda283" + integrity sha512-WFfdLWU/xVWKeRQnKmIAQULUI7Il0gZnBIH/ZFO069wYIfPu+8zrfp/KMW0atmELoRDq8FbiP3VCss9MhCut7Q== dependencies: "@babel/helper-module-imports" "^7.10.4" "@rollup/pluginutils" "^3.1.0" "@rollup/plugin-node-resolve@^11.2.1": version "11.2.1" - resolved "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-11.2.1.tgz" + resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-11.2.1.tgz#82aa59397a29cd4e13248b106e6a4a1880362a60" + integrity 
sha512-yc2n43jcqVyGE2sqV5/YCmocy9ArjVAP/BeXyTtADTBBX6V0e5UMqwO8CdQ0kzjb6zu5P1qMzsScCMRvE9OlVg== dependencies: "@rollup/pluginutils" "^3.1.0" "@types/resolve" "1.17.1" @@ -3807,46 +4009,61 @@ "@rollup/plugin-replace@^2.4.1": version "2.4.2" - resolved "https://registry.npmjs.org/@rollup/plugin-replace/-/plugin-replace-2.4.2.tgz" + resolved "https://registry.yarnpkg.com/@rollup/plugin-replace/-/plugin-replace-2.4.2.tgz#a2d539314fbc77c244858faa523012825068510a" + integrity sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg== dependencies: "@rollup/pluginutils" "^3.1.0" magic-string "^0.25.7" "@rollup/pluginutils@^3.1.0": version "3.1.0" - resolved "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-3.1.0.tgz#706b4524ee6dc8b103b3c995533e5ad680c02b9b" + integrity sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg== dependencies: "@types/estree" "0.0.39" estree-walker "^1.0.1" picomatch "^2.2.2" "@rushstack/eslint-patch@^1.1.0": - version "1.3.2" - resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.3.2.tgz#31b9c510d8cada9683549e1dbb4284cca5001faf" + version "1.3.3" + resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.3.3.tgz#16ab6c727d8c2020a5b6e4a176a243ecd88d8d69" + integrity sha512-0xd7qez0AQ+MbHatZTlI1gu5vkG8r7MYRUJAHPAHJBmGLs16zpkrpAVLvjQKQOqaXPDUBwOiJzNc00znHSCVBw== "@sinclair/typebox@^0.24.1": version "0.24.51" - resolved "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.24.51.tgz" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.24.51.tgz#645f33fe4e02defe26f2f5c0410e1c094eac7f5f" + integrity sha512-1P1OROm/rdubP5aFDSZQILU0vrLCJ4fvHt6EoqHEM+2D/G5MK3bIaymUKLit8Js9gbns5UyJnkP/TZROLw4tUA== "@sinclair/typebox@^0.27.8": version "0.27.8" resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" + integrity sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA== "@sinonjs/commons@^1.7.0": version "1.8.6" - resolved "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.6.tgz" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.6.tgz#80c516a4dc264c2a69115e7578d62581ff455ed9" + integrity sha512-Ky+XkAkqPZSm3NLBeUng77EBQl3cmeJhITaGHdYH8kjVB+aun3S4XBRti2zt17mtt0mIUDiNxYeoJm6drVvBJQ== dependencies: type-detect "4.0.8" "@sinonjs/fake-timers@^8.0.1": version "8.1.0" - resolved "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz#3fdc2b6cb58935b21bfb8d1625eb1300484316e7" + integrity sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg== dependencies: "@sinonjs/commons" "^1.7.0" +"@smithy/types@^2.2.0": + version "2.2.1" + resolved "https://registry.yarnpkg.com/@smithy/types/-/types-2.2.1.tgz#49f2f32bb2f54822c324ecf347b7706016581a0b" + integrity sha512-6nyDOf027ZeJiQVm6PXmLm7dR+hR2YJUkr4VwUniXA8xZUGAu5Mk0zfx2BPFrt+e5YauvlIqQoH0CsrM4tLkfg== + dependencies: + tslib "^2.5.0" + "@surma/rollup-plugin-off-main-thread@^2.2.3": version "2.2.3" - resolved "https://registry.npmjs.org/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.3.tgz" + resolved 
"https://registry.yarnpkg.com/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.3.tgz#ee34985952ca21558ab0d952f00298ad2190c053" + integrity sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ== dependencies: ejs "^3.1.6" json5 "^2.2.0" @@ -3855,39 +4072,48 @@ "@svgr/babel-plugin-add-jsx-attribute@^5.4.0": version "5.4.0" - resolved "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz#81ef61947bb268eb9d50523446f9c638fb355906" + integrity sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg== "@svgr/babel-plugin-remove-jsx-attribute@^5.4.0": version "5.4.0" - resolved "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-5.4.0.tgz" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-5.4.0.tgz#6b2c770c95c874654fd5e1d5ef475b78a0a962ef" + integrity sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg== "@svgr/babel-plugin-remove-jsx-empty-expression@^5.0.1": version "5.0.1" - resolved "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-5.0.1.tgz" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-5.0.1.tgz#25621a8915ed7ad70da6cea3d0a6dbc2ea933efd" + integrity sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA== "@svgr/babel-plugin-replace-jsx-attribute-value@^5.0.1": version "5.0.1" - resolved "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-5.0.1.tgz" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-5.0.1.tgz#0b221fc57f9fcd10e91fe219e2cd0dd03145a897" + integrity sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ== "@svgr/babel-plugin-svg-dynamic-title@^5.4.0": version "5.4.0" - resolved "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-5.4.0.tgz" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-5.4.0.tgz#139b546dd0c3186b6e5db4fefc26cb0baea729d7" + integrity sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg== "@svgr/babel-plugin-svg-em-dimensions@^5.4.0": version "5.4.0" - resolved "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-5.4.0.tgz" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-5.4.0.tgz#6543f69526632a133ce5cabab965deeaea2234a0" + integrity sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw== "@svgr/babel-plugin-transform-react-native-svg@^5.4.0": version "5.4.0" - resolved "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-5.4.0.tgz" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-5.4.0.tgz#00bf9a7a73f1cad3948cdab1f8dfb774750f8c80" 
+ integrity sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q== "@svgr/babel-plugin-transform-svg-component@^5.5.0": version "5.5.0" - resolved "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-5.5.0.tgz" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-5.5.0.tgz#583a5e2a193e214da2f3afeb0b9e8d3250126b4a" + integrity sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ== "@svgr/babel-preset@^5.5.0": version "5.5.0" - resolved "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-5.5.0.tgz" + resolved "https://registry.yarnpkg.com/@svgr/babel-preset/-/babel-preset-5.5.0.tgz#8af54f3e0a8add7b1e2b0fcd5a882c55393df327" + integrity sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig== dependencies: "@svgr/babel-plugin-add-jsx-attribute" "^5.4.0" "@svgr/babel-plugin-remove-jsx-attribute" "^5.4.0" @@ -3900,7 +4126,8 @@ "@svgr/core@^5.5.0": version "5.5.0" - resolved "https://registry.npmjs.org/@svgr/core/-/core-5.5.0.tgz" + resolved "https://registry.yarnpkg.com/@svgr/core/-/core-5.5.0.tgz#82e826b8715d71083120fe8f2492ec7d7874a579" + integrity sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ== dependencies: "@svgr/plugin-jsx" "^5.5.0" camelcase "^6.2.0" @@ -3908,13 +4135,15 @@ "@svgr/hast-util-to-babel-ast@^5.5.0": version "5.5.0" - resolved "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-5.5.0.tgz" + resolved "https://registry.yarnpkg.com/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-5.5.0.tgz#5ee52a9c2533f73e63f8f22b779f93cd432a5461" + integrity sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ== dependencies: "@babel/types" "^7.12.6" "@svgr/plugin-jsx@^5.5.0": version "5.5.0" - resolved "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-5.5.0.tgz" + resolved "https://registry.yarnpkg.com/@svgr/plugin-jsx/-/plugin-jsx-5.5.0.tgz#1aa8cd798a1db7173ac043466d7b52236b369000" + integrity sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA== dependencies: "@babel/core" "^7.12.3" "@svgr/babel-preset" "^5.5.0" @@ -3923,7 +4152,8 @@ "@svgr/plugin-svgo@^5.5.0": version "5.5.0" - resolved "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-5.5.0.tgz" + resolved "https://registry.yarnpkg.com/@svgr/plugin-svgo/-/plugin-svgo-5.5.0.tgz#02da55d85320549324e201c7b2e53bf431fcc246" + integrity sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ== dependencies: cosmiconfig "^7.0.0" deepmerge "^4.2.2" @@ -3931,7 +4161,8 @@ "@svgr/webpack@^5.5.0": version "5.5.0" - resolved "https://registry.npmjs.org/@svgr/webpack/-/webpack-5.5.0.tgz" + resolved "https://registry.yarnpkg.com/@svgr/webpack/-/webpack-5.5.0.tgz#aae858ee579f5fa8ce6c3166ef56c6a1b381b640" + integrity sha512-DOBOK255wfQxguUta2INKkzPj6AIS6iafZYiYmHn6W3pHlycSRRlvWKCfLDG10fXfLWqE3DJHgRUOyJYmARa7g== dependencies: "@babel/core" "^7.12.3" "@babel/plugin-transform-react-constant-elements" "^7.12.1" @@ -3945,6 +4176,7 @@ "@testing-library/dom@^8.0.0": version "8.20.1" resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-8.20.1.tgz#2e52a32e46fc88369eef7eef634ac2a192decd9f" + integrity 
sha512-/DiOQ5xBxgdYRC8LNk7U+RWat0S3qRLeIw3ZIkMQ9kkVlRmwD/Eg8k8CqIpD6GW7u20JIUOfMKbxtiLutpjQ4g== dependencies: "@babel/code-frame" "^7.10.4" "@babel/runtime" "^7.12.5" @@ -3956,8 +4188,9 @@ pretty-format "^27.0.2" "@testing-library/jest-dom@^5.16.2": - version "5.16.5" - resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-5.16.5.tgz#3912846af19a29b2dbf32a6ae9c31ef52580074e" + version "5.17.0" + resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-5.17.0.tgz#5e97c8f9a15ccf4656da00fecab505728de81e0c" + integrity sha512-ynmNeT7asXyH3aSVv4vvX4Rb+0qjOhdNHnO/3vuZNqPmhDpV/+rCSGwQ7bLcmU2cJ4dvoheIO85LQj0IbJHEtg== dependencies: "@adobe/css-tools" "^4.0.1" "@babel/runtime" "^7.9.2" @@ -3972,6 +4205,7 @@ "@testing-library/react@^12.1.4": version "12.1.5" resolved "https://registry.yarnpkg.com/@testing-library/react/-/react-12.1.5.tgz#bb248f72f02a5ac9d949dea07279095fa577963b" + integrity sha512-OfTXCJUFgjd/digLUuPxa0+/3ZxsQmE7ub9kcbW/wi96Bh3o/p5vrETcBGfP17NWPGqeYYl5LTRpwyGoMC4ysg== dependencies: "@babel/runtime" "^7.12.5" "@testing-library/dom" "^8.0.0" @@ -3979,42 +4213,50 @@ "@testing-library/user-event@^13.5.0": version "13.5.0" - resolved "https://registry.npmjs.org/@testing-library/user-event/-/user-event-13.5.0.tgz" + resolved "https://registry.yarnpkg.com/@testing-library/user-event/-/user-event-13.5.0.tgz#69d77007f1e124d55314a2b73fd204b333b13295" + integrity sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg== dependencies: "@babel/runtime" "^7.12.5" "@tootallnate/once@1": version "1.1.2" - resolved "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz" + resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" + integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== "@trysound/sax@0.2.0": version "0.2.0" - resolved "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz" + resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" + integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== "@turf/boolean-clockwise@6.5.0": version "6.5.0" - resolved "https://registry.npmjs.org/@turf/boolean-clockwise/-/boolean-clockwise-6.5.0.tgz" + resolved "https://registry.yarnpkg.com/@turf/boolean-clockwise/-/boolean-clockwise-6.5.0.tgz#34573ecc18f900080f00e4ff364631a8b1135794" + integrity sha512-45+C7LC5RMbRWrxh3Z0Eihsc8db1VGBO5d9BLTOAwU4jR6SgsunTfRWR16X7JUwIDYlCVEmnjcXJNi/kIU3VIw== dependencies: "@turf/helpers" "^6.5.0" "@turf/invariant" "^6.5.0" "@turf/helpers@^6.5.0": version "6.5.0" - resolved "https://registry.npmjs.org/@turf/helpers/-/helpers-6.5.0.tgz" + resolved "https://registry.yarnpkg.com/@turf/helpers/-/helpers-6.5.0.tgz#f79af094bd6b8ce7ed2bd3e089a8493ee6cae82e" + integrity sha512-VbI1dV5bLFzohYYdgqwikdMVpe7pJ9X3E+dlr425wa2/sMJqYDhTO++ec38/pcPvPE6oD9WEEeU3Xu3gza+VPw== "@turf/invariant@^6.5.0": version "6.5.0" - resolved "https://registry.npmjs.org/@turf/invariant/-/invariant-6.5.0.tgz" + resolved "https://registry.yarnpkg.com/@turf/invariant/-/invariant-6.5.0.tgz#970afc988023e39c7ccab2341bd06979ddc7463f" + integrity sha512-Wv8PRNCtPD31UVbdJE/KVAWKe7l6US+lJItRR/HOEW3eh+U/JwRCSUl/KZ7bmjM/C+zLNoreM2TU6OoLACs4eg== dependencies: "@turf/helpers" "^6.5.0" "@types/aria-query@^5.0.1": version "5.0.1" resolved 
"https://registry.yarnpkg.com/@types/aria-query/-/aria-query-5.0.1.tgz#3286741fb8f1e1580ac28784add4c7a1d49bdfbc" + integrity sha512-XTIieEY+gvJ39ChLcB4If5zHtPxt3Syj5rgZR+e1ctpmK8NjPf0zFqsz4JpLJT0xla9GFDKjy8Cpu331nrmE1Q== "@types/babel__core@^7.0.0", "@types/babel__core@^7.1.14": version "7.20.1" resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.20.1.tgz#916ecea274b0c776fec721e333e55762d3a9614b" + integrity sha512-aACu/U/omhdk15O4Nfb+fHgH/z3QsfQzpnvRZhYhThms83ZnAOZz7zZAWO7mn2yyNQaA4xTO8GLK3uqFU4bYYw== dependencies: "@babel/parser" "^7.20.7" "@babel/types" "^7.20.7" @@ -4024,13 +4266,15 @@ "@types/babel__generator@*": version "7.6.4" - resolved "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.4.tgz" + resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" + integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== dependencies: "@babel/types" "^7.0.0" "@types/babel__template@*": version "7.4.1" - resolved "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.1.tgz" + resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" + integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== dependencies: "@babel/parser" "^7.1.0" "@babel/types" "^7.0.0" @@ -4038,49 +4282,57 @@ "@types/babel__traverse@*", "@types/babel__traverse@^7.0.4", "@types/babel__traverse@^7.0.6": version "7.20.1" resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.20.1.tgz#dd6f1d2411ae677dcb2db008c962598be31d6acf" + integrity sha512-MitHFXnhtgwsGZWtT68URpOvLN4EREih1u3QtQiN4VdAxWKRVvGCSvw/Qth0M0Qq3pJpnGOu5JaM/ydK7OGbqg== dependencies: "@babel/types" "^7.20.7" "@types/body-parser@*": version "1.19.2" - resolved "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz" + resolved "https://registry.yarnpkg.com/@types/body-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0" + integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g== dependencies: "@types/connect" "*" "@types/node" "*" "@types/bonjour@^3.5.9": version "3.5.10" - resolved "https://registry.npmjs.org/@types/bonjour/-/bonjour-3.5.10.tgz" + resolved "https://registry.yarnpkg.com/@types/bonjour/-/bonjour-3.5.10.tgz#0f6aadfe00ea414edc86f5d106357cda9701e275" + integrity sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw== dependencies: "@types/node" "*" "@types/connect-history-api-fallback@^1.3.5": version "1.5.0" resolved "https://registry.yarnpkg.com/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.5.0.tgz#9fd20b3974bdc2bcd4ac6567e2e0f6885cb2cf41" + integrity sha512-4x5FkPpLipqwthjPsF7ZRbOv3uoLUFkTA9G9v583qi4pACvq0uTELrB8OLUzPWUI4IJIyvM85vzkV1nyiI2Lig== dependencies: "@types/express-serve-static-core" "*" "@types/node" "*" "@types/connect@*": version "3.4.35" - resolved "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz" + resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.35.tgz#5fcf6ae445e4021d1fc2219a4873cc73a3bb2ad1" + integrity sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ== dependencies: "@types/node" "*" "@types/cookie@^0.3.3": version "0.3.3" - resolved 
"https://registry.npmjs.org/@types/cookie/-/cookie-0.3.3.tgz" + resolved "https://registry.yarnpkg.com/@types/cookie/-/cookie-0.3.3.tgz#85bc74ba782fb7aa3a514d11767832b0e3bc6803" + integrity sha512-LKVP3cgXBT9RYj+t+9FDKwS5tdI+rPBXaNSkma7hvqy35lc7mAokC2zsqWJH0LaqIt3B962nuYI77hsJoT1gow== "@types/eslint-scope@^3.7.3": version "3.7.4" - resolved "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.4.tgz" + resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" + integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== dependencies: "@types/eslint" "*" "@types/estree" "*" "@types/eslint@*", "@types/eslint@^7.29.0 || ^8.4.1": - version "8.44.0" - resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.44.0.tgz#55818eabb376e2272f77fbf5c96c43137c3c1e53" + version "8.44.2" + resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.44.2.tgz#0d21c505f98a89b8dd4d37fa162b09da6089199a" + integrity sha512-sdPRb9K6iL5XZOmBubg8yiFp5yS/JdUDQsq5e6h95km91MCYMuvp7mh1fjPEYUhvHepKpZOjnEaMBR4PxjWDzg== dependencies: "@types/estree" "*" "@types/json-schema" "*" @@ -4088,14 +4340,17 @@ "@types/estree@*", "@types/estree@^1.0.0": version "1.0.1" resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.1.tgz#aa22750962f3bf0e79d753d3cc067f010c95f194" + integrity sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA== "@types/estree@0.0.39": version "0.0.39" - resolved "https://registry.npmjs.org/@types/estree/-/estree-0.0.39.tgz" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" + integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== "@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.33": version "4.17.35" resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.35.tgz#c95dd4424f0d32e525d23812aa8ab8e4d3906c4f" + integrity sha512-wALWQwrgiB2AWTT91CB62b6Yt0sNHpznUXeZEcnPU3DRdlDIz74x8Qg1UUYKSVFi+va5vKOLYRBI1bRKiLLKIg== dependencies: "@types/node" "*" "@types/qs" "*" @@ -4105,6 +4360,7 @@ "@types/express@*", "@types/express@^4.17.13": version "4.17.17" resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.17.tgz#01d5437f6ef9cfa8668e616e13c2f2ac9a491ae4" + integrity sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q== dependencies: "@types/body-parser" "*" "@types/express-serve-static-core" "^4.17.33" @@ -4113,50 +4369,59 @@ "@types/graceful-fs@^4.1.2": version "4.1.6" - resolved "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.6.tgz" + resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.6.tgz#e14b2576a1c25026b7f02ede1de3b84c3a1efeae" + integrity sha512-Sig0SNORX9fdW+bQuTEovKj3uHcUL6LQKbCrrqb1X7J6/ReAbhCXRAhc+SMejhLELFj2QcyuxmUooZ4bt5ReSw== dependencies: "@types/node" "*" "@types/hoist-non-react-statics@^3.3.0": version "3.3.1" - resolved "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.1.tgz" + resolved "https://registry.yarnpkg.com/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.1.tgz#1124aafe5118cb591977aeb1ceaaed1070eb039f" + integrity sha512-iMIqiko6ooLrTh1joXodJK5X9xeEALT1kM5G3ZLhD3hszxBdIEd5C75U834D9mLcINgD4OyZf5uQXjkuYydWvA== dependencies: "@types/react" "*" hoist-non-react-statics 
"^3.3.0" "@types/html-minifier-terser@^6.0.0": version "6.1.0" - resolved "https://registry.npmjs.org/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz" + resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35" + integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== "@types/http-errors@*": version "2.0.1" resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-2.0.1.tgz#20172f9578b225f6c7da63446f56d4ce108d5a65" + integrity sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ== "@types/http-proxy@^1.17.8": version "1.17.11" resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.11.tgz#0ca21949a5588d55ac2b659b69035c84bd5da293" + integrity sha512-HC8G7c1WmaF2ekqpnFq626xd3Zz0uvaqFmBJNRZCGEZCXkvSdJoNFn/8Ygbd9fKNQj8UzLdCETaI0UWPAjK7IA== dependencies: "@types/node" "*" "@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": version "2.0.4" - resolved "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" + integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== "@types/istanbul-lib-report@*": version "3.0.0" - resolved "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" + integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== dependencies: "@types/istanbul-lib-coverage" "*" "@types/istanbul-reports@^3.0.0": version "3.0.1" - resolved "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" + integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== dependencies: "@types/istanbul-lib-report" "*" "@types/jest@*": - version "29.5.2" - resolved "https://registry.yarnpkg.com/@types/jest/-/jest-29.5.2.tgz#86b4afc86e3a8f3005b297ed8a72494f89e6395b" + version "29.5.3" + resolved "https://registry.yarnpkg.com/@types/jest/-/jest-29.5.3.tgz#7a35dc0044ffb8b56325c6802a4781a626b05777" + integrity sha512-1Nq7YrO/vJE/FYnqYyw0FS8LdrjExSgIiHyKg7xPpn+yi8Q4huZryKnkJatN1ZRH89Kw2v33/8ZMB7DuZeSLlA== dependencies: expect "^29.0.0" pretty-format "^29.0.0" @@ -4164,72 +4429,89 @@ "@types/json-schema@*", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": version "7.0.12" resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.12.tgz#d70faba7039d5fca54c83c7dbab41051d2b6f6cb" + integrity sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA== "@types/json5@^0.0.29": version "0.0.29" - resolved "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz" + resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + integrity 
sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== "@types/lodash@^4.14.175": - version "4.14.195" - resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.195.tgz#bafc975b252eb6cea78882ce8a7b6bf22a6de632" + version "4.14.197" + resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.197.tgz#e95c5ddcc814ec3e84c891910a01e0c8a378c54b" + integrity sha512-BMVOiWs0uNxHVlHBgzTIqJYmj+PgCo4euloGF+5m4okL3rEYzM2EEv78mw8zWSMM57dM7kVIgJ2QDvwHSoCI5g== "@types/mime@*": version "3.0.1" resolved "https://registry.yarnpkg.com/@types/mime/-/mime-3.0.1.tgz#5f8f2bca0a5863cb69bc0b0acd88c96cb1d4ae10" + integrity sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA== "@types/mime@^1": version "1.3.2" resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a" + integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw== + +"@types/node-fetch@2.6.4": + version "2.6.4" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.4.tgz#1bc3a26de814f6bf466b25aeb1473fa1afe6a660" + integrity sha512-1ZX9fcN4Rvkvgv4E6PAY5WXUFWFcRWxZa3EW83UjycOB9ljJCedb2CupIP4RZMEwF/M3eTcCihbBRgwtGbg5Rg== + dependencies: + "@types/node" "*" + form-data "^3.0.0" "@types/node@*", "@types/node@>=6": - version "20.4.1" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.4.1.tgz#a6033a8718653c50ac4962977e14d0f984d9527d" + version "20.5.0" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.5.0.tgz#7fc8636d5f1aaa3b21e6245e97d56b7f56702313" + integrity sha512-Mgq7eCtoTjT89FqNoTzzXg2XvCi5VMhRV6+I2aYanc6kQCBImeNaAYRs/DyoVqk1YEUJK5gN9VO7HRIdz4Wo3Q== "@types/parse-json@^4.0.0": version "4.0.0" - resolved "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" + integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== "@types/prettier@^2.1.5": version "2.7.3" resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.7.3.tgz#3e51a17e291d01d17d3fc61422015a933af7a08f" + integrity sha512-+68kP9yzs4LMp7VNh8gdzMSPZFL44MLGqiHWvttYJe+6qnuVr4Ek9wSBQoveqY/r+LwjCcU29kNVkidwim+kYA== "@types/prop-types@*", "@types/prop-types@^15.7.5": version "15.7.5" - resolved "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz" + resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" + integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== "@types/q@^1.5.1": version "1.5.5" - resolved "https://registry.npmjs.org/@types/q/-/q-1.5.5.tgz" + resolved "https://registry.yarnpkg.com/@types/q/-/q-1.5.5.tgz#75a2a8e7d8ab4b230414505d92335d1dcb53a6df" + integrity sha512-L28j2FcJfSZOnL1WBjDYp2vUHCeIFlyYI/53EwD/rKUBQ7MtUUfbQWiyKJGpcnv4/WgrhWsFKrcPstcAt/J0tQ== "@types/qs@*": version "6.9.7" - resolved "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz" + resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" + integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== "@types/range-parser@*": version "1.2.4" - resolved "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz" + resolved 
"https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc" + integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== "@types/react-dom@<18.0.0": version "17.0.20" resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-17.0.20.tgz#e0c8901469d732b36d8473b40b679ad899da1b53" + integrity sha512-4pzIjSxDueZZ90F52mU3aPoogkHIoSIDG+oQ+wQK7Cy2B9S+MvOqY0uEA/qawKz381qrEDkvpwyt8Bm31I8sbA== dependencies: "@types/react" "^17" -"@types/react-is@^18.2.0": - version "18.2.0" - resolved "https://registry.npmjs.org/@types/react-is/-/react-is-18.2.0.tgz" - dependencies: - "@types/react" "*" - "@types/react-is@^18.2.1": version "18.2.1" resolved "https://registry.yarnpkg.com/@types/react-is/-/react-is-18.2.1.tgz#61d01c2a6fc089a53520c0b66996d458fdc46863" + integrity sha512-wyUkmaaSZEzFZivD8F2ftSyAfk6L+DfFliVj/mYdOXbVjRcS87fQJLTnhk6dRZPuJjI+9g6RZJO4PNCngUrmyw== dependencies: "@types/react" "*" "@types/react-redux@^7.1.20": version "7.1.25" resolved "https://registry.yarnpkg.com/@types/react-redux/-/react-redux-7.1.25.tgz#de841631205b24f9dfb4967dd4a7901e048f9a88" + integrity sha512-bAGh4e+w5D8dajd6InASVIyCo4pZLJ66oLb80F9OBLO1gKESbZcRCJpTT6uLXX+HAB57zw1WTdwJdAsewuTweg== dependencies: "@types/hoist-non-react-statics" "^3.3.0" "@types/react" "*" @@ -4238,21 +4520,24 @@ "@types/react-transition-group@^4.4.5", "@types/react-transition-group@^4.4.6": version "4.4.6" - resolved "https://registry.npmjs.org/@types/react-transition-group/-/react-transition-group-4.4.6.tgz" + resolved "https://registry.yarnpkg.com/@types/react-transition-group/-/react-transition-group-4.4.6.tgz#18187bcda5281f8e10dfc48f0943e2fdf4f75e2e" + integrity sha512-VnCdSxfcm08KjsJVQcfBmhEQAPnLB8G08hAxn39azX1qYBQ/5RVQuoHuKIcfKOdncuaUvEpFKFzEvbtIMsfVew== dependencies: "@types/react" "*" "@types/react@*": - version "18.2.14" - resolved "https://registry.yarnpkg.com/@types/react/-/react-18.2.14.tgz#fa7a6fecf1ce35ca94e74874f70c56ce88f7a127" + version "18.2.20" + resolved "https://registry.yarnpkg.com/@types/react/-/react-18.2.20.tgz#1605557a83df5c8a2cc4eeb743b3dfc0eb6aaeb2" + integrity sha512-WKNtmsLWJM/3D5mG4U84cysVY31ivmyw85dE84fOCk5Hx78wezB/XEjVPWl2JTZ5FkEeaTJf+VgUAUn3PE7Isw== dependencies: "@types/prop-types" "*" "@types/scheduler" "*" csstype "^3.0.2" "@types/react@^17": - version "17.0.62" - resolved "https://registry.yarnpkg.com/@types/react/-/react-17.0.62.tgz#2efe8ddf8533500ec44b1334dd1a97caa2f860e3" + version "17.0.64" + resolved "https://registry.yarnpkg.com/@types/react/-/react-17.0.64.tgz#468162c66c33ddb4548eb1a0e36682028d9e9a62" + integrity sha512-IlgbX/vglDTwrCRgad6fTCzOT+D/5C0xwuvrzfuqfhg9gJrkFqAGADpUFlEtqbrP1IEo9QLSbo41MaFfoIu9Aw== dependencies: "@types/prop-types" "*" "@types/scheduler" "*" @@ -4260,38 +4545,45 @@ "@types/resolve@1.17.1": version "1.17.1" - resolved "https://registry.npmjs.org/@types/resolve/-/resolve-1.17.1.tgz" + resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-1.17.1.tgz#3afd6ad8967c77e4376c598a82ddd58f46ec45d6" + integrity sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw== dependencies: "@types/node" "*" "@types/retry@0.12.0": version "0.12.0" - resolved "https://registry.npmjs.org/@types/retry/-/retry-0.12.0.tgz" + resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d" + integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== 
"@types/scheduler@*": version "0.16.3" resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.3.tgz#cef09e3ec9af1d63d2a6cc5b383a737e24e6dcf5" + integrity sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ== "@types/semver@^7.3.12": version "7.5.0" resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.0.tgz#591c1ce3a702c45ee15f47a42ade72c2fd78978a" + integrity sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw== "@types/send@*": version "0.17.1" resolved "https://registry.yarnpkg.com/@types/send/-/send-0.17.1.tgz#ed4932b8a2a805f1fe362a70f4e62d0ac994e301" + integrity sha512-Cwo8LE/0rnvX7kIIa3QHCkcuF21c05Ayb0ZfxPiv0W8VRiZiNW/WuRupHKpqqGVGf7SUA44QSOUKaEd9lIrd/Q== dependencies: "@types/mime" "^1" "@types/node" "*" "@types/serve-index@^1.9.1": version "1.9.1" - resolved "https://registry.npmjs.org/@types/serve-index/-/serve-index-1.9.1.tgz" + resolved "https://registry.yarnpkg.com/@types/serve-index/-/serve-index-1.9.1.tgz#1b5e85370a192c01ec6cec4735cf2917337a6278" + integrity sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg== dependencies: "@types/express" "*" "@types/serve-static@*", "@types/serve-static@^1.13.10": version "1.15.2" resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.15.2.tgz#3e5419ecd1e40e7405d34093f10befb43f63381a" + integrity sha512-J2LqtvFYCzaj8pVYKw8klQXrLLk7TBZmQ4ShlcdkELFKGwGMfevMLneMMRkMgZxotOD9wg497LpC7O8PcvAmfw== dependencies: "@types/http-errors" "*" "@types/mime" "*" @@ -4299,58 +4591,68 @@ "@types/sockjs@^0.3.33": version "0.3.33" - resolved "https://registry.npmjs.org/@types/sockjs/-/sockjs-0.3.33.tgz" + resolved "https://registry.yarnpkg.com/@types/sockjs/-/sockjs-0.3.33.tgz#570d3a0b99ac995360e3136fd6045113b1bd236f" + integrity sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw== dependencies: "@types/node" "*" "@types/stack-utils@^2.0.0": version "2.0.1" - resolved "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" + integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== "@types/testing-library__jest-dom@^5.9.1": - version "5.14.7" - resolved "https://registry.yarnpkg.com/@types/testing-library__jest-dom/-/testing-library__jest-dom-5.14.7.tgz#fff92bed2a32c58a9224a85603e731519c0a9037" + version "5.14.9" + resolved "https://registry.yarnpkg.com/@types/testing-library__jest-dom/-/testing-library__jest-dom-5.14.9.tgz#0fb1e6a0278d87b6737db55af5967570b67cb466" + integrity sha512-FSYhIjFlfOpGSRyVoMBMuS3ws5ehFQODymf3vlI7U1K8c7PHwWwFY7VREfmsuzHSOnoKs/9/Y983ayOs7eRzqw== dependencies: "@types/jest" "*" "@types/trusted-types@^2.0.2": version "2.0.3" resolved "https://registry.yarnpkg.com/@types/trusted-types/-/trusted-types-2.0.3.tgz#a136f83b0758698df454e328759dbd3d44555311" + integrity sha512-NfQ4gyz38SL8sDNrSixxU2Os1a5xcdFxipAFxYEuLUlvU2uDwS4NUpsImcf1//SlWItCVMMLiylsxbmNMToV/g== "@types/ws@^8.5.5": version "8.5.5" resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.5.tgz#af587964aa06682702ee6dcbc7be41a80e4b28eb" + integrity sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg== dependencies: "@types/node" "*" "@types/yargs-parser@*": version "21.0.0" - resolved 
"https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.0.tgz" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" + integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== "@types/yargs@^16.0.0": version "16.0.5" - resolved "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.5.tgz" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-16.0.5.tgz#12cc86393985735a283e387936398c2f9e5f88e3" + integrity sha512-AxO/ADJOBFJScHbWhq2xAhlWP24rY4aCEG/NFaMvbT3X2MgRsLjhjQwsn0Zi5zn0LG9jUhCCZMeX9Dkuw6k+vQ== dependencies: "@types/yargs-parser" "*" "@types/yargs@^17.0.8": version "17.0.24" - resolved "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.24.tgz" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.24.tgz#b3ef8d50ad4aa6aecf6ddc97c580a00f5aa11902" + integrity sha512-6i0aC7jV6QzQB8ne1joVZ0eSFIstHsCrobmOtghM11yGlH0j43FKL2UhWdELkyps0zuf7qVTUVCCR+tgSlyLLw== dependencies: "@types/yargs-parser" "*" "@types/zen-observable@^0.8.0": version "0.8.3" - resolved "https://registry.npmjs.org/@types/zen-observable/-/zen-observable-0.8.3.tgz" + resolved "https://registry.yarnpkg.com/@types/zen-observable/-/zen-observable-0.8.3.tgz#781d360c282436494b32fe7d9f7f8e64b3118aa3" + integrity sha512-fbF6oTd4sGGy0xjHPKAt+eS2CrxJ3+6gQ3FGcBoIJR2TLAyCkCyI8JqZNy+FeON0AhVgNJoUumVoZQjBFUqHkw== "@typescript-eslint/eslint-plugin@^5.5.0": - version "5.61.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.61.0.tgz#a1a5290cf33863b4db3fb79350b3c5275a7b1223" + version "5.62.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.62.0.tgz#aeef0328d172b9e37d9bab6dbc13b87ed88977db" + integrity sha512-TiZzBSJja/LbhNPvk6yc0JrX9XqhQ0hdh6M2svYfsHGejaKFIAGd9MQ+ERIMzLGlN/kZoYIgdxFV0PuljTKXag== dependencies: "@eslint-community/regexpp" "^4.4.0" - "@typescript-eslint/scope-manager" "5.61.0" - "@typescript-eslint/type-utils" "5.61.0" - "@typescript-eslint/utils" "5.61.0" + "@typescript-eslint/scope-manager" "5.62.0" + "@typescript-eslint/type-utils" "5.62.0" + "@typescript-eslint/utils" "5.62.0" debug "^4.3.4" graphemer "^1.4.0" ignore "^5.2.0" @@ -4359,75 +4661,84 @@ tsutils "^3.21.0" "@typescript-eslint/experimental-utils@^5.0.0": - version "5.61.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-5.61.0.tgz#5ab9f8f1f7e7a43c68a48c450d972c7e400a2be4" + version "5.62.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-5.62.0.tgz#14559bf73383a308026b427a4a6129bae2146741" + integrity sha512-RTXpeB3eMkpoclG3ZHft6vG/Z30azNHuqY6wKPBHlVMZFuEvrtlEDe8gMqDb+SO+9hjC/pLekeSCryf9vMZlCw== dependencies: - "@typescript-eslint/utils" "5.61.0" + "@typescript-eslint/utils" "5.62.0" "@typescript-eslint/parser@^5.5.0": - version "5.61.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.61.0.tgz#7fbe3e2951904bb843f8932ebedd6e0635bffb70" + version "5.62.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.62.0.tgz#1b63d082d849a2fcae8a569248fbe2ee1b8a56c7" + integrity sha512-VlJEV0fOQ7BExOsHYAGrgbEiZoi8D+Bl2+f6V2RrXerRSylnp+ZBHmPvaIa8cz0Ajx7WO7Z5RqfgYg7ED1nRhA== dependencies: - "@typescript-eslint/scope-manager" "5.61.0" - "@typescript-eslint/types" "5.61.0" - "@typescript-eslint/typescript-estree" "5.61.0" + "@typescript-eslint/scope-manager" "5.62.0" + 
"@typescript-eslint/types" "5.62.0" + "@typescript-eslint/typescript-estree" "5.62.0" debug "^4.3.4" -"@typescript-eslint/scope-manager@5.61.0": - version "5.61.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.61.0.tgz#b670006d069c9abe6415c41f754b1b5d949ef2b2" +"@typescript-eslint/scope-manager@5.62.0": + version "5.62.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.62.0.tgz#d9457ccc6a0b8d6b37d0eb252a23022478c5460c" + integrity sha512-VXuvVvZeQCQb5Zgf4HAxc04q5j+WrNAtNh9OwCsCgpKqESMTu3tF/jhZ3xG6T4NZwWl65Bg8KuS2uEvhSfLl0w== dependencies: - "@typescript-eslint/types" "5.61.0" - "@typescript-eslint/visitor-keys" "5.61.0" + "@typescript-eslint/types" "5.62.0" + "@typescript-eslint/visitor-keys" "5.62.0" -"@typescript-eslint/type-utils@5.61.0": - version "5.61.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.61.0.tgz#e90799eb2045c4435ea8378cb31cd8a9fddca47a" +"@typescript-eslint/type-utils@5.62.0": + version "5.62.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.62.0.tgz#286f0389c41681376cdad96b309cedd17d70346a" + integrity sha512-xsSQreu+VnfbqQpW5vnCJdq1Z3Q0U31qiWmRhr98ONQmcp/yhiPJFPq8MXiJVLiksmOKSjIldZzkebzHuCGzew== dependencies: - "@typescript-eslint/typescript-estree" "5.61.0" - "@typescript-eslint/utils" "5.61.0" + "@typescript-eslint/typescript-estree" "5.62.0" + "@typescript-eslint/utils" "5.62.0" debug "^4.3.4" tsutils "^3.21.0" -"@typescript-eslint/types@5.61.0": - version "5.61.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.61.0.tgz#e99ff11b5792d791554abab0f0370936d8ca50c0" +"@typescript-eslint/types@5.62.0": + version "5.62.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.62.0.tgz#258607e60effa309f067608931c3df6fed41fd2f" + integrity sha512-87NVngcbVXUahrRTqIK27gD2t5Cu1yuCXxbLcFtCzZGlfyVWWh8mLHkoxzjsB6DDNnvdL+fW8MiwPEJyGJQDgQ== -"@typescript-eslint/typescript-estree@5.61.0": - version "5.61.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.61.0.tgz#4c7caca84ce95bb41aa585d46a764bcc050b92f3" +"@typescript-eslint/typescript-estree@5.62.0": + version "5.62.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.62.0.tgz#7d17794b77fabcac615d6a48fb143330d962eb9b" + integrity sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA== dependencies: - "@typescript-eslint/types" "5.61.0" - "@typescript-eslint/visitor-keys" "5.61.0" + "@typescript-eslint/types" "5.62.0" + "@typescript-eslint/visitor-keys" "5.62.0" debug "^4.3.4" globby "^11.1.0" is-glob "^4.0.3" semver "^7.3.7" tsutils "^3.21.0" -"@typescript-eslint/utils@5.61.0", "@typescript-eslint/utils@^5.58.0": - version "5.61.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.61.0.tgz#5064838a53e91c754fffbddd306adcca3fe0af36" +"@typescript-eslint/utils@5.62.0", "@typescript-eslint/utils@^5.58.0": + version "5.62.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.62.0.tgz#141e809c71636e4a75daa39faed2fb5f4b10df86" + integrity sha512-n8oxjeb5aIbPFEtmQxQYOLI0i9n5ySBEY/ZEHHZqKQSFnxio1rv6dthascc9dLuwrL0RC5mPCxB7vnAVGAYWAQ== dependencies: "@eslint-community/eslint-utils" "^4.2.0" "@types/json-schema" "^7.0.9" "@types/semver" "^7.3.12" - "@typescript-eslint/scope-manager" "5.61.0" - "@typescript-eslint/types" "5.61.0" - 
"@typescript-eslint/typescript-estree" "5.61.0" + "@typescript-eslint/scope-manager" "5.62.0" + "@typescript-eslint/types" "5.62.0" + "@typescript-eslint/typescript-estree" "5.62.0" eslint-scope "^5.1.1" semver "^7.3.7" -"@typescript-eslint/visitor-keys@5.61.0": - version "5.61.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.61.0.tgz#c79414fa42158fd23bd2bb70952dc5cdbb298140" +"@typescript-eslint/visitor-keys@5.62.0": + version "5.62.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.62.0.tgz#2174011917ce582875954ffe2f6912d5931e353e" + integrity sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw== dependencies: - "@typescript-eslint/types" "5.61.0" + "@typescript-eslint/types" "5.62.0" eslint-visitor-keys "^3.3.0" "@webassemblyjs/ast@1.11.6", "@webassemblyjs/ast@^1.11.5": version "1.11.6" resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.6.tgz#db046555d3c413f8966ca50a95176a0e2c642e24" + integrity sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q== dependencies: "@webassemblyjs/helper-numbers" "1.11.6" "@webassemblyjs/helper-wasm-bytecode" "1.11.6" @@ -4435,18 +4746,22 @@ "@webassemblyjs/floating-point-hex-parser@1.11.6": version "1.11.6" resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz#dacbcb95aff135c8260f77fa3b4c5fea600a6431" + integrity sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw== "@webassemblyjs/helper-api-error@1.11.6": version "1.11.6" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768" + integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q== "@webassemblyjs/helper-buffer@1.11.6": version "1.11.6" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz#b66d73c43e296fd5e88006f18524feb0f2c7c093" + integrity sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA== "@webassemblyjs/helper-numbers@1.11.6": version "1.11.6" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz#cbce5e7e0c1bd32cf4905ae444ef64cea919f1b5" + integrity sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g== dependencies: "@webassemblyjs/floating-point-hex-parser" "1.11.6" "@webassemblyjs/helper-api-error" "1.11.6" @@ -4455,10 +4770,12 @@ "@webassemblyjs/helper-wasm-bytecode@1.11.6": version "1.11.6" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9" + integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA== "@webassemblyjs/helper-wasm-section@1.11.6": version "1.11.6" resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz#ff97f3863c55ee7f580fd5c41a381e9def4aa577" + integrity sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g== dependencies: "@webassemblyjs/ast" "1.11.6" "@webassemblyjs/helper-buffer" "1.11.6" @@ -4468,22 +4785,26 @@ "@webassemblyjs/ieee754@1.11.6": version "1.11.6" resolved 
"https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz#bb665c91d0b14fffceb0e38298c329af043c6e3a" + integrity sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg== dependencies: "@xtuc/ieee754" "^1.2.0" "@webassemblyjs/leb128@1.11.6": version "1.11.6" resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.6.tgz#70e60e5e82f9ac81118bc25381a0b283893240d7" + integrity sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ== dependencies: "@xtuc/long" "4.2.2" "@webassemblyjs/utf8@1.11.6": version "1.11.6" resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a" + integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA== "@webassemblyjs/wasm-edit@^1.11.5": version "1.11.6" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz#c72fa8220524c9b416249f3d94c2958dfe70ceab" + integrity sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw== dependencies: "@webassemblyjs/ast" "1.11.6" "@webassemblyjs/helper-buffer" "1.11.6" @@ -4497,6 +4818,7 @@ "@webassemblyjs/wasm-gen@1.11.6": version "1.11.6" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz#fb5283e0e8b4551cc4e9c3c0d7184a65faf7c268" + integrity sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA== dependencies: "@webassemblyjs/ast" "1.11.6" "@webassemblyjs/helper-wasm-bytecode" "1.11.6" @@ -4507,6 +4829,7 @@ "@webassemblyjs/wasm-opt@1.11.6": version "1.11.6" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz#d9a22d651248422ca498b09aa3232a81041487c2" + integrity sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g== dependencies: "@webassemblyjs/ast" "1.11.6" "@webassemblyjs/helper-buffer" "1.11.6" @@ -4516,6 +4839,7 @@ "@webassemblyjs/wasm-parser@1.11.6", "@webassemblyjs/wasm-parser@^1.11.5": version "1.11.6" resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz#bb85378c527df824004812bbdb784eea539174a1" + integrity sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ== dependencies: "@webassemblyjs/ast" "1.11.6" "@webassemblyjs/helper-api-error" "1.11.6" @@ -4527,69 +4851,74 @@ "@webassemblyjs/wast-printer@1.11.6": version "1.11.6" resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz#a7bf8dd7e362aeb1668ff43f35cb849f188eff20" + integrity sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A== dependencies: "@webassemblyjs/ast" "1.11.6" "@xtuc/long" "4.2.2" "@wry/context@^0.4.0": version "0.4.4" - resolved "https://registry.npmjs.org/@wry/context/-/context-0.4.4.tgz" + resolved "https://registry.yarnpkg.com/@wry/context/-/context-0.4.4.tgz#e50f5fa1d6cfaabf2977d1fda5ae91717f8815f8" + integrity sha512-LrKVLove/zw6h2Md/KZyWxIkFM6AoyKp71OqpH9Hiip1csjPVoD3tPxlbQUNxEnHENks3UGgNpSBCAfq9KWuag== dependencies: "@types/node" ">=6" tslib "^1.9.3" -"@wry/context@^0.7.0": +"@wry/context@^0.7.0", "@wry/context@^0.7.3": version "0.7.3" resolved "https://registry.yarnpkg.com/@wry/context/-/context-0.7.3.tgz#240f6dfd4db5ef54f81f6597f6714e58d4f476a1" + integrity sha512-Nl8WTesHp89RF803Se9X3IiHjdmLBrIvPMaJkl+rKVJAYyPsz1TEUbu89943HpvujtSJgDUx9W4vZw3K1Mr3sA== dependencies: 
tslib "^2.3.0" "@wry/equality@^0.1.2": version "0.1.11" - resolved "https://registry.npmjs.org/@wry/equality/-/equality-0.1.11.tgz" + resolved "https://registry.yarnpkg.com/@wry/equality/-/equality-0.1.11.tgz#35cb156e4a96695aa81a9ecc4d03787bc17f1790" + integrity sha512-mwEVBDUVODlsQQ5dfuLUS5/Tf7jqUKyhKYHmVi4fPB6bDMOfWvUPJmKgS1Z7Za/sOI3vzWt4+O7yCiL/70MogA== dependencies: tslib "^1.9.3" -"@wry/equality@^0.5.0": +"@wry/equality@^0.5.6": version "0.5.6" resolved "https://registry.yarnpkg.com/@wry/equality/-/equality-0.5.6.tgz#cd4a533c72c3752993ab8cbf682d3d20e3cb601e" + integrity sha512-D46sfMTngaYlrH+OspKf8mIJETntFnf6Hsjb0V41jAXJ7Bx2kB8Rv8RCUujuVWYttFtHkUNp7g+FwxNQAr6mXA== dependencies: tslib "^2.3.0" -"@wry/trie@^0.3.0": - version "0.3.2" - resolved "https://registry.yarnpkg.com/@wry/trie/-/trie-0.3.2.tgz#a06f235dc184bd26396ba456711f69f8c35097e6" - dependencies: - tslib "^2.3.0" - -"@wry/trie@^0.4.0": +"@wry/trie@^0.4.3": version "0.4.3" resolved "https://registry.yarnpkg.com/@wry/trie/-/trie-0.4.3.tgz#077d52c22365871bf3ffcbab8e95cb8bc5689af4" + integrity sha512-I6bHwH0fSf6RqQcnnXLJKhkSXG45MFral3GxPaY4uAl0LYDZM+YDVDAiU9bYwjTuysy1S0IeecWtmq1SZA3M1w== dependencies: tslib "^2.3.0" "@xtuc/ieee754@^1.2.0": version "1.2.0" - resolved "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz" + resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" + integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== "@xtuc/long@4.2.2": version "4.2.2" - resolved "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz" + resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" + integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== abab@^2.0.3, abab@^2.0.5: version "2.0.6" - resolved "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz" + resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" + integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA== accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: version "1.3.8" - resolved "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" + integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== dependencies: mime-types "~2.1.34" negotiator "0.6.3" acorn-globals@^6.0.0: version "6.0.0" - resolved "https://registry.npmjs.org/acorn-globals/-/acorn-globals-6.0.0.tgz" + resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" + integrity sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== dependencies: acorn "^7.1.1" acorn-walk "^7.1.1" @@ -4597,59 +4926,71 @@ acorn-globals@^6.0.0: acorn-import-assertions@^1.9.0: version "1.9.0" resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac" + integrity sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA== acorn-jsx@^5.3.2: version "5.3.2" - resolved "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity 
sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== acorn-walk@^7.1.1: version "7.2.0" - resolved "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" + integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== acorn@^7.1.1: version "7.4.1" - resolved "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== acorn@^8.2.4, acorn@^8.7.1, acorn@^8.8.2, acorn@^8.9.0: version "8.10.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.10.0.tgz#8be5b3907a67221a81ab23c7889c4c5526b62ec5" + integrity sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw== address@^1.0.1, address@^1.1.2: version "1.2.2" - resolved "https://registry.npmjs.org/address/-/address-1.2.2.tgz" + resolved "https://registry.yarnpkg.com/address/-/address-1.2.2.tgz#2b5248dac5485a6390532c6a517fda2e3faac89e" + integrity sha512-4B/qKCfeE/ODUaAUpSwfzazo5x29WD4r3vXiWsB7I2mSDAihwEqKO+g8GELZUQSSAo5e1XTYh3ZVfLyxBc12nA== adjust-sourcemap-loader@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/adjust-sourcemap-loader/-/adjust-sourcemap-loader-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/adjust-sourcemap-loader/-/adjust-sourcemap-loader-4.0.0.tgz#fc4a0fd080f7d10471f30a7320f25560ade28c99" + integrity sha512-OXwN5b9pCUXNQHJpwwD2qP40byEmSgzj8B4ydSN0uMNYWiFmJ6x6KwUllMmfk8Rwu/HJDFR7U8ubsWBoN0Xp0A== dependencies: loader-utils "^2.0.0" regex-parser "^2.2.11" agent-base@6: version "6.0.2" - resolved "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== dependencies: debug "4" ajv-formats@^2.1.1: version "2.1.1" - resolved "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz" + resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" + integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== dependencies: ajv "^8.0.0" ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: version "3.5.2" - resolved "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" + integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== ajv-keywords@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" + integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== dependencies: fast-deep-equal "^3.1.3" -ajv@^6.10.0, ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: +ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: version "6.12.6" - resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity 
sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== dependencies: fast-deep-equal "^3.1.1" fast-json-stable-stringify "^2.0.0" @@ -4658,16 +4999,18 @@ ajv@^6.10.0, ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: ajv@^8.0.0, ajv@^8.6.0, ajv@^8.9.0: version "8.12.0" - resolved "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.12.0.tgz#d1a0527323e22f53562c567c00991577dfbe19d1" + integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== dependencies: fast-deep-equal "^3.1.1" json-schema-traverse "^1.0.0" require-from-string "^2.0.2" uri-js "^4.2.2" -amazon-cognito-identity-js@6.2.0: - version "6.2.0" - resolved "https://registry.npmjs.org/amazon-cognito-identity-js/-/amazon-cognito-identity-js-6.2.0.tgz" +amazon-cognito-identity-js@6.3.3: + version "6.3.3" + resolved "https://registry.yarnpkg.com/amazon-cognito-identity-js/-/amazon-cognito-identity-js-6.3.3.tgz#d301309827aa7d74d6e3892cc27f25332c5cba3c" + integrity sha512-pw70WNbyfRPgCr3SsvMlCO/sADUSVytTMwhyTALPG62lmdBeYkvaXMLkQDerN15odSQHG+WFlNmDPCySEfKlNA== dependencies: "@aws-crypto/sha256-js" "1.2.2" buffer "4.9.2" @@ -4678,55 +5021,66 @@ amazon-cognito-identity-js@6.2.0: amazon-quicksight-embedding-sdk@^1.18.1: version "1.20.1" resolved "https://registry.yarnpkg.com/amazon-quicksight-embedding-sdk/-/amazon-quicksight-embedding-sdk-1.20.1.tgz#bb0f574365b36bc790ac4c43c80ffd6cc2cc0af1" + integrity sha512-JYBCeq1Rq1OOeZQB9ry85uASTTtqZzp5hFv0ihIX7XyD9anvcoW6GHo4ttzdkJMTMMAnzXhIBe9CRKm3tHMzeg== ansi-escapes@^4.2.1, ansi-escapes@^4.3.1: version "4.3.2" - resolved "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== dependencies: type-fest "^0.21.3" ansi-html-community@^0.0.8: version "0.0.8" - resolved "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz" + resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" + integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== ansi-regex@^5.0.1: version "5.0.1" - resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== ansi-regex@^6.0.1: version "6.0.1" - resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" + integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== ansi-styles@^3.2.1: version "3.2.1" - resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" ansi-styles@^4.0.0, ansi-styles@^4.1.0: version "4.3.0" - resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz" + 
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== dependencies: color-convert "^2.0.1" ansi-styles@^5.0.0: version "5.2.0" - resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== any-promise@^1.0.0: version "1.3.0" resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" + integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== anymatch@^3.0.3, anymatch@~3.1.2: version "3.1.3" - resolved "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" + integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== dependencies: normalize-path "^3.0.0" picomatch "^2.0.4" apexcharts@^3.33.2: - version "3.41.0" - resolved "https://registry.yarnpkg.com/apexcharts/-/apexcharts-3.41.0.tgz#7aef77275c19dfb925552d6fc8e027443a6d1337" + version "3.41.1" + resolved "https://registry.yarnpkg.com/apexcharts/-/apexcharts-3.41.1.tgz#69669ae6225d4e7a32bdfd9750157e236d56b6e6" + integrity sha512-kta8fhXrfZYqW7K9kF7FqZ6imQaC6moyRgcUZjwIky/oeHVVISSN/2rjUIvZXnwxWHiSdDHMqLy+TqJhB4DXFA== dependencies: svg.draggable.js "^2.2.2" svg.easing.js "^2.0.0" @@ -4737,7 +5091,8 @@ apexcharts@^3.33.2: apollo-boost@^0.4.9: version "0.4.9" - resolved "https://registry.npmjs.org/apollo-boost/-/apollo-boost-0.4.9.tgz" + resolved "https://registry.yarnpkg.com/apollo-boost/-/apollo-boost-0.4.9.tgz#ab3ba539c2ca944e6fd156583a1b1954b17a6791" + integrity sha512-05y5BKcDaa8w47f8d81UVwKqrAjn8uKLv6QM9fNdldoNzQ+rnOHgFlnrySUZRz9QIT3vPftQkEz2UEASp1Mi5g== dependencies: apollo-cache "^1.3.5" apollo-cache-inmemory "^1.6.6" @@ -4751,7 +5106,8 @@ apollo-boost@^0.4.9: apollo-cache-inmemory@^1.6.6: version "1.6.6" - resolved "https://registry.npmjs.org/apollo-cache-inmemory/-/apollo-cache-inmemory-1.6.6.tgz" + resolved "https://registry.yarnpkg.com/apollo-cache-inmemory/-/apollo-cache-inmemory-1.6.6.tgz#56d1f2a463a6b9db32e9fa990af16d2a008206fd" + integrity sha512-L8pToTW/+Xru2FFAhkZ1OA9q4V4nuvfoPecBM34DecAugUZEBhI2Hmpgnzq2hTKZ60LAMrlqiASm0aqAY6F8/A== dependencies: apollo-cache "^1.3.5" apollo-utilities "^1.3.4" @@ -4761,14 +5117,16 @@ apollo-cache-inmemory@^1.6.6: apollo-cache@1.3.5, apollo-cache@^1.3.5: version "1.3.5" - resolved "https://registry.npmjs.org/apollo-cache/-/apollo-cache-1.3.5.tgz" + resolved "https://registry.yarnpkg.com/apollo-cache/-/apollo-cache-1.3.5.tgz#9dbebfc8dbe8fe7f97ba568a224bca2c5d81f461" + integrity sha512-1XoDy8kJnyWY/i/+gLTEbYLnoiVtS8y7ikBr/IfmML4Qb+CM7dEEbIUOjnY716WqmZ/UpXIxTfJsY7rMcqiCXA== dependencies: apollo-utilities "^1.3.4" tslib "^1.10.0" apollo-client@^2.6.10: version "2.6.10" - resolved "https://registry.npmjs.org/apollo-client/-/apollo-client-2.6.10.tgz" + resolved "https://registry.yarnpkg.com/apollo-client/-/apollo-client-2.6.10.tgz#86637047b51d940c8eaa771a4ce1b02df16bea6a" + integrity sha512-jiPlMTN6/5CjZpJOkGeUV0mb4zxx33uXWdj/xQCfAMkuNAC3HN7CvYDyMHHEzmcQ5GV12LszWoQ/VlxET24CtA== dependencies: "@types/zen-observable" "^0.8.0" 
apollo-cache "1.3.5" @@ -4781,7 +5139,8 @@ apollo-client@^2.6.10: apollo-link-error@^1.0.3: version "1.1.13" - resolved "https://registry.npmjs.org/apollo-link-error/-/apollo-link-error-1.1.13.tgz" + resolved "https://registry.yarnpkg.com/apollo-link-error/-/apollo-link-error-1.1.13.tgz#c1a1bb876ffe380802c8df0506a32c33aad284cd" + integrity sha512-jAZOOahJU6bwSqb2ZyskEK1XdgUY9nkmeclCrW7Gddh1uasHVqmoYc4CKdb0/H0Y1J9lvaXKle2Wsw/Zx1AyUg== dependencies: apollo-link "^1.2.14" apollo-link-http-common "^0.2.16" @@ -4789,7 +5148,8 @@ apollo-link-error@^1.0.3: apollo-link-http-common@^0.2.16: version "0.2.16" - resolved "https://registry.npmjs.org/apollo-link-http-common/-/apollo-link-http-common-0.2.16.tgz" + resolved "https://registry.yarnpkg.com/apollo-link-http-common/-/apollo-link-http-common-0.2.16.tgz#756749dafc732792c8ca0923f9a40564b7c59ecc" + integrity sha512-2tIhOIrnaF4UbQHf7kjeQA/EmSorB7+HyJIIrUjJOKBgnXwuexi8aMecRlqTIDWcyVXCeqLhUnztMa6bOH/jTg== dependencies: apollo-link "^1.2.14" ts-invariant "^0.4.0" @@ -4797,7 +5157,8 @@ apollo-link-http-common@^0.2.16: apollo-link-http@^1.3.1: version "1.5.17" - resolved "https://registry.npmjs.org/apollo-link-http/-/apollo-link-http-1.5.17.tgz" + resolved "https://registry.yarnpkg.com/apollo-link-http/-/apollo-link-http-1.5.17.tgz#499e9f1711bf694497f02c51af12d82de5d8d8ba" + integrity sha512-uWcqAotbwDEU/9+Dm9e1/clO7hTB2kQ/94JYcGouBVLjoKmTeJTUPQKcJGpPwUjZcSqgYicbFqQSoJIW0yrFvg== dependencies: apollo-link "^1.2.14" apollo-link-http-common "^0.2.16" @@ -4805,7 +5166,8 @@ apollo-link-http@^1.3.1: apollo-link@^1.0.0, apollo-link@^1.0.6, apollo-link@^1.2.14: version "1.2.14" - resolved "https://registry.npmjs.org/apollo-link/-/apollo-link-1.2.14.tgz" + resolved "https://registry.yarnpkg.com/apollo-link/-/apollo-link-1.2.14.tgz#3feda4b47f9ebba7f4160bef8b977ba725b684d9" + integrity sha512-p67CMEFP7kOG1JZ0ZkYZwRDa369w5PIjtMjvrQd/HnIV8FRsHRqLqK+oAZQnFa1DDdZtOtHTi+aMIW6EatC2jg== dependencies: apollo-utilities "^1.3.0" ts-invariant "^0.4.0" @@ -4814,16 +5176,18 @@ apollo-link@^1.0.0, apollo-link@^1.0.6, apollo-link@^1.2.14: apollo-utilities@1.3.4, apollo-utilities@^1.3.0, apollo-utilities@^1.3.4: version "1.3.4" - resolved "https://registry.npmjs.org/apollo-utilities/-/apollo-utilities-1.3.4.tgz" + resolved "https://registry.yarnpkg.com/apollo-utilities/-/apollo-utilities-1.3.4.tgz#6129e438e8be201b6c55b0f13ce49d2c7175c9cf" + integrity sha512-pk2hiWrCXMAy2fRPwEyhvka+mqwzeP60Jr1tRYi5xru+3ko94HI9o6lK0CT33/w4RDlxWchmdhDCrvdr+pHCig== dependencies: "@wry/equality" "^0.1.2" fast-json-stable-stringify "^2.0.0" ts-invariant "^0.4.0" tslib "^1.10.0" -appbase-js@^5.2.0: - version "5.2.0" - resolved "https://registry.npmjs.org/appbase-js/-/appbase-js-5.2.0.tgz" +appbase-js@^5.3.4: + version "5.3.4" + resolved "https://registry.yarnpkg.com/appbase-js/-/appbase-js-5.3.4.tgz#c30add29133fcfd7cc938a710a347cb89377670e" + integrity sha512-rbjCC7jmcPh1cjnW3W7TjTugQpLOB8SrPIdnkKwrNDdsl8d0nzPTJH4/082UTnkvlMkjm4WYY2HYCRl2vpDlpw== dependencies: cross-fetch "^3.1.5" querystring "^0.2.0" @@ -4831,48 +5195,57 @@ appbase-js@^5.2.0: arg@^5.0.2: version "5.0.2" - resolved "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz" + resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" + integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== argparse@^1.0.7: version "1.0.10" - resolved "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz" + resolved 
"https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" argparse@^2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== aria-query@5.1.3: version "5.1.3" resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.1.3.tgz#19db27cd101152773631396f7a95a3b58c22c35e" + integrity sha512-R5iJ5lkuHybztUfuOAznmboyjWq8O6sqNqtK7CLOqdydi54VNbORp49mb14KbWgG1QD3JFO9hJdZ+y4KutfdOQ== dependencies: deep-equal "^2.0.5" aria-query@^5.0.0, aria-query@^5.1.3: version "5.3.0" resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.3.0.tgz#650c569e41ad90b51b3d7df5e5eed1c7549c103e" + integrity sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A== dependencies: dequal "^2.0.3" array-buffer-byte-length@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz#fabe8bc193fea865f317fe7807085ee0dee5aead" + integrity sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A== dependencies: call-bind "^1.0.2" is-array-buffer "^3.0.1" array-flatten@1.1.1: version "1.1.1" - resolved "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== array-flatten@^2.1.2: version "2.1.2" - resolved "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" + integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== -array-includes@^3.1.5, array-includes@^3.1.6: +array-includes@^3.1.6: version "3.1.6" - resolved "https://registry.npmjs.org/array-includes/-/array-includes-3.1.6.tgz" + resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.6.tgz#9e9e720e194f198266ba9e18c29e6a9b0e4b225f" + integrity sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw== dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -4882,11 +5255,24 @@ array-includes@^3.1.5, array-includes@^3.1.6: array-union@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +array.prototype.findlastindex@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.2.tgz#bc229aef98f6bd0533a2bc61ff95209875526c9b" + integrity sha512-tb5thFFlUcp7NdNF6/MpDk/1r/4awWG1FIz3YqDf+/zJSTezBb+/5WViH41obXULHVpDzoiCLpJ/ZO9YbJMsdw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + es-shim-unscopables "^1.0.0" + get-intrinsic 
"^1.1.3" array.prototype.flat@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz#ffc6576a7ca3efc2f46a143b9d1dda9b4b3cf5e2" + integrity sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA== dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -4895,7 +5281,8 @@ array.prototype.flat@^1.3.1: array.prototype.flatmap@^1.3.1: version "1.3.1" - resolved "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz" + resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz#1aae7903c2100433cb8261cd4ed310aab5c4a183" + integrity sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ== dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -4904,7 +5291,8 @@ array.prototype.flatmap@^1.3.1: array.prototype.reduce@^1.0.5: version "1.0.5" - resolved "https://registry.npmjs.org/array.prototype.reduce/-/array.prototype.reduce-1.0.5.tgz" + resolved "https://registry.yarnpkg.com/array.prototype.reduce/-/array.prototype.reduce-1.0.5.tgz#6b20b0daa9d9734dd6bc7ea66b5bbce395471eac" + integrity sha512-kDdugMl7id9COE8R7MHF5jWk7Dqt/fs4Pv+JXoICnYwqpjjjbUurz6w5fT5IG6brLdJhv6/VoHB0H7oyIBXd+Q== dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -4914,7 +5302,8 @@ array.prototype.reduce@^1.0.5: array.prototype.tosorted@^1.1.1: version "1.1.1" - resolved "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.1.tgz" + resolved "https://registry.yarnpkg.com/array.prototype.tosorted/-/array.prototype.tosorted-1.1.1.tgz#ccf44738aa2b5ac56578ffda97c03fd3e23dd532" + integrity sha512-pZYPXPRl2PqWcsUs6LOMn+1f1532nEoPTYowBtqLwAW+W8vSVhkIGnmOX1t/UQjD6YGI0vcD2B1U7ZFGQH9jnQ== dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -4922,36 +5311,62 @@ array.prototype.tosorted@^1.1.1: es-shim-unscopables "^1.0.0" get-intrinsic "^1.1.3" +arraybuffer.prototype.slice@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.1.tgz#9b5ea3868a6eebc30273da577eb888381c0044bb" + integrity sha512-09x0ZWFEjj4WD8PDbykUwo3t9arLn8NIzmmYEJFpYekOAQjpkGSyrQhNoRTcwwcFRu+ycWF78QZ63oWTqSjBcw== + dependencies: + array-buffer-byte-length "^1.0.0" + call-bind "^1.0.2" + define-properties "^1.2.0" + get-intrinsic "^1.2.1" + is-array-buffer "^3.0.2" + is-shared-array-buffer "^1.0.2" + asap@~2.0.6: version "2.0.6" - resolved "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz" + resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" + integrity sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA== ast-types-flow@^0.0.7: version "0.0.7" - resolved "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz" + resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" + integrity sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag== async@^3.2.3: version "3.2.4" - resolved "https://registry.npmjs.org/async/-/async-3.2.4.tgz" + resolved "https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" + integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== + +asynciterator.prototype@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/asynciterator.prototype/-/asynciterator.prototype-1.0.0.tgz#8c5df0514936cdd133604dfcc9d3fb93f09b2b62" + integrity sha512-wwHYEIS0Q80f5mosx3L/dfG5t5rjEa9Ft51GTaNt862EnpyGHpgz2RkZvLPp1oF5TnAiTohkEKVEu8pQPJI7Vg== + dependencies: + has-symbols "^1.0.3" asynckit@^0.4.0: version "0.4.0" - resolved "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== at-least-node@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" + integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== attr-accept@^2.2.2: version "2.2.2" - resolved "https://registry.npmjs.org/attr-accept/-/attr-accept-2.2.2.tgz" + resolved "https://registry.yarnpkg.com/attr-accept/-/attr-accept-2.2.2.tgz#646613809660110749e92f2c10833b70968d929b" + integrity sha512-7prDjvt9HmqiZ0cl5CRjtS84sEyhsHP2coDkaZKRKVfCDo9s7iw7ChVmar78Gu9pC4SoR/28wFu/G5JJhTnqEg== autoprefixer@^10.4.13: - version "10.4.14" - resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.14.tgz#e28d49902f8e759dd25b153264e862df2705f79d" + version "10.4.15" + resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.15.tgz#a1230f4aeb3636b89120b34a1f513e2f6834d530" + integrity sha512-KCuPB8ZCIqFdA4HwKXsvz7j6gvSDNhDP7WnUjBleRkKjPdvCmHFuQ77ocavI8FT6NdvlBnE2UFr2H4Mycn8Vew== dependencies: - browserslist "^4.21.5" - caniuse-lite "^1.0.30001464" + browserslist "^4.21.10" + caniuse-lite "^1.0.30001520" fraction.js "^4.2.0" normalize-range "^0.1.2" picocolors "^1.0.0" @@ -4959,51 +5374,58 @@ autoprefixer@^10.4.13: available-typed-arrays@^1.0.5: version "1.0.5" - resolved "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz" + resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7" + integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw== aws-amplify@^5.2.6: - version "5.2.6" - resolved "https://registry.npmjs.org/aws-amplify/-/aws-amplify-5.2.6.tgz" - dependencies: - "@aws-amplify/analytics" "6.2.0" - "@aws-amplify/api" "5.2.1" - "@aws-amplify/auth" "5.4.1" - "@aws-amplify/cache" "5.1.0" - "@aws-amplify/core" "5.4.0" - "@aws-amplify/datastore" "4.5.1" - "@aws-amplify/geo" "2.0.35" - "@aws-amplify/interactions" "5.1.1" - "@aws-amplify/notifications" "1.2.0" - "@aws-amplify/predictions" "5.2.3" - "@aws-amplify/pubsub" "5.2.1" - "@aws-amplify/storage" "5.4.1" + version "5.3.8" + resolved "https://registry.yarnpkg.com/aws-amplify/-/aws-amplify-5.3.8.tgz#91d9fb92a25f0891d0a32395e5992d05d23bdeb0" + integrity sha512-HAYZiHOIgFcuI4WnD1wXNYbhU9fJegMNCzwTnbVSNMX8OpkcjDVaT/Aa/KtAWCERHEkbuWOlDqA+5zy9cKCXAQ== + dependencies: + "@aws-amplify/analytics" "6.5.2" + "@aws-amplify/api" "5.4.2" + "@aws-amplify/auth" "5.6.2" + "@aws-amplify/cache" "5.1.8" + "@aws-amplify/core" "5.8.2" + "@aws-amplify/datastore" "4.7.2" + "@aws-amplify/geo" "2.3.2" + "@aws-amplify/interactions" "5.2.8" + "@aws-amplify/notifications" "1.6.2" + "@aws-amplify/predictions" "5.5.2" + "@aws-amplify/pubsub" "5.5.2" + "@aws-amplify/storage" "5.9.2" tslib "^2.0.0" 
axe-core@^4.6.2: version "4.7.2" resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.7.2.tgz#040a7342b20765cb18bb50b628394c21bccc17a0" + integrity sha512-zIURGIS1E1Q4pcrMjp+nnEh+16G56eG/MUllJH8yEvw7asDo7Ac9uhC9KIH5jzpITueEZolfYglnCGIuSBz39g== axios@0.26.0: version "0.26.0" - resolved "https://registry.npmjs.org/axios/-/axios-0.26.0.tgz" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.0.tgz#9a318f1c69ec108f8cd5f3c3d390366635e13928" + integrity sha512-lKoGLMYtHvFrPVt3r+RBMp9nh34N0M8zEfCWqdWZx6phynIEhQqAdydpyBAAG211zlhX9Rgu08cOamy6XjE5Og== dependencies: follow-redirects "^1.14.8" axios@^0.26.1: version "0.26.1" - resolved "https://registry.npmjs.org/axios/-/axios-0.26.1.tgz" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.1.tgz#1ede41c51fcf51bbbd6fd43669caaa4f0495aaa9" + integrity sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA== dependencies: follow-redirects "^1.14.8" axobject-query@^3.1.1: version "3.2.1" resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-3.2.1.tgz#39c378a6e3b06ca679f29138151e45b2b32da62a" + integrity sha512-jsyHu61e6N4Vbz/v18DHwWYKK0bSWLqn47eeDSKPB7m8tqMHF9YJ+mhIk2lVteyZrY8tnSj/jHOv4YiTCuCJgg== dependencies: dequal "^2.0.3" babel-jest@^27.4.2, babel-jest@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/babel-jest/-/babel-jest-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-27.5.1.tgz#a1bf8d61928edfefd21da27eb86a695bfd691444" + integrity sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg== dependencies: "@jest/transform" "^27.5.1" "@jest/types" "^27.5.1" @@ -5016,7 +5438,8 @@ babel-jest@^27.4.2, babel-jest@^27.5.1: babel-loader@^8.2.3: version "8.3.0" - resolved "https://registry.npmjs.org/babel-loader/-/babel-loader-8.3.0.tgz" + resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.3.0.tgz#124936e841ba4fe8176786d6ff28add1f134d6a8" + integrity sha512-H8SvsMF+m9t15HNLMipppzkC+Y2Yq+v3SonZyU70RBL/h1gxPkH08Ot8pEE9Z4Kd+czyWJClmFS8qzIP9OZ04Q== dependencies: find-cache-dir "^3.3.1" loader-utils "^2.0.0" @@ -5025,7 +5448,8 @@ babel-loader@^8.2.3: babel-plugin-emotion@^10.0.27: version "10.2.2" - resolved "https://registry.npmjs.org/babel-plugin-emotion/-/babel-plugin-emotion-10.2.2.tgz" + resolved "https://registry.yarnpkg.com/babel-plugin-emotion/-/babel-plugin-emotion-10.2.2.tgz#a1fe3503cff80abfd0bdda14abd2e8e57a79d17d" + integrity sha512-SMSkGoqTbTyUTDeuVuPIWifPdUGkTk1Kf9BWRiXIOIcuyMfsdp2EjeiiFvOzX8NOBvEh/ypKYvUh2rkgAJMCLA== dependencies: "@babel/helper-module-imports" "^7.0.0" "@emotion/hash" "0.8.0" @@ -5040,7 +5464,8 @@ babel-plugin-emotion@^10.0.27: babel-plugin-istanbul@^6.1.1: version "6.1.1" - resolved "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz" + resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" "@istanbuljs/load-nyc-config" "^1.0.0" @@ -5050,7 +5475,8 @@ babel-plugin-istanbul@^6.1.1: babel-plugin-jest-hoist@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz#9be98ecf28c331eb9f5df9c72d6f89deb8181c2e" + integrity 
sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ== dependencies: "@babel/template" "^7.3.3" "@babel/types" "^7.3.3" @@ -5059,7 +5485,8 @@ babel-plugin-jest-hoist@^27.5.1: babel-plugin-macros@^2.0.0: version "2.8.0" - resolved "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-2.8.0.tgz" + resolved "https://registry.yarnpkg.com/babel-plugin-macros/-/babel-plugin-macros-2.8.0.tgz#0f958a7cc6556b1e65344465d99111a1e5e10138" + integrity sha512-SEP5kJpfGYqYKpBrj5XU3ahw5p5GOHJ0U5ssOSQ/WBVdwkD2Dzlce95exQTs3jOVWPPKLBN2rlEWkCK7dSmLvg== dependencies: "@babel/runtime" "^7.7.2" cosmiconfig "^6.0.0" @@ -5067,7 +5494,8 @@ babel-plugin-macros@^2.0.0: babel-plugin-macros@^3.1.0: version "3.1.0" - resolved "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz#9ef6dc74deb934b4db344dc973ee851d148c50c1" + integrity sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg== dependencies: "@babel/runtime" "^7.12.5" cosmiconfig "^7.0.0" @@ -5075,40 +5503,47 @@ babel-plugin-macros@^3.1.0: babel-plugin-named-asset-import@^0.3.8: version "0.3.8" - resolved "https://registry.npmjs.org/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.8.tgz" + resolved "https://registry.yarnpkg.com/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.8.tgz#6b7fa43c59229685368683c28bc9734f24524cc2" + integrity sha512-WXiAc++qo7XcJ1ZnTYGtLxmBCVbddAml3CEXgWaBzNzLNoxtQ8AiGEFDMOhot9XjTCQbvP5E77Fj9Gk924f00Q== -babel-plugin-polyfill-corejs2@^0.4.4: - version "0.4.4" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.4.tgz#9f9a0e1cd9d645cc246a5e094db5c3aa913ccd2b" +babel-plugin-polyfill-corejs2@^0.4.5: + version "0.4.5" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.5.tgz#8097b4cb4af5b64a1d11332b6fb72ef5e64a054c" + integrity sha512-19hwUH5FKl49JEsvyTcoHakh6BE0wgXLLptIyKZ3PijHc/Ci521wygORCUCCred+E/twuqRyAkE02BAWPmsHOg== dependencies: "@babel/compat-data" "^7.22.6" - "@babel/helper-define-polyfill-provider" "^0.4.1" - "@nicolo-ribaudo/semver-v6" "^6.3.3" + "@babel/helper-define-polyfill-provider" "^0.4.2" + semver "^6.3.1" -babel-plugin-polyfill-corejs3@^0.8.2: - version "0.8.2" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.8.2.tgz#d406c5738d298cd9c66f64a94cf8d5904ce4cc5e" +babel-plugin-polyfill-corejs3@^0.8.3: + version "0.8.3" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.8.3.tgz#b4f719d0ad9bb8e0c23e3e630c0c8ec6dd7a1c52" + integrity sha512-z41XaniZL26WLrvjy7soabMXrfPWARN25PZoriDEiLMxAp50AUW3t35BGQUMg5xK3UrpVTtagIDklxYa+MhiNA== dependencies: - "@babel/helper-define-polyfill-provider" "^0.4.1" + "@babel/helper-define-polyfill-provider" "^0.4.2" core-js-compat "^3.31.0" -babel-plugin-polyfill-regenerator@^0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.5.1.tgz#ace7a5eced6dff7d5060c335c52064778216afd3" +babel-plugin-polyfill-regenerator@^0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.5.2.tgz#80d0f3e1098c080c8b5a65f41e9427af692dc326" + integrity 
sha512-tAlOptU0Xj34V1Y2PNTL4Y0FOJMDB6bZmoW39FeCQIhigGLkqu3Fj6uiXpxIf6Ij274ENdYx64y6Au+ZKlb1IA== dependencies: - "@babel/helper-define-polyfill-provider" "^0.4.1" + "@babel/helper-define-polyfill-provider" "^0.4.2" babel-plugin-syntax-jsx@^6.18.0: version "6.18.0" - resolved "https://registry.npmjs.org/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz#0af32a9a6e13ca7a3fd5069e62d7b0f58d0d8946" + integrity sha512-qrPaCSo9c8RHNRHIotaufGbuOBN8rtdC4QrrFFc43vyWCCz7Kl7GL1PGaXtMGQZUXrkCjNEgxDfmAuAabr/rlw== babel-plugin-transform-react-remove-prop-types@^0.4.24: version "0.4.24" - resolved "https://registry.npmjs.org/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz#f2edaf9b4c6a5fbe5c1d678bfb531078c1555f3a" + integrity sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA== babel-preset-current-node-syntax@^1.0.0: version "1.0.1" - resolved "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" + integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== dependencies: "@babel/plugin-syntax-async-generators" "^7.8.4" "@babel/plugin-syntax-bigint" "^7.8.3" @@ -5125,14 +5560,16 @@ babel-preset-current-node-syntax@^1.0.0: babel-preset-jest@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz#91f10f58034cb7989cb4f962b69fa6eef6a6bc81" + integrity sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag== dependencies: babel-plugin-jest-hoist "^27.5.1" babel-preset-current-node-syntax "^1.0.0" babel-preset-react-app@^10.0.1: version "10.0.1" - resolved "https://registry.npmjs.org/babel-preset-react-app/-/babel-preset-react-app-10.0.1.tgz" + resolved "https://registry.yarnpkg.com/babel-preset-react-app/-/babel-preset-react-app-10.0.1.tgz#ed6005a20a24f2c88521809fa9aea99903751584" + integrity sha512-b0D9IZ1WhhCWkrTXyFuIIgqGzSkRIH5D5AmB0bXbzYAB1OBAwHcUeyWW2LorutLWF5btNo/N7r/cIdmvvKJlYg== dependencies: "@babel/core" "^7.16.0" "@babel/plugin-proposal-class-properties" "^7.16.0" @@ -5153,23 +5590,28 @@ babel-preset-react-app@^10.0.1: balanced-match@^1.0.0: version "1.0.2" - resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== base-64@1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/base-64/-/base-64-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/base-64/-/base-64-1.0.0.tgz#09d0f2084e32a3fd08c2475b973788eee6ae8f4a" + integrity sha512-kwDPIFCGx0NZHog36dj+tHiwP4QMzsZ3AgMViUBKI0+V5n4U0ufTCUMhnQ04diaRI8EX/QcPfql7zlhZ7j4zgg== base64-js@^1.0.2, base64-js@^1.3.1: version "1.5.1" - resolved "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz" + 
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== batch@0.6.1: version "0.6.1" - resolved "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz" + resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" + integrity sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw== bfj@^7.0.2: version "7.0.2" - resolved "https://registry.npmjs.org/bfj/-/bfj-7.0.2.tgz" + resolved "https://registry.yarnpkg.com/bfj/-/bfj-7.0.2.tgz#1988ce76f3add9ac2913fd8ba47aad9e651bfbb2" + integrity sha512-+e/UqUzwmzJamNF50tBV6tZPTORow7gQ96iFow+8b562OdMpEK0BcJEq2OSPEDmAbSMBQ7PKZ87ubFkgxpYWgw== dependencies: bluebird "^3.5.5" check-types "^11.1.1" @@ -5178,19 +5620,23 @@ bfj@^7.0.2: big.js@^5.2.2: version "5.2.2" - resolved "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz" + resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" + integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== binary-extensions@^2.0.0: version "2.2.0" - resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" + integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== bluebird@^3.5.5: version "3.7.2" - resolved "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz" + resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" + integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== body-parser@1.20.1: version "1.20.1" - resolved "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.1.tgz#b1812a8912c195cd371a3ee5e66faa2338a5c668" + integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw== dependencies: bytes "3.1.2" content-type "~1.0.4" @@ -5208,6 +5654,7 @@ body-parser@1.20.1: bonjour-service@^1.0.11: version "1.1.1" resolved "https://registry.yarnpkg.com/bonjour-service/-/bonjour-service-1.1.1.tgz#960948fa0e0153f5d26743ab15baf8e33752c135" + integrity sha512-Z/5lQRMOG9k7W+FkeGTNjh7htqn/2LMnfOvBZ8pynNZCM9MwkQkI3zeI4oz09uWdcgmgHugVvBqxGg4VQJ5PCg== dependencies: array-flatten "^2.1.2" dns-equal "^1.0.0" @@ -5216,57 +5663,67 @@ bonjour-service@^1.0.11: boolbase@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" + integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== bowser@^2.11.0: version "2.11.0" - resolved "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz" + resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.11.0.tgz#5ca3c35757a7aa5771500c70a73a9f91ef420a8f" + integrity sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA== brace-expansion@^1.1.7: version "1.1.11" - resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" + resolved 
"https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" concat-map "0.0.1" brace-expansion@^2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== dependencies: balanced-match "^1.0.0" braces@^3.0.2, braces@~3.0.2: version "3.0.2" - resolved "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== dependencies: fill-range "^7.0.1" browser-process-hrtime@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" + integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== -browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.18.1, browserslist@^4.21.4, browserslist@^4.21.5, browserslist@^4.21.9: - version "4.21.9" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.9.tgz#e11bdd3c313d7e2a9e87e8b4b0c7872b13897635" +browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.18.1, browserslist@^4.21.10, browserslist@^4.21.4, browserslist@^4.21.9: + version "4.21.10" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.10.tgz#dbbac576628c13d3b2231332cb2ec5a46e015bb0" + integrity sha512-bipEBdZfVH5/pwrvqc+Ub0kUPVfGUhlKxbvfD+z1BDnPEO/X98ruXGA1WP5ASpAFKan7Qr6j736IacbZQuAlKQ== dependencies: - caniuse-lite "^1.0.30001503" - electron-to-chromium "^1.4.431" - node-releases "^2.0.12" + caniuse-lite "^1.0.30001517" + electron-to-chromium "^1.4.477" + node-releases "^2.0.13" update-browserslist-db "^1.0.11" bser@2.1.1: version "2.1.1" - resolved "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz" + resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== dependencies: node-int64 "^0.4.0" buffer-from@^1.0.0: version "1.1.2" - resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== buffer@4.9.2: version "4.9.2" - resolved "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" + integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== dependencies: base64-js "^1.0.2" ieee754 "^1.1.4" @@ -5274,48 +5731,57 @@ buffer@4.9.2: buffer@^5.4.3: version "5.7.1" - resolved "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz" + resolved 
"https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" + integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== dependencies: base64-js "^1.3.1" ieee754 "^1.1.13" builtin-modules@^3.1.0: version "3.3.0" - resolved "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz" + resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.3.0.tgz#cae62812b89801e9656336e46223e030386be7b6" + integrity sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw== bytes@3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" + integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw== bytes@3.1.2: version "3.1.2" - resolved "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== call-bind@^1.0.0, call-bind@^1.0.2: version "1.0.2" - resolved "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== dependencies: function-bind "^1.1.1" get-intrinsic "^1.0.2" callsites@^3.0.0: version "3.1.0" - resolved "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== camel-case@^4.1.2: version "4.1.2" - resolved "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz" + resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" + integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== dependencies: pascal-case "^3.1.2" tslib "^2.0.3" camelcase-css@^2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" + integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== camelcase-keys@6.2.2: version "6.2.2" - resolved "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz" + resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-6.2.2.tgz#5e755d6ba51aa223ec7d3d52f25778210f9dc3c0" + integrity sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg== dependencies: camelcase "^5.3.1" map-obj "^4.0.0" @@ -5323,36 +5789,43 @@ camelcase-keys@6.2.2: camelcase@^5.3.1: version "5.3.1" - resolved "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== camelcase@^6.2.0, camelcase@^6.2.1: version "6.3.0" - resolved 
"https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== can-use-dom@^0.1.0: version "0.1.0" - resolved "https://registry.npmjs.org/can-use-dom/-/can-use-dom-0.1.0.tgz" + resolved "https://registry.yarnpkg.com/can-use-dom/-/can-use-dom-0.1.0.tgz#22cc4a34a0abc43950f42c6411024a3f6366b45a" + integrity sha512-ceOhN1DL7Y4O6M0j9ICgmTYziV89WMd96SvSl0REd8PMgrY0B/WBOPoed5S1KUmJqXgUXh8gzSe6E3ae27upsQ== caniuse-api@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0" + integrity sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw== dependencies: browserslist "^4.0.0" caniuse-lite "^1.0.0" lodash.memoize "^4.1.2" lodash.uniq "^4.5.0" -caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001464, caniuse-lite@^1.0.30001503: - version "1.0.30001514" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001514.tgz#e2a7e184a23affc9367b7c8d734e7ec4628c1309" +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001517, caniuse-lite@^1.0.30001520: + version "1.0.30001521" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001521.tgz#e9930cf499f7c1e80334b6c1fbca52e00d889e56" + integrity sha512-fnx1grfpEOvDGH+V17eccmNjucGUnCbP6KL+l5KqBIerp26WK/+RQ7CIDE37KGJjaPyqWXXlFUyKiWmvdNNKmQ== case-sensitive-paths-webpack-plugin@^2.4.0: version "2.4.0" - resolved "https://registry.npmjs.org/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz" + resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" + integrity sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw== -chalk@^2.0.0, chalk@^2.4.1: +chalk@^2.4.1, chalk@^2.4.2: version "2.4.2" - resolved "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" @@ -5360,33 +5833,39 @@ chalk@^2.0.0, chalk@^2.4.1: chalk@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" + integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== dependencies: ansi-styles "^4.1.0" supports-color "^7.1.0" chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: version "4.1.2" - resolved "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== dependencies: ansi-styles "^4.1.0" supports-color "^7.1.0" char-regex@^1.0.2: version "1.0.2" - resolved "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz" + resolved 
"https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== char-regex@^2.0.0: version "2.0.1" - resolved "https://registry.npmjs.org/char-regex/-/char-regex-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-2.0.1.tgz#6dafdb25f9d3349914079f010ba8d0e6ff9cd01e" + integrity sha512-oSvEeo6ZUD7NepqAat3RqoucZ5SeqLJgOvVIwkafu6IP3V0pO38s/ypdVUmDDK6qIIHNlYHJAKX9E7R7HoKElw== check-types@^11.1.1: version "11.2.2" - resolved "https://registry.npmjs.org/check-types/-/check-types-11.2.2.tgz" + resolved "https://registry.yarnpkg.com/check-types/-/check-types-11.2.2.tgz#7afc0b6a860d686885062f2dba888ba5710335b4" + integrity sha512-HBiYvXvn9Z70Z88XKjz3AEKd4HJhBXsa3j7xFnITAzoS8+q6eIGi8qDB8FKPBAjtuxjI/zFpwuiCb8oDtKOYrA== chokidar@^3.4.2, chokidar@^3.5.3: version "3.5.3" - resolved "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" + integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== dependencies: anymatch "~3.1.2" braces "~3.0.2" @@ -5400,29 +5879,35 @@ chokidar@^3.4.2, chokidar@^3.5.3: chrome-trace-event@^1.0.2: version "1.0.3" - resolved "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz" + resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" + integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== ci-info@^3.2.0: version "3.8.0" resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.8.0.tgz#81408265a5380c929f0bc665d62256628ce9ef91" + integrity sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw== cjs-module-lexer@^1.0.0: version "1.2.3" resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.3.tgz#6c370ab19f8a3394e318fe682686ec0ac684d107" + integrity sha512-0TNiGstbQmCFwt4akjjBg5pLRTSyj/PkWQ1ZoO2zntmg9yLqSRxwEa4iCfQLGjqhiqBfOJa7W/E8wfGrTDmlZQ== classnames@^2.3.1: version "2.3.2" resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.2.tgz#351d813bf0137fcc6a76a16b88208d2560a0d924" + integrity sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw== clean-css@^5.2.2: version "5.3.2" resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.3.2.tgz#70ecc7d4d4114921f5d298349ff86a31a9975224" + integrity sha512-JVJbM+f3d3Q704rF4bqQ5UUyTtuJ0JRKNbTKVEeujCCBoMdkEi+V+e8oktO9qGQNSvHrFTM6JZRXrUvGR1czww== dependencies: source-map "~0.6.0" cliui@^7.0.2: version "7.0.4" - resolved "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" + integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== dependencies: string-width "^4.2.0" strip-ansi "^6.0.0" @@ -5430,95 +5915,120 @@ cliui@^7.0.2: clsx@^1.1.0, clsx@^1.2.1: version "1.2.1" - resolved "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz" + resolved "https://registry.yarnpkg.com/clsx/-/clsx-1.2.1.tgz#0ddc4a20a549b59c93a4116bb26f5294ca17dc12" + integrity sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg== + +clsx@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/clsx/-/clsx-2.0.0.tgz#12658f3fd98fafe62075595a5c30e43d18f3d00b" + integrity sha512-rQ1+kcj+ttHG0MKVGBUXwayCCF1oh39BF5COIpRzuCEv8Mwjv0XucrI2ExNTOn9IlLifGClWQcU9BrZORvtw6Q== co@^4.6.0: version "4.6.0" - resolved "https://registry.npmjs.org/co/-/co-4.6.0.tgz" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== coa@^2.0.2: version "2.0.2" - resolved "https://registry.npmjs.org/coa/-/coa-2.0.2.tgz" + resolved "https://registry.yarnpkg.com/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3" + integrity sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA== dependencies: "@types/q" "^1.5.1" chalk "^2.4.1" q "^1.1.2" collect-v8-coverage@^1.0.0: - version "1.0.1" - resolved "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz" + version "1.0.2" + resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz#c0b29bcd33bcd0779a1344c2136051e6afd3d9e9" + integrity sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q== color-convert@^1.9.0: version "1.9.3" - resolved "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" color-convert@^2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== dependencies: color-name "~1.1.4" color-name@1.1.3: version "1.1.3" - resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== color-name@~1.1.4: version "1.1.4" - resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== colord@^2.9.1: version "2.9.3" - resolved "https://registry.npmjs.org/colord/-/colord-2.9.3.tgz" + resolved "https://registry.yarnpkg.com/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43" + integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== colorette@^2.0.10: version "2.0.20" resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" + integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== combined-stream@^1.0.8: version "1.0.8" - resolved "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity 
sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" commander@^2.20.0, commander@^2.20.3: version "2.20.3" - resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" + integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== commander@^4.0.0: version "4.1.1" - resolved "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz" + resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068" + integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== commander@^7.2.0: version "7.2.0" - resolved "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz" + resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" + integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== commander@^8.3.0: version "8.3.0" - resolved "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz" + resolved "https://registry.yarnpkg.com/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" + integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== common-path-prefix@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/common-path-prefix/-/common-path-prefix-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" + integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== common-tags@^1.8.0: version "1.8.2" - resolved "https://registry.npmjs.org/common-tags/-/common-tags-1.8.2.tgz" + resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" + integrity sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA== commondir@^1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" + integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg== compressible@~2.0.16: version "2.0.18" - resolved "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz" + resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" + integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== dependencies: mime-db ">= 1.43.0 < 2" compression@^1.7.4: version "1.7.4" - resolved "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz" + resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" + integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== dependencies: accepts "~1.3.5" bytes "3.0.0" @@ -5530,69 +6040,84 @@ compression@^1.7.4: concat-map@0.0.1: version "0.0.1" - resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" 
+ integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== confusing-browser-globals@^1.0.11: version "1.0.11" - resolved "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz" + resolved "https://registry.yarnpkg.com/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz#ae40e9b57cdd3915408a2805ebd3a5585608dc81" + integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA== connect-history-api-fallback@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8" + integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== content-disposition@0.5.4: version "0.5.4" - resolved "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== dependencies: safe-buffer "5.2.1" content-type@~1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" + integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== convert-source-map@^1.4.0, convert-source-map@^1.5.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: version "1.9.0" - resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" + integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== cookie-signature@1.0.6: version "1.0.6" - resolved "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz" + resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== cookie@0.5.0: version "0.5.0" - resolved "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" + integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== cookie@^0.4.0: version "0.4.2" - resolved "https://registry.npmjs.org/cookie/-/cookie-0.4.2.tgz" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.2.tgz#0e41f24de5ecf317947c82fc789e06a884824432" + integrity sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA== copy-to-clipboard@^3.3.1: version "3.3.3" resolved "https://registry.yarnpkg.com/copy-to-clipboard/-/copy-to-clipboard-3.3.3.tgz#55ac43a1db8ae639a4bd99511c148cdd1b83a1b0" + integrity sha512-2KV8NhB5JqC3ky0r9PMCAZKbUHSwtEo4CwCs0KXgruG43gX5PMqDEBbVU4OUzw2MuAWUfsuFmWvEKG5QRfSnJA== dependencies: toggle-selection "^1.0.6" core-js-compat@^3.31.0: - version "3.31.1" - resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.31.1.tgz#5084ad1a46858df50ff89ace152441a63ba7aae0" 
+ version "3.32.0" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.32.0.tgz#f41574b6893ab15ddb0ac1693681bd56c8550a90" + integrity sha512-7a9a3D1k4UCVKnLhrgALyFcP7YCsLOQIxPd0dKjf/6GuPcgyiGP70ewWdCGrSK7evyhymi0qO4EqCmSJofDeYw== dependencies: browserslist "^4.21.9" core-js-pure@^3.23.3: - version "3.31.1" - resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.31.1.tgz#73d154958881873bc19381df80bddb20c8d0cdb5" + version "3.32.0" + resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.32.0.tgz#5d79f85da7a4373e9a06494ccbef995a4c639f8b" + integrity sha512-qsev1H+dTNYpDUEURRuOXMvpdtAnNEvQWS/FMJ2Vb5AY8ZP4rAPQldkE27joykZPJTe0+IVgHZYh1P5Xu1/i1g== core-js@^3.0.1, core-js@^3.19.2: - version "3.31.1" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.31.1.tgz#f2b0eea9be9da0def2c5fece71064a7e5d687653" + version "3.32.0" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.32.0.tgz#7643d353d899747ab1f8b03d2803b0312a0fb3b6" + integrity sha512-rd4rYZNlF3WuoYuRIDEmbR/ga9CeuWX9U05umAvgrrZoHY4Z++cp/xwPQMvUpBB4Ag6J8KfD80G0zwCyaSxDww== core-util-is@~1.0.0: version "1.0.3" - resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" + integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== cosmiconfig@^6.0.0: version "6.0.0" - resolved "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-6.0.0.tgz" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" + integrity sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg== dependencies: "@types/parse-json" "^4.0.0" import-fresh "^3.1.0" @@ -5602,7 +6127,8 @@ cosmiconfig@^6.0.0: cosmiconfig@^7.0.0: version "7.1.0" - resolved "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.1.0.tgz" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.1.0.tgz#1443b9afa596b670082ea46cbd8f6a62b84635f6" + integrity sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA== dependencies: "@types/parse-json" "^4.0.0" import-fresh "^3.2.1" @@ -5612,19 +6138,22 @@ cosmiconfig@^7.0.0: cross-env@^5.2.0: version "5.2.1" - resolved "https://registry.npmjs.org/cross-env/-/cross-env-5.2.1.tgz" + resolved "https://registry.yarnpkg.com/cross-env/-/cross-env-5.2.1.tgz#b2c76c1ca7add66dc874d11798466094f551b34d" + integrity sha512-1yHhtcfAd1r4nwQgknowuUNfIT9E8dOMMspC36g45dN+iD1blloi7xp8X/xAIDnjHWyt1uQ8PHk2fkNaym7soQ== dependencies: cross-spawn "^6.0.5" cross-fetch@^3.0.4, cross-fetch@^3.1.5: - version "3.1.6" - resolved "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.1.6.tgz" + version "3.1.8" + resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.8.tgz#0327eba65fd68a7d119f8fb2bf9334a1a7956f82" + integrity sha512-cvA+JwZoU0Xq+h6WkMvAUqPEYy92Obet6UdKLfW60qn99ftItKjB5T+BkyWOFWe2pUyfQ+IJHmpOTznqk1M6Kg== dependencies: - node-fetch "^2.6.11" + node-fetch "^2.6.12" cross-spawn@^6.0.5: version "6.0.5" - resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" + integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== dependencies: nice-try "^1.0.4" path-key "^2.0.1" @@ -5634,7 +6163,8 
@@ cross-spawn@^6.0.5: cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: version "7.0.3" - resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== dependencies: path-key "^3.1.0" shebang-command "^2.0.0" @@ -5642,27 +6172,32 @@ cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: crypto-random-string@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" + integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== css-blank-pseudo@^3.0.3: version "3.0.3" - resolved "https://registry.npmjs.org/css-blank-pseudo/-/css-blank-pseudo-3.0.3.tgz" + resolved "https://registry.yarnpkg.com/css-blank-pseudo/-/css-blank-pseudo-3.0.3.tgz#36523b01c12a25d812df343a32c322d2a2324561" + integrity sha512-VS90XWtsHGqoM0t4KpH053c4ehxZ2E6HtGI7x68YFV0pTo/QmkV/YFA+NnlvK8guxZVNWGQhVNJGC39Q8XF4OQ== dependencies: postcss-selector-parser "^6.0.9" css-declaration-sorter@^6.3.1: version "6.4.1" resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-6.4.1.tgz#28beac7c20bad7f1775be3a7129d7eae409a3a71" + integrity sha512-rtdthzxKuyq6IzqX6jEcIzQF/YqccluefyCYheovBOLhFT/drQA9zj/UbRAa9J7C0o6EG6u3E6g+vKkay7/k3g== css-has-pseudo@^3.0.4: version "3.0.4" - resolved "https://registry.npmjs.org/css-has-pseudo/-/css-has-pseudo-3.0.4.tgz" + resolved "https://registry.yarnpkg.com/css-has-pseudo/-/css-has-pseudo-3.0.4.tgz#57f6be91ca242d5c9020ee3e51bbb5b89fc7af73" + integrity sha512-Vse0xpR1K9MNlp2j5w1pgWIJtm1a8qS0JwS9goFYcImjlHEmywP9VUF05aGBXzGpDJF86QXk4L0ypBmwPhGArw== dependencies: postcss-selector-parser "^6.0.9" css-loader@^6.5.1: version "6.8.1" resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-6.8.1.tgz#0f8f52699f60f5e679eab4ec0fcd68b8e8a50a88" + integrity sha512-xDAXtEVGlD0gJ07iclwWVkLoZOpEvAWaSyf6W18S2pOC//K8+qUDIx8IIT3D+HjnmkJPQeesOPv5aiUaJsCM2g== dependencies: icss-utils "^5.1.0" postcss "^8.4.21" @@ -5675,7 +6210,8 @@ css-loader@^6.5.1: css-minimizer-webpack-plugin@^3.2.0: version "3.4.1" - resolved "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz" + resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz#ab78f781ced9181992fe7b6e4f3422e76429878f" + integrity sha512-1u6D71zeIfgngN2XNRJefc/hY7Ybsxd74Jm4qngIXyUEk7fss3VUzuHxLAq/R8NAba4QU9OUSaMZlbpRc7bM4Q== dependencies: cssnano "^5.0.6" jest-worker "^27.0.2" @@ -5686,15 +6222,18 @@ css-minimizer-webpack-plugin@^3.2.0: css-prefers-color-scheme@^6.0.3: version "6.0.3" - resolved "https://registry.npmjs.org/css-prefers-color-scheme/-/css-prefers-color-scheme-6.0.3.tgz" + resolved "https://registry.yarnpkg.com/css-prefers-color-scheme/-/css-prefers-color-scheme-6.0.3.tgz#ca8a22e5992c10a5b9d315155e7caee625903349" + integrity sha512-4BqMbZksRkJQx2zAjrokiGMd07RqOa2IxIrrN10lyBe9xhn9DEvjUK79J6jkeiv9D9hQFXKb6g1jwU62jziJZA== css-select-base-adapter@^0.1.1: version "0.1.1" - resolved "https://registry.npmjs.org/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz" + resolved 
"https://registry.yarnpkg.com/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz#3b2ff4972cc362ab88561507a95408a1432135d7" + integrity sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w== css-select@^2.0.0: version "2.1.0" - resolved "https://registry.npmjs.org/css-select/-/css-select-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/css-select/-/css-select-2.1.0.tgz#6a34653356635934a81baca68d0255432105dbef" + integrity sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ== dependencies: boolbase "^1.0.0" css-what "^3.2.1" @@ -5703,7 +6242,8 @@ css-select@^2.0.0: css-select@^4.1.3: version "4.3.0" - resolved "https://registry.npmjs.org/css-select/-/css-select-4.3.0.tgz" + resolved "https://registry.yarnpkg.com/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b" + integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== dependencies: boolbase "^1.0.0" css-what "^6.0.1" @@ -5713,52 +6253,62 @@ css-select@^4.1.3: css-tree@1.0.0-alpha.37: version "1.0.0-alpha.37" - resolved "https://registry.npmjs.org/css-tree/-/css-tree-1.0.0-alpha.37.tgz" + resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.37.tgz#98bebd62c4c1d9f960ec340cf9f7522e30709a22" + integrity sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg== dependencies: mdn-data "2.0.4" source-map "^0.6.1" css-tree@^1.1.2, css-tree@^1.1.3: version "1.1.3" - resolved "https://registry.npmjs.org/css-tree/-/css-tree-1.1.3.tgz" + resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d" + integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== dependencies: mdn-data "2.0.14" source-map "^0.6.1" css-vendor@^2.0.8: version "2.0.8" - resolved "https://registry.npmjs.org/css-vendor/-/css-vendor-2.0.8.tgz" + resolved "https://registry.yarnpkg.com/css-vendor/-/css-vendor-2.0.8.tgz#e47f91d3bd3117d49180a3c935e62e3d9f7f449d" + integrity sha512-x9Aq0XTInxrkuFeHKbYC7zWY8ai7qJ04Kxd9MnvbC1uO5DagxoHQjm4JvG+vCdXOoFtCjbL2XSZfxmoYa9uQVQ== dependencies: "@babel/runtime" "^7.8.3" is-in-browser "^1.0.2" css-what@^3.2.1: version "3.4.2" - resolved "https://registry.npmjs.org/css-what/-/css-what-3.4.2.tgz" + resolved "https://registry.yarnpkg.com/css-what/-/css-what-3.4.2.tgz#ea7026fcb01777edbde52124e21f327e7ae950e4" + integrity sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ== css-what@^6.0.1: version "6.1.0" - resolved "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz" + resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" + integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== css.escape@^1.5.1: version "1.5.1" - resolved "https://registry.npmjs.org/css.escape/-/css.escape-1.5.1.tgz" + resolved "https://registry.yarnpkg.com/css.escape/-/css.escape-1.5.1.tgz#42e27d4fa04ae32f931a4b4d4191fa9cddee97cb" + integrity sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg== cssdb@^7.1.0: - version "7.6.0" - resolved "https://registry.yarnpkg.com/cssdb/-/cssdb-7.6.0.tgz#beac8f7a5f676db62d3c33da517ef4c9eb008f8b" + version "7.7.0" + resolved "https://registry.yarnpkg.com/cssdb/-/cssdb-7.7.0.tgz#8a62f1c825c019134e7830729f050c29e3eca95e" + integrity 
sha512-1hN+I3r4VqSNQ+OmMXxYexnumbOONkSil0TWMebVXHtzYW4tRRPovUNHPHj2d4nrgOuYJ8Vs3XwvywsuwwXNNA== cssesc@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" + integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== cssfilter@0.0.10: version "0.0.10" - resolved "https://registry.npmjs.org/cssfilter/-/cssfilter-0.0.10.tgz" + resolved "https://registry.yarnpkg.com/cssfilter/-/cssfilter-0.0.10.tgz#c6d2672632a2e5c83e013e6864a42ce8defd20ae" + integrity sha512-FAaLDaplstoRsDR8XGYH51znUN0UY7nMc6Z9/fvE8EXGwvJE9hu7W2vHwx1+bd6gCYnln9nLbzxFTrcO9YQDZw== cssnano-preset-default@^5.2.14: version "5.2.14" resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-5.2.14.tgz#309def4f7b7e16d71ab2438052093330d9ab45d8" + integrity sha512-t0SFesj/ZV2OTylqQVOrFgEh5uanxbO6ZAdeCrNsUQ6fVuXwYTxJPNAGvGTxHbD68ldIJNec7PyYZDBrfDQ+6A== dependencies: css-declaration-sorter "^6.3.1" cssnano-utils "^3.1.0" @@ -5792,11 +6342,13 @@ cssnano-preset-default@^5.2.14: cssnano-utils@^3.1.0: version "3.1.0" - resolved "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/cssnano-utils/-/cssnano-utils-3.1.0.tgz#95684d08c91511edfc70d2636338ca37ef3a6861" + integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA== cssnano@^5.0.6: version "5.1.15" resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-5.1.15.tgz#ded66b5480d5127fcb44dac12ea5a983755136bf" + integrity sha512-j+BKgDcLDQA+eDifLx0EO4XSA56b7uut3BQFH+wbSaSTuGLuiyTa/wbRYthUXX8LC9mLg+WWKe8h+qJuwTAbHw== dependencies: cssnano-preset-default "^5.2.14" lilconfig "^2.0.3" @@ -5804,39 +6356,47 @@ cssnano@^5.0.6: csso@^4.0.2, csso@^4.2.0: version "4.2.0" - resolved "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz" + resolved "https://registry.yarnpkg.com/csso/-/csso-4.2.0.tgz#ea3a561346e8dc9f546d6febedd50187cf389529" + integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== dependencies: css-tree "^1.1.2" cssom@^0.4.4: version "0.4.4" - resolved "https://registry.npmjs.org/cssom/-/cssom-0.4.4.tgz" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" + integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== cssom@~0.3.6: version "0.3.8" - resolved "https://registry.npmjs.org/cssom/-/cssom-0.3.8.tgz" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" + integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== cssstyle@^2.3.0: version "2.3.0" - resolved "https://registry.npmjs.org/cssstyle/-/cssstyle-2.3.0.tgz" + resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" + integrity sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== dependencies: cssom "~0.3.6" csstype@^2.5.7: version "2.6.21" resolved "https://registry.yarnpkg.com/csstype/-/csstype-2.6.21.tgz#2efb85b7cc55c80017c66a5ad7cbd931fda3a90e" + integrity sha512-Z1PhmomIfypOpoMjRQB70jfvy/wxT50qW08YXO5lMIJkrdq4yOTR+AW7FqutScmB9NkLwxo+jU+kZLbofZZq/w== csstype@^3.0.2, csstype@^3.1.2: version "3.1.2" resolved 
"https://registry.yarnpkg.com/csstype/-/csstype-3.1.2.tgz#1d4bf9d572f11c14031f0436e1c10bc1f571f50b" + integrity sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ== damerau-levenshtein@^1.0.8: version "1.0.8" - resolved "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz" + resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" + integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== data-urls@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/data-urls/-/data-urls-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" + integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== dependencies: abab "^2.0.3" whatwg-mimetype "^2.3.0" @@ -5845,47 +6405,55 @@ data-urls@^2.0.0: date-fns@^2.28.0: version "2.30.0" resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-2.30.0.tgz#f367e644839ff57894ec6ac480de40cae4b0f4d0" + integrity sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw== dependencies: "@babel/runtime" "^7.21.0" dayjs@^1.11.0, dayjs@^1.11.7: - version "1.11.8" - resolved "https://registry.npmjs.org/dayjs/-/dayjs-1.11.8.tgz" + version "1.11.9" + resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.9.tgz#9ca491933fadd0a60a2c19f6c237c03517d71d1a" + integrity sha512-QvzAURSbQ0pKdIye2txOzNaHmxtUBXerpY0FJsFXUMKbIZeFm5ht1LS/jFsrncjnmtv8HsG0W2g6c0zUjZWmpA== debug@2.6.9, debug@^2.6.0: version "2.6.9" - resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: version "4.3.4" - resolved "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== dependencies: ms "2.1.2" debug@^3.2.7: version "3.2.7" - resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== dependencies: ms "^2.1.1" decimal.js@^10.2.1: version "10.4.3" - resolved "https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.3.tgz" + resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.4.3.tgz#1044092884d245d1b7f65725fa4ad4c6f781cc23" + integrity sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA== dedent@^0.7.0: version "0.7.0" - resolved "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz" + resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" + integrity sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA== deep-equal@^2.0.5: - version "2.2.1" - resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-2.2.1.tgz#c72ab22f3a7d3503a4ca87dde976fe9978816739" + version "2.2.2" + 
resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-2.2.2.tgz#9b2635da569a13ba8e1cc159c2f744071b115daa" + integrity sha512-xjVyBf0w5vH0I42jdAZzOKVldmPgSulmiyPRywoyq7HXC9qdgo17kxJE+rdnif5Tz6+pIrpJI8dCpMNLIGkUiA== dependencies: array-buffer-byte-length "^1.0.0" call-bind "^1.0.2" es-get-iterator "^1.1.3" - get-intrinsic "^1.2.0" + get-intrinsic "^1.2.1" is-arguments "^1.1.1" is-array-buffer "^3.0.2" is-date-object "^1.0.5" @@ -5901,141 +6469,169 @@ deep-equal@^2.0.5: which-collection "^1.0.1" which-typed-array "^1.1.9" -deep-is@^0.1.3, deep-is@~0.1.3: +deep-is@^0.1.3: version "0.1.4" - resolved "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== deepmerge@^2.1.1: version "2.2.1" - resolved "https://registry.npmjs.org/deepmerge/-/deepmerge-2.2.1.tgz" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-2.2.1.tgz#5d3ff22a01c00f645405a2fbc17d0778a1801170" + integrity sha512-R9hc1Xa/NOBi9WRVUWg19rl1UB7Tt4kuPd+thNJgFZoxXsTz7ncaPaeIm+40oSGuP33DfMb4sZt1QIGiJzC4EA== deepmerge@^4.2.2: version "4.3.1" resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" + integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== default-gateway@^6.0.3: version "6.0.3" - resolved "https://registry.npmjs.org/default-gateway/-/default-gateway-6.0.3.tgz" + resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71" + integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== dependencies: execa "^5.0.0" define-lazy-prop@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" + integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== define-properties@^1.1.3, define-properties@^1.1.4, define-properties@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.0.tgz#52988570670c9eacedd8064f4a990f2405849bd5" + integrity sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA== dependencies: has-property-descriptors "^1.0.0" object-keys "^1.1.1" delayed-stream@~1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== depd@2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== depd@~1.1.2: version "1.1.2" - resolved "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz" + resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" + integrity 
sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== dequal@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be" + integrity sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA== destroy@1.2.0: version "1.2.0" - resolved "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" + integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== detect-newline@^3.0.0: version "3.1.0" - resolved "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== detect-node@^2.0.4: version "2.1.0" - resolved "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" + integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== detect-port-alt@^1.1.6: version "1.1.6" - resolved "https://registry.npmjs.org/detect-port-alt/-/detect-port-alt-1.1.6.tgz" + resolved "https://registry.yarnpkg.com/detect-port-alt/-/detect-port-alt-1.1.6.tgz#24707deabe932d4a3cf621302027c2b266568275" + integrity sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== dependencies: address "^1.0.1" debug "^2.6.0" didyoumean@^1.2.2: version "1.2.2" - resolved "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz" + resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" + integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== diff-sequences@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-27.5.1.tgz#eaecc0d327fd68c8d9672a1e64ab8dccb2ef5327" + integrity sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ== diff-sequences@^29.4.3: version "29.4.3" resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-29.4.3.tgz#9314bc1fabe09267ffeca9cbafc457d8499a13f2" + integrity sha512-ofrBgwpPhCD85kMKtE9RYFFq6OC1A89oW2vvgWZNCwxrUpRUILopY7lsYyMDSjc8g6U6aiO0Qubg6r4Wgt5ZnA== dir-glob@^3.0.1: version "3.0.1" - resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== dependencies: path-type "^4.0.0" dlv@^1.1.3: version "1.1.3" - resolved "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz" + resolved "https://registry.yarnpkg.com/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" + integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== dns-equal@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz" + resolved 
"https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" + integrity sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg== dns-packet@^5.2.2: version "5.6.0" resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-5.6.0.tgz#2202c947845c7a63c23ece58f2f70ff6ab4c2f7d" + integrity sha512-rza3UH1LwdHh9qyPXp8lkwpjSNk/AMD3dPytUoRoqnypDUhY0xvbdmVhWOfxO68frEfV9BU8V12Ez7ZsHGZpCQ== dependencies: "@leichtgewicht/ip-codec" "^2.0.1" doctrine@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== dependencies: esutils "^2.0.2" doctrine@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== dependencies: esutils "^2.0.2" dom-accessibility-api@^0.5.6, dom-accessibility-api@^0.5.9: version "0.5.16" resolved "https://registry.yarnpkg.com/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz#5a7429e6066eb3664d911e33fb0e45de8eb08453" + integrity sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg== dom-converter@^0.2.0: version "0.2.0" - resolved "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz" + resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" + integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== dependencies: utila "~0.4" dom-helpers@^5.0.1: version "5.2.1" - resolved "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz" + resolved "https://registry.yarnpkg.com/dom-helpers/-/dom-helpers-5.2.1.tgz#d9400536b2bf8225ad98fe052e029451ac40e902" + integrity sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA== dependencies: "@babel/runtime" "^7.8.7" csstype "^3.0.2" dom-serializer@0: version "0.2.2" - resolved "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.2.2.tgz" + resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" + integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== dependencies: domelementtype "^2.0.1" entities "^2.0.0" dom-serializer@^1.0.1: version "1.4.1" - resolved "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz" + resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30" + integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== dependencies: domelementtype "^2.0.1" domhandler "^4.2.0" @@ -6043,34 +6639,40 @@ dom-serializer@^1.0.1: domelementtype@1: version "1.3.1" - resolved "https://registry.npmjs.org/domelementtype/-/domelementtype-1.3.1.tgz" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" + integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== 
domelementtype@^2.0.1, domelementtype@^2.2.0: version "2.3.0" - resolved "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" + integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== domexception@^2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/domexception/-/domexception-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" + integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== dependencies: webidl-conversions "^5.0.0" domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: version "4.3.1" - resolved "https://registry.npmjs.org/domhandler/-/domhandler-4.3.1.tgz" + resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c" + integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== dependencies: domelementtype "^2.2.0" domutils@^1.7.0: version "1.7.0" - resolved "https://registry.npmjs.org/domutils/-/domutils-1.7.0.tgz" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" + integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== dependencies: dom-serializer "0" domelementtype "1" domutils@^2.5.2, domutils@^2.8.0: version "2.8.0" - resolved "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" + integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== dependencies: dom-serializer "^1.0.1" domelementtype "^2.2.0" @@ -6078,78 +6680,94 @@ domutils@^2.5.2, domutils@^2.8.0: dot-case@^3.0.4: version "3.0.4" - resolved "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz" + resolved "https://registry.yarnpkg.com/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" + integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== dependencies: no-case "^3.0.4" tslib "^2.0.3" dotenv-expand@^5.1.0: version "5.1.0" - resolved "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-5.1.0.tgz" + resolved "https://registry.yarnpkg.com/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0" + integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== dotenv@^10.0.0: version "10.0.0" - resolved "https://registry.npmjs.org/dotenv/-/dotenv-10.0.0.tgz" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" + integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== downshift@^1.31.2: version "1.31.16" - resolved "https://registry.npmjs.org/downshift/-/downshift-1.31.16.tgz" + resolved "https://registry.yarnpkg.com/downshift/-/downshift-1.31.16.tgz#acd81631539502d4112d01bd573654419fd9f640" + integrity sha512-RskXmiGSoz0EHAyBrmTBGSLHg6+NYDGuLu2W3GpmuOe6hmZEWhCiQrq5g6DWzhnUaJD41xHbbfC6j1Fe86YqgA== duplexer@^0.1.2: version "0.1.2" - resolved "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz" + resolved 
"https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" + integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== echarts-for-react@^3.0.2: version "3.0.2" - resolved "https://registry.npmjs.org/echarts-for-react/-/echarts-for-react-3.0.2.tgz" + resolved "https://registry.yarnpkg.com/echarts-for-react/-/echarts-for-react-3.0.2.tgz#ac5859157048a1066d4553e34b328abb24f2b7c1" + integrity sha512-DRwIiTzx8JfwPOVgGttDytBqdp5VzCSyMRIxubgU/g2n9y3VLUmF2FK7Icmg/sNVkv4+rktmrLN9w22U2yy3fA== dependencies: fast-deep-equal "^3.1.3" size-sensor "^1.0.1" echarts@^5.3.3: - version "5.4.2" - resolved "https://registry.npmjs.org/echarts/-/echarts-5.4.2.tgz" + version "5.4.3" + resolved "https://registry.yarnpkg.com/echarts/-/echarts-5.4.3.tgz#f5522ef24419164903eedcfd2b506c6fc91fb20c" + integrity sha512-mYKxLxhzy6zyTi/FaEbJMOZU1ULGEQHaeIeuMR5L+JnJTpz+YR03mnnpBhbR4+UYJAgiXgpyTVLffPAjOTLkZA== dependencies: tslib "2.3.0" - zrender "5.4.3" + zrender "5.4.4" ee-first@1.1.1: version "1.1.1" - resolved "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz" + resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== ejs@^3.1.6: version "3.1.9" resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.9.tgz#03c9e8777fe12686a9effcef22303ca3d8eeb361" + integrity sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ== dependencies: jake "^10.8.5" -electron-to-chromium@^1.4.431: - version "1.4.454" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.454.tgz#774dc7cb5e58576d0125939ec34a4182f3ccc87d" +electron-to-chromium@^1.4.477: + version "1.4.494" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.494.tgz#588f7a3d19d32a31f3a7e05d81b61d95d25b1555" + integrity sha512-KF7wtsFFDu4ws1ZsSOt4pdmO1yWVNWCFtijVYZPUeW4SV7/hy/AESjLn/+qIWgq7mHscNOKAwN5AIM1+YAy+Ww== emittery@^0.10.2: version "0.10.2" - resolved "https://registry.npmjs.org/emittery/-/emittery-0.10.2.tgz" + resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" + integrity sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw== emittery@^0.8.1: version "0.8.1" - resolved "https://registry.npmjs.org/emittery/-/emittery-0.8.1.tgz" + resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" + integrity sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg== emoji-regex@^8.0.0: version "8.0.0" - resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== emoji-regex@^9.2.2: version "9.2.2" - resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== emojis-list@^3.0.0: version "3.0.0" - resolved 
"https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" + integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== emotion-theming@^10.0.27: version "10.3.0" - resolved "https://registry.npmjs.org/emotion-theming/-/emotion-theming-10.3.0.tgz" + resolved "https://registry.yarnpkg.com/emotion-theming/-/emotion-theming-10.3.0.tgz#7f84d7099581d7ffe808aab5cd870e30843db72a" + integrity sha512-mXiD2Oj7N9b6+h/dC6oLf9hwxbtKHQjoIqtodEyL8CpkN4F3V4IK/BT4D0C7zSs4BBFOu4UlPJbvvBLa88SGEA== dependencies: "@babel/runtime" "^7.5.5" "@emotion/weak-memoize" "0.2.5" @@ -6157,49 +6775,57 @@ emotion-theming@^10.0.27: encodeurl@~1.0.2: version "1.0.2" - resolved "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== enhanced-resolve@^5.15.0: version "5.15.0" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz#1af946c7d93603eb88e9896cee4904dc012e9c35" + integrity sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg== dependencies: graceful-fs "^4.2.4" tapable "^2.2.0" entities@2.2.0, entities@^2.0.0: version "2.2.0" - resolved "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz" + resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" + integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== env-cmd@^10.1.0: version "10.1.0" - resolved "https://registry.npmjs.org/env-cmd/-/env-cmd-10.1.0.tgz" + resolved "https://registry.yarnpkg.com/env-cmd/-/env-cmd-10.1.0.tgz#c7f5d3b550c9519f137fdac4dd8fb6866a8c8c4b" + integrity sha512-mMdWTT9XKN7yNth/6N6g2GuKuJTsKMDHlQFUDacb/heQRRWOTIZ42t1rMHnQu4jYxU1ajdTeJM+9eEETlqToMA== dependencies: commander "^4.0.0" cross-spawn "^7.0.0" error-ex@^1.3.1: version "1.3.2" - resolved "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" error-stack-parser@^2.0.6: version "2.1.4" - resolved "https://registry.npmjs.org/error-stack-parser/-/error-stack-parser-2.1.4.tgz" + resolved "https://registry.yarnpkg.com/error-stack-parser/-/error-stack-parser-2.1.4.tgz#229cb01cdbfa84440bfa91876285b94680188286" + integrity sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ== dependencies: stackframe "^1.3.4" -es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.20.4, es-abstract@^1.21.2: - version "1.21.2" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.21.2.tgz#a56b9695322c8a185dc25975aa3b8ec31d0e7eff" +es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.20.4, es-abstract@^1.21.2, es-abstract@^1.21.3: + version "1.22.1" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.22.1.tgz#8b4e5fc5cefd7f1660f0f8e1a52900dfbc9d9ccc" + integrity sha512-ioRRcXMO6OFyRpyzV3kE1IIBd4WG5/kltnzdxSCqoP8CMGs/Li+M1uF5o7lOkZVFjDs+NLesthnF66Pg/0q0Lw== dependencies: array-buffer-byte-length "^1.0.0" + 
arraybuffer.prototype.slice "^1.0.1" available-typed-arrays "^1.0.5" call-bind "^1.0.2" es-set-tostringtag "^2.0.1" es-to-primitive "^1.2.1" function.prototype.name "^1.1.5" - get-intrinsic "^1.2.0" + get-intrinsic "^1.2.1" get-symbol-description "^1.0.0" globalthis "^1.0.3" gopd "^1.0.1" @@ -6219,22 +6845,28 @@ es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.20.4, es-abstract@^1.21 object-inspect "^1.12.3" object-keys "^1.1.1" object.assign "^4.1.4" - regexp.prototype.flags "^1.4.3" + regexp.prototype.flags "^1.5.0" + safe-array-concat "^1.0.0" safe-regex-test "^1.0.0" string.prototype.trim "^1.2.7" string.prototype.trimend "^1.0.6" string.prototype.trimstart "^1.0.6" + typed-array-buffer "^1.0.0" + typed-array-byte-length "^1.0.0" + typed-array-byte-offset "^1.0.0" typed-array-length "^1.0.4" unbox-primitive "^1.0.2" - which-typed-array "^1.1.9" + which-typed-array "^1.1.10" es-array-method-boxes-properly@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz#873f3e84418de4ee19c5be752990b2e44718d09e" + integrity sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA== es-get-iterator@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/es-get-iterator/-/es-get-iterator-1.1.3.tgz#3ef87523c5d464d41084b2c3c9c214f1199763d6" + integrity sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw== dependencies: call-bind "^1.0.2" get-intrinsic "^1.1.3" @@ -6246,13 +6878,35 @@ es-get-iterator@^1.1.3: isarray "^2.0.5" stop-iteration-iterator "^1.0.0" +es-iterator-helpers@^1.0.12: + version "1.0.13" + resolved "https://registry.yarnpkg.com/es-iterator-helpers/-/es-iterator-helpers-1.0.13.tgz#72101046ffc19baf9996adc70e6177a26e6e8084" + integrity sha512-LK3VGwzvaPWobO8xzXXGRUOGw8Dcjyfk62CsY/wfHN75CwsJPbuypOYJxK6g5RyEL8YDjIWcl6jgd8foO6mmrA== + dependencies: + asynciterator.prototype "^1.0.0" + call-bind "^1.0.2" + define-properties "^1.2.0" + es-abstract "^1.21.3" + es-set-tostringtag "^2.0.1" + function-bind "^1.1.1" + get-intrinsic "^1.2.1" + globalthis "^1.0.3" + has-property-descriptors "^1.0.0" + has-proto "^1.0.1" + has-symbols "^1.0.3" + internal-slot "^1.0.5" + iterator.prototype "^1.1.0" + safe-array-concat "^1.0.0" + es-module-lexer@^1.2.1: version "1.3.0" resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.3.0.tgz#6be9c9e0b4543a60cd166ff6f8b4e9dae0b0c16f" + integrity sha512-vZK7T0N2CBmBOixhmjdqx2gWVbFZ4DXZ/NyRMZVlJXPa7CyFS+/a4QQsDGDQy9ZfEzxFuNEsMLeQJnKP2p5/JA== es-set-tostringtag@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz#338d502f6f674301d710b80c8592de8a15f09cd8" + integrity sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg== dependencies: get-intrinsic "^1.1.3" has "^1.0.3" @@ -6260,13 +6914,15 @@ es-set-tostringtag@^2.0.1: es-shim-unscopables@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" + integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== dependencies: has "^1.0.3" es-to-primitive@^1.2.1: version "1.2.1" - 
resolved "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== dependencies: is-callable "^1.1.4" is-date-object "^1.0.1" @@ -6274,38 +6930,49 @@ es-to-primitive@^1.2.1: escalade@^3.1.1: version "3.1.1" - resolved "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== escape-html@~1.0.3: version "1.0.3" - resolved "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz" + resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== escape-string-regexp@^1.0.5: version "1.0.5" - resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== escape-string-regexp@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== escape-string-regexp@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== escodegen@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/escodegen/-/escodegen-2.0.0.tgz" + version "2.1.0" + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-2.1.0.tgz#ba93bbb7a43986d29d6041f99f5262da773e2e17" + integrity sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w== dependencies: esprima "^4.0.1" estraverse "^5.2.0" esutils "^2.0.2" - optionator "^0.8.1" optionalDependencies: source-map "~0.6.1" +eslint-config-prettier@^8.8.0: + version "8.10.0" + resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-8.10.0.tgz#3a06a662130807e2502fc3ff8b4143d8a0658e11" + integrity sha512-SM8AMJdeQqRYT9O9zguiruQZaN7+z+E4eAP9oiLNGKMtomwaB1E9dcgUD6ZAn/eQAb52USbvezbiljfZUhbJcg== + eslint-config-react-app@^7.0.1: version "7.0.1" - resolved "https://registry.npmjs.org/eslint-config-react-app/-/eslint-config-react-app-7.0.1.tgz" + resolved "https://registry.yarnpkg.com/eslint-config-react-app/-/eslint-config-react-app-7.0.1.tgz#73ba3929978001c5c86274c017ea57eb5fa644b4" + integrity sha512-K6rNzvkIeHaTd8m/QEh1Zko0KI7BACWkkneSs6s9cKZC/J27X3eZR6Upt1jkmZ/4FK+XUOPPxMEN7+lbUXfSlA== dependencies: "@babel/core" "^7.16.0" "@babel/eslint-parser" "^7.16.3" @@ -6322,56 +6989,70 @@ eslint-config-react-app@^7.0.1: 
eslint-plugin-react-hooks "^4.3.0" eslint-plugin-testing-library "^5.0.1" +eslint-import-resolver-alias@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-alias/-/eslint-import-resolver-alias-1.1.2.tgz#297062890e31e4d6651eb5eba9534e1f6e68fc97" + integrity sha512-WdviM1Eu834zsfjHtcGHtGfcu+F30Od3V7I9Fi57uhBEwPkjDcii7/yW8jAT+gOhn4P/vOxxNAXbFAKsrrc15w== + eslint-import-resolver-node@^0.3.7: - version "0.3.7" - resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz#83b375187d412324a1963d84fa664377a23eb4d7" + version "0.3.9" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz#d4eaac52b8a2e7c3cd1903eb00f7e053356118ac" + integrity sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g== dependencies: debug "^3.2.7" - is-core-module "^2.11.0" - resolve "^1.22.1" + is-core-module "^2.13.0" + resolve "^1.22.4" -eslint-module-utils@^2.7.4: +eslint-module-utils@^2.8.0: version "2.8.0" resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz#e439fee65fc33f6bba630ff621efc38ec0375c49" + integrity sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw== dependencies: debug "^3.2.7" eslint-plugin-flowtype@^8.0.3: version "8.0.3" - resolved "https://registry.npmjs.org/eslint-plugin-flowtype/-/eslint-plugin-flowtype-8.0.3.tgz" + resolved "https://registry.yarnpkg.com/eslint-plugin-flowtype/-/eslint-plugin-flowtype-8.0.3.tgz#e1557e37118f24734aa3122e7536a038d34a4912" + integrity sha512-dX8l6qUL6O+fYPtpNRideCFSpmWOUVx5QcaGLVqe/vlDiBSe4vYljDWDETwnyFzpl7By/WVIu6rcrniCgH9BqQ== dependencies: lodash "^4.17.21" string-natural-compare "^3.0.1" -eslint-plugin-import@^2.25.3: - version "2.27.5" - resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.27.5.tgz#876a6d03f52608a3e5bb439c2550588e51dd6c65" +eslint-plugin-import@^2.25.3, eslint-plugin-import@^2.27.5: + version "2.28.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.28.0.tgz#8d66d6925117b06c4018d491ae84469eb3cb1005" + integrity sha512-B8s/n+ZluN7sxj9eUf7/pRFERX0r5bnFA2dCaLHy2ZeaQEAz0k+ZZkFWRFHJAqxfxQDx6KLv9LeIki7cFdwW+Q== dependencies: array-includes "^3.1.6" + array.prototype.findlastindex "^1.2.2" array.prototype.flat "^1.3.1" array.prototype.flatmap "^1.3.1" debug "^3.2.7" doctrine "^2.1.0" eslint-import-resolver-node "^0.3.7" - eslint-module-utils "^2.7.4" + eslint-module-utils "^2.8.0" has "^1.0.3" - is-core-module "^2.11.0" + is-core-module "^2.12.1" is-glob "^4.0.3" minimatch "^3.1.2" + object.fromentries "^2.0.6" + object.groupby "^1.0.0" object.values "^1.1.6" - resolve "^1.22.1" - semver "^6.3.0" - tsconfig-paths "^3.14.1" + resolve "^1.22.3" + semver "^6.3.1" + tsconfig-paths "^3.14.2" eslint-plugin-jest@^25.3.0: version "25.7.0" - resolved "https://registry.npmjs.org/eslint-plugin-jest/-/eslint-plugin-jest-25.7.0.tgz" + resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-25.7.0.tgz#ff4ac97520b53a96187bad9c9814e7d00de09a6a" + integrity sha512-PWLUEXeeF7C9QGKqvdSbzLOiLTx+bno7/HC9eefePfEb257QFHg7ye3dh80AZVkaa/RQsBB1Q/ORQvg2X7F0NQ== dependencies: "@typescript-eslint/experimental-utils" "^5.0.0" eslint-plugin-jsx-a11y@^6.5.1: version "6.7.1" resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.7.1.tgz#fca5e02d115f48c9a597a6894d5bcec2f7a76976" + integrity 
sha512-63Bog4iIethyo8smBklORknVjB0T2dwB8Mr/hIC+fBS0uyHdYYpzM/Ed+YC8VxTjlXHEWFOdmgwcDn1U2L9VCA== dependencies: "@babel/runtime" "^7.20.7" aria-query "^5.1.3" @@ -6390,18 +7071,28 @@ eslint-plugin-jsx-a11y@^6.5.1: object.fromentries "^2.0.6" semver "^6.3.0" +eslint-plugin-prettier@^4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz#651cbb88b1dab98bfd42f017a12fa6b2d993f94b" + integrity sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ== + dependencies: + prettier-linter-helpers "^1.0.0" + eslint-plugin-react-hooks@^4.3.0: version "4.6.0" - resolved "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz#4c3e697ad95b77e93f8646aaa1630c1ba607edd3" + integrity sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g== eslint-plugin-react@^7.27.1: - version "7.32.2" - resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.32.2.tgz#e71f21c7c265ebce01bcbc9d0955170c55571f10" + version "7.33.2" + resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.33.2.tgz#69ee09443ffc583927eafe86ffebb470ee737608" + integrity sha512-73QQMKALArI8/7xGLNI/3LylrEYrlKZSb5C9+q3OtOewTnMQi5cT+aE9E41sLCmli3I9PGGmD1yiZydyo4FEPw== dependencies: array-includes "^3.1.6" array.prototype.flatmap "^1.3.1" array.prototype.tosorted "^1.1.1" doctrine "^2.1.0" + es-iterator-helpers "^1.0.12" estraverse "^5.3.0" jsx-ast-utils "^2.4.1 || ^3.0.0" minimatch "^3.1.2" @@ -6411,40 +7102,46 @@ eslint-plugin-react@^7.27.1: object.values "^1.1.6" prop-types "^15.8.1" resolve "^2.0.0-next.4" - semver "^6.3.0" + semver "^6.3.1" string.prototype.matchall "^4.0.8" eslint-plugin-testing-library@^5.0.1: - version "5.11.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-testing-library/-/eslint-plugin-testing-library-5.11.0.tgz#0bad7668e216e20dd12f8c3652ca353009163121" + version "5.11.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-testing-library/-/eslint-plugin-testing-library-5.11.1.tgz#5b46cdae96d4a78918711c0b4792f90088e62d20" + integrity sha512-5eX9e1Kc2PqVRed3taaLnAAqPZGEX75C+M/rXzUAI3wIg/ZxzUm1OVAwfe/O+vE+6YXOLetSe9g5GKD2ecXipw== dependencies: "@typescript-eslint/utils" "^5.58.0" eslint-scope@5.1.1, eslint-scope@^5.1.1: version "5.1.1" - resolved "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== dependencies: esrecurse "^4.3.0" estraverse "^4.1.1" -eslint-scope@^7.2.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.2.0.tgz#f21ebdafda02352f103634b96dd47d9f81ca117b" +eslint-scope@^7.2.2: + version "7.2.2" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.2.2.tgz#deb4f92563390f32006894af62a22dba1c46423f" + integrity sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg== dependencies: esrecurse "^4.3.0" estraverse "^5.2.0" eslint-visitor-keys@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz" + resolved 
"https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== -eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1: - version "3.4.1" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.1.tgz#c22c48f48942d08ca824cc526211ae400478a994" +eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1, eslint-visitor-keys@^3.4.3: + version "3.4.3" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800" + integrity sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag== eslint-webpack-plugin@^3.1.1: version "3.2.0" - resolved "https://registry.npmjs.org/eslint-webpack-plugin/-/eslint-webpack-plugin-3.2.0.tgz" + resolved "https://registry.yarnpkg.com/eslint-webpack-plugin/-/eslint-webpack-plugin-3.2.0.tgz#1978cdb9edc461e4b0195a20da950cf57988347c" + integrity sha512-avrKcGncpPbPSUHX6B3stNGzkKFto3eL+DKM4+VyMrVnhPc3vRczVlCq3uhuFOdRvDHTVXuzwk1ZKUrqDQHQ9w== dependencies: "@types/eslint" "^7.29.0 || ^8.4.1" jest-worker "^28.0.2" @@ -6453,25 +7150,26 @@ eslint-webpack-plugin@^3.1.1: schema-utils "^4.0.0" eslint@^8.3.0: - version "8.44.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.44.0.tgz#51246e3889b259bbcd1d7d736a0c10add4f0e500" + version "8.47.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.47.0.tgz#c95f9b935463fb4fad7005e626c7621052e90806" + integrity sha512-spUQWrdPt+pRVP1TTJLmfRNJJHHZryFmptzcafwSvHsceV81djHOdnEeDmkdotZyLNjDhrOasNK8nikkoG1O8Q== dependencies: "@eslint-community/eslint-utils" "^4.2.0" - "@eslint-community/regexpp" "^4.4.0" - "@eslint/eslintrc" "^2.1.0" - "@eslint/js" "8.44.0" + "@eslint-community/regexpp" "^4.6.1" + "@eslint/eslintrc" "^2.1.2" + "@eslint/js" "^8.47.0" "@humanwhocodes/config-array" "^0.11.10" "@humanwhocodes/module-importer" "^1.0.1" "@nodelib/fs.walk" "^1.2.8" - ajv "^6.10.0" + ajv "^6.12.4" chalk "^4.0.0" cross-spawn "^7.0.2" debug "^4.3.2" doctrine "^3.0.0" escape-string-regexp "^4.0.0" - eslint-scope "^7.2.0" - eslint-visitor-keys "^3.4.1" - espree "^9.6.0" + eslint-scope "^7.2.2" + eslint-visitor-keys "^3.4.3" + espree "^9.6.1" esquery "^1.4.2" esutils "^2.0.2" fast-deep-equal "^3.1.3" @@ -6481,7 +7179,6 @@ eslint@^8.3.0: globals "^13.19.0" graphemer "^1.4.0" ignore "^5.2.0" - import-fresh "^3.0.0" imurmurhash "^0.1.4" is-glob "^4.0.0" is-path-inside "^3.0.3" @@ -6493,12 +7190,12 @@ eslint@^8.3.0: natural-compare "^1.4.0" optionator "^0.9.3" strip-ansi "^6.0.1" - strip-json-comments "^3.1.0" text-table "^0.2.0" -espree@^9.6.0: - version "9.6.0" - resolved "https://registry.yarnpkg.com/espree/-/espree-9.6.0.tgz#80869754b1c6560f32e3b6929194a3fe07c5b82f" +espree@^9.6.0, espree@^9.6.1: + version "9.6.1" + resolved "https://registry.yarnpkg.com/espree/-/espree-9.6.1.tgz#a2a17b8e434690a5432f2f8018ce71d331a48c6f" + integrity sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ== dependencies: acorn "^8.9.0" acorn-jsx "^5.3.2" @@ -6506,51 +7203,69 @@ espree@^9.6.0: esprima@^4.0.0, esprima@^4.0.1: version "4.0.1" - resolved "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity 
sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esquery@^1.4.2: version "1.5.0" resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b" + integrity sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg== dependencies: estraverse "^5.1.0" esrecurse@^4.3.0: version "4.3.0" - resolved "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== dependencies: estraverse "^5.2.0" estraverse@^4.1.1: version "4.3.0" - resolved "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: version "5.3.0" - resolved "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== estree-walker@^1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/estree-walker/-/estree-walker-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-1.0.1.tgz#31bc5d612c96b704106b477e6dd5d8aa138cb700" + integrity sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg== esutils@^2.0.2: version "2.0.3" - resolved "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== etag@~1.8.1: version "1.8.1" - resolved "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz" + resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== eventemitter3@^4.0.0: version "4.0.7" - resolved "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" + integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== events@^3.1.0, events@^3.2.0: version "3.3.0" - resolved "https://registry.npmjs.org/events/-/events-3.3.0.tgz" + resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" + integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== + +exec-sh@^0.2.0: + version "0.2.2" + resolved "https://registry.yarnpkg.com/exec-sh/-/exec-sh-0.2.2.tgz#2a5e7ffcbd7d0ba2755bdecb16e5a427dfbdec36" + integrity sha512-FIUCJz1RbuS0FKTdaAafAByGS0CPvU3R0MeHxgtl+djzCc//F8HakL8GzmVNZanasTbTAY/3DRFA0KpVqj/eAw== + dependencies: + merge "^1.2.0" execa@^5.0.0: version "5.1.1" - resolved "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz" + resolved 
"https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== dependencies: cross-spawn "^7.0.3" get-stream "^6.0.0" @@ -6564,11 +7279,13 @@ execa@^5.0.0: exit@^0.1.2: version "0.1.2" - resolved "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz" + resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== expect@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/expect/-/expect-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/expect/-/expect-27.5.1.tgz#83ce59f1e5bdf5f9d2b94b61d2050db48f3fef74" + integrity sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw== dependencies: "@jest/types" "^27.5.1" jest-get-type "^27.5.1" @@ -6576,19 +7293,21 @@ expect@^27.5.1: jest-message-util "^27.5.1" expect@^29.0.0: - version "29.6.1" - resolved "https://registry.yarnpkg.com/expect/-/expect-29.6.1.tgz#64dd1c8f75e2c0b209418f2b8d36a07921adfdf1" + version "29.6.2" + resolved "https://registry.yarnpkg.com/expect/-/expect-29.6.2.tgz#7b08e83eba18ddc4a2cf62b5f2d1918f5cd84521" + integrity sha512-iAErsLxJ8C+S02QbLAwgSGSezLQK+XXRDt8IuFXFpwCNw2ECmzZSmjKcCaFVp5VRMk+WAvz6h6jokzEzBFZEuA== dependencies: - "@jest/expect-utils" "^29.6.1" + "@jest/expect-utils" "^29.6.2" "@types/node" "*" jest-get-type "^29.4.3" - jest-matcher-utils "^29.6.1" - jest-message-util "^29.6.1" - jest-util "^29.6.1" + jest-matcher-utils "^29.6.2" + jest-message-util "^29.6.2" + jest-util "^29.6.2" express@^4.17.3: version "4.18.2" - resolved "https://registry.npmjs.org/express/-/express-4.18.2.tgz" + resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59" + integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ== dependencies: accepts "~1.3.8" array-flatten "1.1.1" @@ -6624,15 +7343,23 @@ express@^4.17.3: fast-base64-decode@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/fast-base64-decode/-/fast-base64-decode-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/fast-base64-decode/-/fast-base64-decode-1.0.0.tgz#b434a0dd7d92b12b43f26819300d2dafb83ee418" + integrity sha512-qwaScUgUGBYeDNRnbc/KyllVU88Jk1pRHPStuF/lO7B0/RTRLj7U0lkdTAutlBblY08rwZDff6tNU9cjv6j//Q== fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" - resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-diff@^1.1.2: + version "1.3.0" + resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.3.0.tgz#ece407fa550a64d638536cd727e129c61616e0f0" + integrity sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw== fast-glob@^3.2.12, fast-glob@^3.2.9: - version "3.2.12" - resolved "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz" + version "3.3.1" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.1.tgz#784b4e897340f3dbbef17413b3f11acf03c874c4" + integrity sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg== dependencies: 
"@nodelib/fs.stat" "^2.0.2" "@nodelib/fs.walk" "^1.2.3" @@ -6642,49 +7369,65 @@ fast-glob@^3.2.12, fast-glob@^3.2.9: fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== -fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: +fast-levenshtein@^2.0.6: version "2.0.6" - resolved "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== -fast-xml-parser@4.2.4: - version "4.2.4" - resolved "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.4.tgz" +fast-xml-parser@4.2.5: + version "4.2.5" + resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.2.5.tgz#a6747a09296a6cb34f2ae634019bf1738f3b421f" + integrity sha512-B9/wizE4WngqQftFPmdaMYlXoJlJOYxGQOanC77fq9k8+Z0v5dDSVh+3glErdIROP//s/jgb7ZuxKfB8nVyo0g== + dependencies: + strnum "^1.0.5" + +fast-xml-parser@^4.2.5: + version "4.2.7" + resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.2.7.tgz#871f2ca299dc4334b29f8da3658c164e68395167" + integrity sha512-J8r6BriSLO1uj2miOk1NW0YVm8AGOOu3Si2HQp/cSmo6EA4m3fcwu2WKjJ4RK9wMLBtg69y1kS8baDiQBR41Ig== dependencies: strnum "^1.0.5" fastq@^1.6.0: version "1.15.0" - resolved "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.15.0.tgz#d04d07c6a2a68fe4599fea8d2e103a937fae6b3a" + integrity sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw== dependencies: reusify "^1.0.4" faye-websocket@^0.11.3: version "0.11.4" - resolved "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz" + resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da" + integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g== dependencies: websocket-driver ">=0.5.1" fb-watchman@^2.0.0: version "2.0.2" - resolved "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz" + resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" + integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== dependencies: bser "2.1.1" fflate@0.7.3: version "0.7.3" - resolved "https://registry.npmjs.org/fflate/-/fflate-0.7.3.tgz" + resolved "https://registry.yarnpkg.com/fflate/-/fflate-0.7.3.tgz#288b034ff0e9c380eaa2feff48c787b8371b7fa5" + integrity sha512-0Zz1jOzJWERhyhsimS54VTqOteCNwRtIlh8isdL0AXLo0g7xNTfTL7oWrkmCnPhZGocKIkWHBistBrrpoNH3aw== file-entry-cache@^6.0.1: version "6.0.1" - resolved "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== dependencies: flat-cache "^3.0.4" 
file-loader@^6.2.0: version "6.2.0" - resolved "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz" + resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-6.2.0.tgz#baef7cf8e1840df325e4390b4484879480eebe4d" + integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== dependencies: loader-utils "^2.0.0" schema-utils "^3.0.0" @@ -6692,28 +7435,33 @@ file-loader@^6.2.0: file-selector@^0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/file-selector/-/file-selector-0.5.0.tgz#21c7126dc9728b31a2742d91cab20d55e67e4fb4" + integrity sha512-s8KNnmIDTBoD0p9uJ9uD0XY38SCeBOtj0UMXyQSLg1Ypfrfj8+dAvwsLjYQkQ2GjhVtp2HrnF5cJzMhBjfD8HA== dependencies: tslib "^2.0.3" filelist@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/filelist/-/filelist-1.0.4.tgz#f78978a1e944775ff9e62e744424f215e58352b5" + integrity sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q== dependencies: minimatch "^5.0.1" filesize@^8.0.6: version "8.0.7" - resolved "https://registry.npmjs.org/filesize/-/filesize-8.0.7.tgz" + resolved "https://registry.yarnpkg.com/filesize/-/filesize-8.0.7.tgz#695e70d80f4e47012c132d57a059e80c6b580bd8" + integrity sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ== fill-range@^7.0.1: version "7.0.1" - resolved "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== dependencies: to-regex-range "^5.0.1" finalhandler@1.2.0: version "1.2.0" - resolved "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" + integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== dependencies: debug "2.6.9" encodeurl "~1.0.2" @@ -6725,7 +7473,8 @@ finalhandler@1.2.0: find-cache-dir@^3.3.1: version "3.3.2" - resolved "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz" + resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" + integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== dependencies: commondir "^1.0.1" make-dir "^3.0.2" @@ -6733,52 +7482,61 @@ find-cache-dir@^3.3.1: find-root@^1.1.0: version "1.1.0" - resolved "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz" + resolved "https://registry.yarnpkg.com/find-root/-/find-root-1.1.0.tgz#abcfc8ba76f708c42a97b3d685b7e9450bfb9ce4" + integrity sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng== find-up@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" + integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== dependencies: locate-path "^3.0.0" find-up@^4.0.0, find-up@^4.1.0: version "4.1.0" - resolved "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity 
sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== dependencies: locate-path "^5.0.0" path-exists "^4.0.0" find-up@^5.0.0: version "5.0.0" - resolved "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== dependencies: locate-path "^6.0.0" path-exists "^4.0.0" flat-cache@^3.0.4: version "3.0.4" - resolved "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== dependencies: flatted "^3.1.0" rimraf "^3.0.2" flatted@^3.1.0: version "3.2.7" - resolved "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" + integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== follow-redirects@^1.0.0, follow-redirects@^1.14.8: version "1.15.2" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" + integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== for-each@^0.3.3: version "0.3.3" - resolved "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz" + resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" + integrity sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw== dependencies: is-callable "^1.1.3" fork-ts-checker-webpack-plugin@^6.5.0: version "6.5.3" resolved "https://registry.yarnpkg.com/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.3.tgz#eda2eff6e22476a2688d10661688c47f611b37f3" + integrity sha512-SbH/l9ikmMWycd5puHJKTkZJKddF4iRLyW3DeZ08HTI7NGyLS38MXd/KGgeWumQO7YNQbW2u/NtPT2YowbPaGQ== dependencies: "@babel/code-frame" "^7.8.3" "@types/json-schema" "^7.0.5" @@ -6796,15 +7554,17 @@ fork-ts-checker-webpack-plugin@^6.5.0: form-data@^3.0.0: version "3.0.1" - resolved "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== dependencies: asynckit "^0.4.0" combined-stream "^1.0.8" mime-types "^2.1.12" formik@^2.2.9: - version "2.4.2" - resolved "https://registry.yarnpkg.com/formik/-/formik-2.4.2.tgz#a1115457cfb012a5c782cea3ad4b40b2fe36fa18" + version "2.4.3" + resolved "https://registry.yarnpkg.com/formik/-/formik-2.4.3.tgz#6020e85eb3e3e8415b3b19d6f4f65793ab754b24" + integrity sha512-2Dy79Szw3zlXmZiokUdKsn+n1ow4G8hRrC/n92cOWHNTWXCRpQXlyvz6HcjW7aSQZrldytvDOavYjhfmDnUq8Q== dependencies: deepmerge "^2.1.1" hoist-non-react-statics "^3.3.0" @@ -6816,19 +7576,23 @@ formik@^2.2.9: forwarded@0.2.0: version "0.2.0" - resolved "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz" + resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + integrity 
sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== fraction.js@^4.2.0: version "4.2.0" - resolved "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz" + resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.2.0.tgz#448e5109a313a3527f5a3ab2119ec4cf0e0e2950" + integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA== fresh@0.5.2: version "0.5.2" - resolved "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz" + resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== fs-extra@^10.0.0: version "10.1.0" - resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" + integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== dependencies: graceful-fs "^4.2.0" jsonfile "^6.0.1" @@ -6836,7 +7600,8 @@ fs-extra@^10.0.0: fs-extra@^9.0.0, fs-extra@^9.0.1: version "9.1.0" - resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" + integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== dependencies: at-least-node "^1.0.0" graceful-fs "^4.2.0" @@ -6846,22 +7611,27 @@ fs-extra@^9.0.0, fs-extra@^9.0.1: fs-monkey@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.4.tgz#ee8c1b53d3fe8bb7e5d2c5c5dfc0168afdd2f747" + integrity sha512-INM/fWAxMICjttnD0DX1rBvinKskj5G1w+oy/pnm9u/tSlnBrzFonJMcalKJ30P8RRsPzKcCG7Q8l0jx5Fh9YQ== fs.realpath@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== fsevents@^2.3.2, fsevents@~2.3.2: version "2.3.2" - resolved "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== function-bind@^1.1.1: version "1.1.1" - resolved "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== function.prototype.name@^1.1.5: version "1.1.5" - resolved "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz" + resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" + integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== dependencies: call-bind "^1.0.2" define-properties "^1.1.3" @@ -6871,18 +7641,22 @@ function.prototype.name@^1.1.5: functions-have-names@^1.2.2, functions-have-names@^1.2.3: version "1.2.3" resolved 
"https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== gensync@^1.0.0-beta.2: version "1.0.0-beta.2" - resolved "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== get-caller-file@^2.0.5: version "2.0.5" - resolved "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== -get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@^1.2.0: +get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@^1.2.0, get-intrinsic@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.1.tgz#d295644fed4505fc9cde952c37ee12b477a83d82" + integrity sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw== dependencies: function-bind "^1.1.1" has "^1.0.3" @@ -6891,42 +7665,50 @@ get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@ get-own-enumerable-property-symbols@^3.0.0: version "3.0.2" - resolved "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz" + resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664" + integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== get-package-type@^0.1.0: version "0.1.0" - resolved "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz" + resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== get-stream@^6.0.0: version "6.0.1" - resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== get-symbol-description@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" + integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== dependencies: call-bind "^1.0.2" get-intrinsic "^1.1.1" glob-parent@^5.1.2, glob-parent@~5.1.2: version "5.1.2" - resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== dependencies: is-glob "^4.0.1" 
glob-parent@^6.0.2: version "6.0.2" - resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== dependencies: is-glob "^4.0.3" glob-to-regexp@^0.4.1: version "0.4.1" - resolved "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz" + resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" + integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== glob@7.1.6: version "7.1.6" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" + integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" @@ -6937,7 +7719,8 @@ glob@7.1.6: glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: version "7.2.3" - resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" @@ -6948,13 +7731,15 @@ glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: global-modules@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/global-modules/-/global-modules-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" + integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== dependencies: global-prefix "^3.0.0" global-prefix@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/global-prefix/-/global-prefix-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" + integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== dependencies: ini "^1.3.5" kind-of "^6.0.2" @@ -6962,23 +7747,27 @@ global-prefix@^3.0.0: globals@^11.1.0: version "11.12.0" - resolved "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globals@^13.19.0: - version "13.20.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.20.0.tgz#ea276a1e508ffd4f1612888f9d1bad1e2717bf82" + version "13.21.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.21.0.tgz#163aae12f34ef502f5153cfbdd3600f36c63c571" + integrity sha512-ybyme3s4yy/t/3s35bewwXKOf7cvzfreG2lH0lZl0JB7I4GxRP2ghxOK/Nb9EkRXdbBXZLfq/p/0W2JUONB/Gg== dependencies: type-fest "^0.20.2" globalthis@^1.0.3: version "1.0.3" - resolved "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz" + resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.3.tgz#5852882a52b80dc301b0660273e1ed082f0b6ccf" + integrity sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA== dependencies: define-properties "^1.1.3" globby@^11.0.4, globby@^11.1.0: version "11.1.0" 
- resolved "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== dependencies: array-union "^2.1.0" dir-glob "^3.0.1" @@ -6989,107 +7778,129 @@ globby@^11.0.4, globby@^11.1.0: gopd@^1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" + integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== dependencies: get-intrinsic "^1.1.3" graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== graphemer@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" + integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== graphql-tag@^2.12.6, graphql-tag@^2.4.2: version "2.12.6" - resolved "https://registry.npmjs.org/graphql-tag/-/graphql-tag-2.12.6.tgz" + resolved "https://registry.yarnpkg.com/graphql-tag/-/graphql-tag-2.12.6.tgz#d441a569c1d2537ef10ca3d1633b48725329b5f1" + integrity sha512-FdSNcu2QQcWnM2VNvSCCDCVS5PpPqpzgFT8+GXzqJuoDd0CBncxCY278u4mhRO7tMgo2JjgJA5aZ+nWSQ/Z+xg== dependencies: tslib "^2.1.0" graphql@15.8.0: version "15.8.0" - resolved "https://registry.npmjs.org/graphql/-/graphql-15.8.0.tgz" + resolved "https://registry.yarnpkg.com/graphql/-/graphql-15.8.0.tgz#33410e96b012fa3bdb1091cc99a94769db212b38" + integrity sha512-5gghUc24tP9HRznNpV2+FIoq3xKkj5dTQqf4v0CpdPbFVwFkWoxOM+o+2OC9ZSvjEMTjfmG9QT+gcvggTwW1zw== gzip-size@^6.0.0: version "6.0.0" - resolved "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz" + resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462" + integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q== dependencies: duplexer "^0.1.2" handle-thing@^2.0.0: version "2.0.1" - resolved "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e" + integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== harmony-reflect@^1.4.6: version "1.6.2" - resolved "https://registry.npmjs.org/harmony-reflect/-/harmony-reflect-1.6.2.tgz" + resolved "https://registry.yarnpkg.com/harmony-reflect/-/harmony-reflect-1.6.2.tgz#31ecbd32e648a34d030d86adb67d4d47547fe710" + integrity sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g== has-bigints@^1.0.1, has-bigints@^1.0.2: version "1.0.2" - resolved "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== has-flag@^3.0.0: version "3.0.0" - resolved 
"https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== has-flag@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== has-property-descriptors@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" + integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== dependencies: get-intrinsic "^1.1.1" has-proto@^1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" + integrity sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg== has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: version "1.0.3" - resolved "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== has-tostringtag@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== dependencies: has-symbols "^1.0.2" has@^1.0.3: version "1.0.3" - resolved "https://registry.npmjs.org/has/-/has-1.0.3.tgz" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== dependencies: function-bind "^1.1.1" he@^1.2.0: version "1.2.0" - resolved "https://registry.npmjs.org/he/-/he-1.2.0.tgz" + resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" + integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== history@^5.0.3: version "5.3.0" - resolved "https://registry.npmjs.org/history/-/history-5.3.0.tgz" + resolved "https://registry.yarnpkg.com/history/-/history-5.3.0.tgz#1548abaa245ba47992f063a0783db91ef201c73b" + integrity sha512-ZqaKwjjrAYUYfLG+htGaIIZ4nioX2L70ZUMIFysS3xvBsSG4x/n1V6TXV3N8ZYNuFGlDirFg32T7B6WOUPDYcQ== dependencies: "@babel/runtime" "^7.7.6" hoist-non-react-statics@^3.2.1, hoist-non-react-statics@^3.3.0, hoist-non-react-statics@^3.3.1, hoist-non-react-statics@^3.3.2: version "3.3.2" - resolved "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz" + resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz#ece0acaf71d62c2969c2ec59feff42a4b1a85b45" + integrity 
sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw== dependencies: react-is "^16.7.0" hoopy@^0.1.4: version "0.1.4" - resolved "https://registry.npmjs.org/hoopy/-/hoopy-0.1.4.tgz" + resolved "https://registry.yarnpkg.com/hoopy/-/hoopy-0.1.4.tgz#609207d661100033a9a9402ad3dea677381c1b1d" + integrity sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ== hotkeys-js@^3.8.7: - version "3.11.1" - resolved "https://registry.yarnpkg.com/hotkeys-js/-/hotkeys-js-3.11.1.tgz#63d374faaddd1a28b8c3013e3a486d5cf78e968a" + version "3.12.0" + resolved "https://registry.yarnpkg.com/hotkeys-js/-/hotkeys-js-3.12.0.tgz#5534a7ffdba923df489ffbd876b991979beb2c77" + integrity sha512-Z+N573ycUKIGwFYS3ID1RzMJiGmtWMGKMiaNLyJS8B1ei+MllF4ZYmKS2T0kMWBktOz+WZLVNikftEgnukOrXg== hpack.js@^2.1.6: version "2.1.6" - resolved "https://registry.npmjs.org/hpack.js/-/hpack.js-2.1.6.tgz" + resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" + integrity sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ== dependencies: inherits "^2.0.1" obuf "^1.0.0" @@ -7098,21 +7909,25 @@ hpack.js@^2.1.6: html-encoding-sniffer@^2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" + integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== dependencies: whatwg-encoding "^1.0.5" html-entities@^2.1.0, html-entities@^2.3.2: version "2.4.0" resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.4.0.tgz#edd0cee70402584c8c76cc2c0556db09d1f45061" + integrity sha512-igBTJcNNNhvZFRtm8uA6xMY6xYleeDwn3PeBCkDz7tHttv4F2hsDI2aPgNERWzvRcNYHNT3ymRaQzllmXj4YsQ== html-escaper@^2.0.0: version "2.0.2" - resolved "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz" + resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== html-minifier-terser@^6.0.2: version "6.1.0" - resolved "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz" + resolved "https://registry.yarnpkg.com/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#bfc818934cc07918f6b3669f5774ecdfd48f32ab" + integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== dependencies: camel-case "^4.1.2" clean-css "^5.2.2" @@ -7125,6 +7940,7 @@ html-minifier-terser@^6.0.2: html-webpack-plugin@^5.5.0: version "5.5.3" resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.5.3.tgz#72270f4a78e222b5825b296e5e3e1328ad525a3e" + integrity sha512-6YrDKTuqaP/TquFH7h4srYWsZx+x6k6+FbsTm0ziCwGHDP78Unr1r9F/H4+sGmMbX08GQcJ+K64x55b+7VM/jg== dependencies: "@types/html-minifier-terser" "^6.0.0" html-minifier-terser "^6.0.2" @@ -7134,7 +7950,8 @@ html-webpack-plugin@^5.5.0: htmlparser2@^6.1.0: version "6.1.0" - resolved "https://registry.npmjs.org/htmlparser2/-/htmlparser2-6.1.0.tgz" + resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7" + integrity 
sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== dependencies: domelementtype "^2.0.1" domhandler "^4.0.0" @@ -7143,11 +7960,13 @@ htmlparser2@^6.1.0: http-deceiver@^1.2.7: version "1.2.7" - resolved "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz" + resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" + integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw== http-errors@2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" + integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== dependencies: depd "2.0.0" inherits "2.0.4" @@ -7157,7 +7976,8 @@ http-errors@2.0.0: http-errors@~1.6.2: version "1.6.3" - resolved "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" + integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== dependencies: depd "~1.1.2" inherits "2.0.3" @@ -7166,11 +7986,13 @@ http-errors@~1.6.2: http-parser-js@>=0.5.1: version "0.5.8" - resolved "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz" + resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.8.tgz#af23090d9ac4e24573de6f6aecc9d84a48bf20e3" + integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q== http-proxy-agent@^4.0.1: version "4.0.1" - resolved "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" + integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== dependencies: "@tootallnate/once" "1" agent-base "6" @@ -7178,7 +8000,8 @@ http-proxy-agent@^4.0.1: http-proxy-middleware@^2.0.3: version "2.0.6" - resolved "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz" + resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz#e1a4dd6979572c7ab5a4e4b55095d1f32a74963f" + integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw== dependencies: "@types/http-proxy" "^1.17.8" http-proxy "^1.18.1" @@ -7188,7 +8011,8 @@ http-proxy-middleware@^2.0.3: http-proxy@^1.18.1: version "1.18.1" - resolved "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz" + resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" + integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== dependencies: eventemitter3 "^4.0.0" follow-redirects "^1.0.0" @@ -7196,109 +8020,131 @@ http-proxy@^1.18.1: https-proxy-agent@^5.0.0: version "5.0.1" - resolved "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" + integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== dependencies: 
agent-base "6" debug "4" human-signals@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== hyphenate-style-name@^1.0.3: version "1.0.4" - resolved "https://registry.npmjs.org/hyphenate-style-name/-/hyphenate-style-name-1.0.4.tgz" + resolved "https://registry.yarnpkg.com/hyphenate-style-name/-/hyphenate-style-name-1.0.4.tgz#691879af8e220aea5750e8827db4ef62a54e361d" + integrity sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ== iconv-lite@0.4.24: version "0.4.24" - resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" iconv-lite@^0.6.3: version "0.6.3" - resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" + integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== dependencies: safer-buffer ">= 2.1.2 < 3.0.0" icss-utils@^5.0.0, icss-utils@^5.1.0: version "5.1.0" - resolved "https://registry.npmjs.org/icss-utils/-/icss-utils-5.1.0.tgz" + resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" + integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== idb@5.0.6: version "5.0.6" - resolved "https://registry.npmjs.org/idb/-/idb-5.0.6.tgz" + resolved "https://registry.yarnpkg.com/idb/-/idb-5.0.6.tgz#8c94624f5a8a026abe3bef3c7166a5febd1cadc1" + integrity sha512-/PFvOWPzRcEPmlDt5jEvzVZVs0wyd/EvGvkDIcbBpGuMMLQKrTPG0TxvE2UJtgZtCQCmOtM2QD7yQJBVEjKGOw== idb@^7.0.1: version "7.1.1" - resolved "https://registry.npmjs.org/idb/-/idb-7.1.1.tgz" + resolved "https://registry.yarnpkg.com/idb/-/idb-7.1.1.tgz#d910ded866d32c7ced9befc5bfdf36f572ced72b" + integrity sha512-gchesWBzyvGHRO9W8tzUWFDycow5gwjvFKfyV9FF32Y7F50yZMp7mP+T2mJIWFx49zicqyC4uefHM17o6xKIVQ== identity-obj-proxy@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/identity-obj-proxy/-/identity-obj-proxy-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/identity-obj-proxy/-/identity-obj-proxy-3.0.0.tgz#94d2bda96084453ef36fbc5aaec37e0f79f1fc14" + integrity sha512-00n6YnVHKrinT9t0d9+5yZC6UBNJANpYEQvL2LlX6Ab9lnmxzIRcEmTPuyGScvl1+jKuCICX1Z0Ab1pPKKdikA== dependencies: harmony-reflect "^1.4.6" ieee754@^1.1.13, ieee754@^1.1.4: version "1.2.1" - resolved "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" + integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== ignore@^5.2.0: version "5.2.4" - resolved "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" + integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== immer@9.0.6: version 
"9.0.6" - resolved "https://registry.npmjs.org/immer/-/immer-9.0.6.tgz" + resolved "https://registry.yarnpkg.com/immer/-/immer-9.0.6.tgz#7a96bf2674d06c8143e327cbf73539388ddf1a73" + integrity sha512-G95ivKpy+EvVAnAab4fVa4YGYn24J1SpEktnJX7JJ45Bd7xqME/SCplFzYFmTbrkwZbQ4xJK1xMTUYBkN6pWsQ== immer@^9.0.21, immer@^9.0.7: version "9.0.21" resolved "https://registry.yarnpkg.com/immer/-/immer-9.0.21.tgz#1e025ea31a40f24fb064f1fef23e931496330176" + integrity sha512-bc4NBHqOqSfRW7POMkHd51LvClaeMXpm8dx0e8oE2GORbq5aRK7Bxl4FyzVLdGtLmvLKL7BTDBG5ACQm4HWjTA== -import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: +import-fresh@^3.1.0, import-fresh@^3.2.1: version "3.3.0" - resolved "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== dependencies: parent-module "^1.0.0" resolve-from "^4.0.0" import-local@^3.0.2: version "3.1.0" - resolved "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" + integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== dependencies: pkg-dir "^4.2.0" resolve-cwd "^3.0.0" imurmurhash@^0.1.4: version "0.1.4" - resolved "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== indent-string@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== inflight@^1.0.4: version "1.0.6" - resolved "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== dependencies: once "^1.3.0" wrappy "1" inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3: version "2.0.4" - resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== inherits@2.0.3: version "2.0.3" - resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== ini@^1.3.5: version "1.3.8" - resolved "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== internal-slot@^1.0.3, internal-slot@^1.0.4, 
internal-slot@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.5.tgz#f2a2ee21f668f8627a4667f309dc0f4fb6674986" + integrity sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ== dependencies: get-intrinsic "^1.2.0" has "^1.0.3" @@ -7306,21 +8152,25 @@ internal-slot@^1.0.3, internal-slot@^1.0.4, internal-slot@^1.0.5: invariant@^2.2.4: version "2.2.4" - resolved "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz" + resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" + integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== dependencies: loose-envify "^1.0.0" ipaddr.js@1.9.1: version "1.9.1" - resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== ipaddr.js@^2.0.1: version "2.1.0" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.1.0.tgz#2119bc447ff8c257753b196fc5f1ce08a4cdf39f" + integrity sha512-LlbxQ7xKzfBusov6UMi4MFpEg0m+mAm9xyNGEduwXMEDuf4WfzB/RZwMVYEd7IKGvh4IUkEXYxtAVu9T3OelJQ== is-arguments@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.1.tgz#15b3f88fda01f2a97fec84ca761a560f123efa9b" + integrity sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA== dependencies: call-bind "^1.0.2" has-tostringtag "^1.0.0" @@ -7328,6 +8178,7 @@ is-arguments@^1.1.1: is-array-buffer@^3.0.1, is-array-buffer@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.2.tgz#f2653ced8412081638ecb0ebbd0c41c6e0aecbbe" + integrity sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w== dependencies: call-bind "^1.0.2" get-intrinsic "^1.2.0" @@ -7335,211 +8186,270 @@ is-array-buffer@^3.0.1, is-array-buffer@^3.0.2: is-arrayish@^0.2.1: version "0.2.1" - resolved "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== + +is-async-function@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-async-function/-/is-async-function-2.0.0.tgz#8e4418efd3e5d3a6ebb0164c05ef5afb69aa9646" + integrity sha512-Y1JXKrfykRJGdlDwdKlLpLyMIiWqWvuSd17TvZk68PLAOGOoF4Xyav1z0Xhoi+gCYjZVeC5SI+hYFOfvXmGRCA== + dependencies: + has-tostringtag "^1.0.0" is-bigint@^1.0.1: version "1.0.4" - resolved "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz" + resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== dependencies: has-bigints "^1.0.1" is-binary-path@~2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== dependencies: 
binary-extensions "^2.0.0" is-boolean-object@^1.1.0: version "1.1.2" - resolved "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz" + resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== dependencies: call-bind "^1.0.2" has-tostringtag "^1.0.0" is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.7: version "1.2.7" - resolved "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" + integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== -is-core-module@^2.11.0, is-core-module@^2.9.0: - version "2.12.1" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.12.1.tgz#0c0b6885b6f80011c71541ce15c8d66cf5a4f9fd" +is-core-module@^2.12.1, is-core-module@^2.13.0, is-core-module@^2.9.0: + version "2.13.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.0.tgz#bb52aa6e2cbd49a30c2ba68c42bf3435ba6072db" + integrity sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ== dependencies: has "^1.0.3" is-date-object@^1.0.1, is-date-object@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== dependencies: has-tostringtag "^1.0.0" is-docker@^2.0.0, is-docker@^2.1.1: version "2.2.1" - resolved "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz" + resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" + integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== is-extglob@^2.1.1: version "2.1.1" - resolved "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-finalizationregistry@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-finalizationregistry/-/is-finalizationregistry-1.0.2.tgz#c8749b65f17c133313e661b1289b95ad3dbd62e6" + integrity sha512-0by5vtUJs8iFQb5TYUHHPudOR+qXYIMKtiUzvLIZITZUjknFmziyBJuLhVRc+Ds0dREFlskDNJKYIdIzu/9pfw== + dependencies: + call-bind "^1.0.2" is-fullwidth-code-point@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== is-generator-fn@^2.0.0: version "2.1.0" - resolved "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== + +is-generator-function@^1.0.10: + version "1.0.10" + resolved 
"https://registry.yarnpkg.com/is-generator-function/-/is-generator-function-1.0.10.tgz#f1558baf1ac17e0deea7c0415c438351ff2b3c72" + integrity sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A== + dependencies: + has-tostringtag "^1.0.0" is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: version "4.0.3" - resolved "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== dependencies: is-extglob "^2.1.1" is-in-browser@^1.0.2, is-in-browser@^1.1.3: version "1.1.3" - resolved "https://registry.npmjs.org/is-in-browser/-/is-in-browser-1.1.3.tgz" + resolved "https://registry.yarnpkg.com/is-in-browser/-/is-in-browser-1.1.3.tgz#56ff4db683a078c6082eb95dad7dc62e1d04f835" + integrity sha512-FeXIBgG/CPGd/WUxuEyvgGTEfwiG9Z4EKGxjNMRqviiIIfsmgrpnHLffEDdwUHqNva1VEW91o3xBT/m8Elgl9g== is-map@^2.0.1, is-map@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/is-map/-/is-map-2.0.2.tgz#00922db8c9bf73e81b7a335827bc2a43f2b91127" + integrity sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg== is-module@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" + integrity sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g== is-negative-zero@^2.0.2: version "2.0.2" - resolved "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" + integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== is-number-object@^1.0.4: version "1.0.7" - resolved "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz" + resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" + integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== dependencies: has-tostringtag "^1.0.0" is-number@^7.0.0: version "7.0.0" - resolved "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== is-obj@^1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" + integrity sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg== is-path-inside@^3.0.3: version "3.0.3" - resolved "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" + integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== is-plain-obj@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz" + resolved 
"https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7" + integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== is-potential-custom-element-name@^1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" + integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== is-regex@^1.1.4: version "1.1.4" - resolved "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== dependencies: call-bind "^1.0.2" has-tostringtag "^1.0.0" is-regexp@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069" + integrity sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA== is-root@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/is-root/-/is-root-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/is-root/-/is-root-2.1.0.tgz#809e18129cf1129644302a4f8544035d51984a9c" + integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== is-set@^2.0.1, is-set@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/is-set/-/is-set-2.0.2.tgz#90755fa4c2562dc1c5d4024760d6119b94ca18ec" + integrity sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g== is-shared-array-buffer@^1.0.2: version "1.0.2" - resolved "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" + integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== dependencies: call-bind "^1.0.2" is-stream@^2.0.0: version "2.0.1" - resolved "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== is-string@^1.0.5, is-string@^1.0.7: version "1.0.7" - resolved "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== dependencies: has-tostringtag "^1.0.0" is-symbol@^1.0.2, is-symbol@^1.0.3: version "1.0.4" - resolved "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== dependencies: has-symbols "^1.0.2" is-typed-array@^1.1.10, is-typed-array@^1.1.9: - version "1.1.10" - resolved 
"https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.10.tgz" + version "1.1.12" + resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.12.tgz#d0bab5686ef4a76f7a73097b95470ab199c57d4a" + integrity sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg== dependencies: - available-typed-arrays "^1.0.5" - call-bind "^1.0.2" - for-each "^0.3.3" - gopd "^1.0.1" - has-tostringtag "^1.0.0" + which-typed-array "^1.1.11" is-typedarray@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== is-weakmap@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/is-weakmap/-/is-weakmap-2.0.1.tgz#5008b59bdc43b698201d18f62b37b2ca243e8cf2" + integrity sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA== is-weakref@^1.0.2: version "1.0.2" - resolved "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== dependencies: call-bind "^1.0.2" is-weakset@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/is-weakset/-/is-weakset-2.0.2.tgz#4569d67a747a1ce5a994dfd4ef6dcea76e7c0a1d" + integrity sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg== dependencies: call-bind "^1.0.2" get-intrinsic "^1.1.1" is-wsl@^2.2.0: version "2.2.0" - resolved "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz" + resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" + integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== dependencies: is-docker "^2.0.0" isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== isarray@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.5.tgz#8af1e4c1221244cc62459faf38940d4e644a5723" + integrity sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw== isexe@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== isomorphic-unfetch@^3.0.0: version "3.1.0" - resolved "https://registry.npmjs.org/isomorphic-unfetch/-/isomorphic-unfetch-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/isomorphic-unfetch/-/isomorphic-unfetch-3.1.0.tgz#87341d5f4f7b63843d468438128cb087b7c3e98f" + integrity sha512-geDJjpoZ8N0kWexiwkX8F9NkTsXhetLPVbZFQ+JTW239QNOwvB0gniuR1Wc6f0AMTn7/mFGyXvHTifrCp/GH8Q== dependencies: node-fetch "^2.6.1" unfetch "^4.2.0" istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: version "3.2.0" - resolved 
"https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: version "5.2.1" - resolved "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" + integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== dependencies: "@babel/core" "^7.12.3" "@babel/parser" "^7.14.7" @@ -7548,31 +8458,46 @@ istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: semver "^6.3.0" istanbul-lib-report@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz" + version "3.0.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz#908305bac9a5bd175ac6a74489eafd0fc2445a7d" + integrity sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw== dependencies: istanbul-lib-coverage "^3.0.0" - make-dir "^3.0.0" + make-dir "^4.0.0" supports-color "^7.1.0" istanbul-lib-source-maps@^4.0.0: version "4.0.1" - resolved "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== dependencies: debug "^4.1.1" istanbul-lib-coverage "^3.0.0" source-map "^0.6.1" istanbul-reports@^3.1.3: - version "3.1.5" - resolved "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.5.tgz" + version "3.1.6" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.6.tgz#2544bcab4768154281a2f0870471902704ccaa1a" + integrity sha512-TLgnMkKg3iTDsQ9PbPTdpfAK2DzjF9mqUG7RMgcQl8oFjad8ob4laGxv5XV5U9MAfx8D6tSJiUyuAwzLicaxlg== dependencies: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" +iterator.prototype@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/iterator.prototype/-/iterator.prototype-1.1.0.tgz#690c88b043d821f783843aaf725d7ac3b62e3b46" + integrity sha512-rjuhAk1AJ1fssphHD0IFV6TWL40CwRZ53FrztKx43yk2v6rguBYsY4Bj1VU4HmoMmKwZUlx7mfnhDf9cOp4YTw== + dependencies: + define-properties "^1.1.4" + get-intrinsic "^1.1.3" + has-symbols "^1.0.3" + has-tostringtag "^1.0.0" + reflect.getprototypeof "^1.0.3" + jake@^10.8.5: version "10.8.7" resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.7.tgz#63a32821177940c33f356e0ba44ff9d34e1c7d8f" + integrity sha512-ZDi3aP+fG/LchyBzUM804VjddnwfSfsdeYkwt8NcbKRvo4rFkjhs456iLFn3k2ZUWvNe4i48WACDbza8fhq2+w== dependencies: async "^3.2.3" chalk "^4.0.2" @@ -7581,7 +8506,8 @@ jake@^10.8.5: jest-changed-files@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-27.5.1.tgz#a348aed00ec9bf671cc58a66fcbe7c3dfd6a68f5" + integrity sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw== dependencies: "@jest/types" "^27.5.1" 
execa "^5.0.0" @@ -7589,7 +8515,8 @@ jest-changed-files@^27.5.1: jest-circus@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-circus/-/jest-circus-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-27.5.1.tgz#37a5a4459b7bf4406e53d637b49d22c65d125ecc" + integrity sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw== dependencies: "@jest/environment" "^27.5.1" "@jest/test-result" "^27.5.1" @@ -7613,7 +8540,8 @@ jest-circus@^27.5.1: jest-cli@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-cli/-/jest-cli-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-27.5.1.tgz#278794a6e6458ea8029547e6c6cbf673bd30b145" + integrity sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw== dependencies: "@jest/core" "^27.5.1" "@jest/test-result" "^27.5.1" @@ -7630,7 +8558,8 @@ jest-cli@^27.5.1: jest-config@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-config/-/jest-config-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-27.5.1.tgz#5c387de33dca3f99ad6357ddeccd91bf3a0e4a41" + integrity sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA== dependencies: "@babel/core" "^7.8.0" "@jest/test-sequencer" "^27.5.1" @@ -7659,31 +8588,35 @@ jest-config@^27.5.1: jest-diff@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-diff/-/jest-diff-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-27.5.1.tgz#a07f5011ac9e6643cf8a95a462b7b1ecf6680def" + integrity sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw== dependencies: chalk "^4.0.0" diff-sequences "^27.5.1" jest-get-type "^27.5.1" pretty-format "^27.5.1" -jest-diff@^29.6.1: - version "29.6.1" - resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-29.6.1.tgz#13df6db0a89ee6ad93c747c75c85c70ba941e545" +jest-diff@^29.6.2: + version "29.6.2" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-29.6.2.tgz#c36001e5543e82a0805051d3ceac32e6825c1c46" + integrity sha512-t+ST7CB9GX5F2xKwhwCf0TAR17uNDiaPTZnVymP9lw0lssa9vG+AFyDZoeIHStU3WowFFwT+ky+er0WVl2yGhA== dependencies: chalk "^4.0.0" diff-sequences "^29.4.3" jest-get-type "^29.4.3" - pretty-format "^29.6.1" + pretty-format "^29.6.2" jest-docblock@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-docblock/-/jest-docblock-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-27.5.1.tgz#14092f364a42c6108d42c33c8cf30e058e25f6c0" + integrity sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ== dependencies: detect-newline "^3.0.0" jest-each@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-each/-/jest-each-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-27.5.1.tgz#5bc87016f45ed9507fed6e4702a5b468a5b2c44e" + integrity sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ== dependencies: "@jest/types" "^27.5.1" chalk "^4.0.0" @@ -7693,7 +8626,8 @@ jest-each@^27.5.1: jest-environment-jsdom@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz#ea9ccd1fc610209655a77898f86b2b559516a546" + integrity 
sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw== dependencies: "@jest/environment" "^27.5.1" "@jest/fake-timers" "^27.5.1" @@ -7705,7 +8639,8 @@ jest-environment-jsdom@^27.5.1: jest-environment-node@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-27.5.1.tgz#dedc2cfe52fab6b8f5714b4808aefa85357a365e" + integrity sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw== dependencies: "@jest/environment" "^27.5.1" "@jest/fake-timers" "^27.5.1" @@ -7716,15 +8651,18 @@ jest-environment-node@^27.5.1: jest-get-type@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-27.5.1.tgz#3cd613c507b0f7ace013df407a1c1cd578bcb4f1" + integrity sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw== jest-get-type@^29.4.3: version "29.4.3" resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-29.4.3.tgz#1ab7a5207c995161100b5187159ca82dd48b3dd5" + integrity sha512-J5Xez4nRRMjk8emnTpWrlkyb9pfRQQanDrvWHhsR1+VUfbwxi30eVcZFlcdGInRibU4G5LwHXpI7IRHU0CY+gg== jest-haste-map@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-27.5.1.tgz#9fd8bd7e7b4fa502d9c6164c5640512b4e811e7f" + integrity sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng== dependencies: "@jest/types" "^27.5.1" "@types/graceful-fs" "^4.1.2" @@ -7743,7 +8681,8 @@ jest-haste-map@^27.5.1: jest-jasmine2@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz#a037b0034ef49a9f3d71c4375a796f3b230d1ac4" + integrity sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ== dependencies: "@jest/environment" "^27.5.1" "@jest/source-map" "^27.5.1" @@ -7765,32 +8704,36 @@ jest-jasmine2@^27.5.1: jest-leak-detector@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz#6ec9d54c3579dd6e3e66d70e3498adf80fde3fb8" + integrity sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ== dependencies: jest-get-type "^27.5.1" pretty-format "^27.5.1" jest-matcher-utils@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz#9c0cdbda8245bc22d2331729d1091308b40cf8ab" + integrity sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw== dependencies: chalk "^4.0.0" jest-diff "^27.5.1" jest-get-type "^27.5.1" pretty-format "^27.5.1" -jest-matcher-utils@^29.6.1: - version "29.6.1" - resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-29.6.1.tgz#6c60075d84655d6300c5d5128f46531848160b53" +jest-matcher-utils@^29.6.2: + version "29.6.2" + resolved 
"https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-29.6.2.tgz#39de0be2baca7a64eacb27291f0bd834fea3a535" + integrity sha512-4LiAk3hSSobtomeIAzFTe+N8kL6z0JtF3n6I4fg29iIW7tt99R7ZcIFW34QkX+DuVrf+CUe6wuVOpm7ZKFJzZQ== dependencies: chalk "^4.0.0" - jest-diff "^29.6.1" + jest-diff "^29.6.2" jest-get-type "^29.4.3" - pretty-format "^29.6.1" + pretty-format "^29.6.2" jest-message-util@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-27.5.1.tgz#bdda72806da10d9ed6425e12afff38cd1458b6cf" + integrity sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g== dependencies: "@babel/code-frame" "^7.12.13" "@jest/types" "^27.5.1" @@ -7804,7 +8747,8 @@ jest-message-util@^27.5.1: jest-message-util@^28.1.3: version "28.1.3" - resolved "https://registry.npmjs.org/jest-message-util/-/jest-message-util-28.1.3.tgz" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-28.1.3.tgz#232def7f2e333f1eecc90649b5b94b0055e7c43d" + integrity sha512-PFdn9Iewbt575zKPf1286Ht9EPoJmYT7P0kY+RibeYZ2XtOr53pDLEFoTWXbd1h4JiGiWpTBC84fc8xMXQMb7g== dependencies: "@babel/code-frame" "^7.12.13" "@jest/types" "^28.1.3" @@ -7816,9 +8760,10 @@ jest-message-util@^28.1.3: slash "^3.0.0" stack-utils "^2.0.3" -jest-message-util@^29.6.1: - version "29.6.1" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-29.6.1.tgz#d0b21d87f117e1b9e165e24f245befd2ff34ff8d" +jest-message-util@^29.6.2: + version "29.6.2" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-29.6.2.tgz#af7adc2209c552f3f5ae31e77cf0a261f23dc2bb" + integrity sha512-vnIGYEjoPSuRqV8W9t+Wow95SDp6KPX2Uf7EoeG9G99J2OVh7OSwpS4B6J0NfpEIpfkBNHlBZpA2rblEuEFhZQ== dependencies: "@babel/code-frame" "^7.12.13" "@jest/types" "^29.6.1" @@ -7826,32 +8771,37 @@ jest-message-util@^29.6.1: chalk "^4.0.0" graceful-fs "^4.2.9" micromatch "^4.0.4" - pretty-format "^29.6.1" + pretty-format "^29.6.2" slash "^3.0.0" stack-utils "^2.0.3" jest-mock@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-mock/-/jest-mock-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-27.5.1.tgz#19948336d49ef4d9c52021d34ac7b5f36ff967d6" + integrity sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og== dependencies: "@jest/types" "^27.5.1" "@types/node" "*" jest-pnp-resolver@^1.2.2: version "1.2.3" - resolved "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz" + resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz#930b1546164d4ad5937d5540e711d4d38d4cad2e" + integrity sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w== jest-regex-util@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-27.5.1.tgz#4da143f7e9fd1e542d4aa69617b38e4a78365b95" + integrity sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg== jest-regex-util@^28.0.0: version "28.0.2" - resolved "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-28.0.2.tgz" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead" + integrity 
sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== jest-resolve-dependencies@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz#d811ecc8305e731cc86dd79741ee98fed06f1da8" + integrity sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg== dependencies: "@jest/types" "^27.5.1" jest-regex-util "^27.5.1" @@ -7859,7 +8809,8 @@ jest-resolve-dependencies@^27.5.1: jest-resolve@^27.4.2, jest-resolve@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-resolve/-/jest-resolve-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-27.5.1.tgz#a2f1c5a0796ec18fe9eb1536ac3814c23617b384" + integrity sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw== dependencies: "@jest/types" "^27.5.1" chalk "^4.0.0" @@ -7874,7 +8825,8 @@ jest-resolve@^27.4.2, jest-resolve@^27.5.1: jest-runner@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-runner/-/jest-runner-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-27.5.1.tgz#071b27c1fa30d90540805c5645a0ec167c7b62e5" + integrity sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ== dependencies: "@jest/console" "^27.5.1" "@jest/environment" "^27.5.1" @@ -7900,7 +8852,8 @@ jest-runner@^27.5.1: jest-runtime@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-runtime/-/jest-runtime-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-27.5.1.tgz#4896003d7a334f7e8e4a53ba93fb9bcd3db0a1af" + integrity sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A== dependencies: "@jest/environment" "^27.5.1" "@jest/fake-timers" "^27.5.1" @@ -7927,14 +8880,16 @@ jest-runtime@^27.5.1: jest-serializer@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-serializer/-/jest-serializer-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-27.5.1.tgz#81438410a30ea66fd57ff730835123dea1fb1f64" + integrity sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w== dependencies: "@types/node" "*" graceful-fs "^4.2.9" jest-snapshot@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-27.5.1.tgz#b668d50d23d38054a51b42c4039cab59ae6eb6a1" + integrity sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA== dependencies: "@babel/core" "^7.7.2" "@babel/generator" "^7.7.2" @@ -7961,7 +8916,8 @@ jest-snapshot@^27.5.1: jest-util@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-util/-/jest-util-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-27.5.1.tgz#3ba9771e8e31a0b85da48fe0b0891fb86c01c2f9" + integrity sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw== dependencies: "@jest/types" "^27.5.1" "@types/node" "*" @@ -7972,7 +8928,8 @@ jest-util@^27.5.1: jest-util@^28.1.3: version "28.1.3" - resolved "https://registry.npmjs.org/jest-util/-/jest-util-28.1.3.tgz" + resolved 
"https://registry.yarnpkg.com/jest-util/-/jest-util-28.1.3.tgz#f4f932aa0074f0679943220ff9cbba7e497028b0" + integrity sha512-XdqfpHwpcSRko/C35uLYFM2emRAltIIKZiJ9eAmhjsj0CqZMa0p1ib0R5fWIqGhn1a103DebTbpqIaP1qCQ6tQ== dependencies: "@jest/types" "^28.1.3" "@types/node" "*" @@ -7981,9 +8938,10 @@ jest-util@^28.1.3: graceful-fs "^4.2.9" picomatch "^2.2.3" -jest-util@^29.6.1: - version "29.6.1" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-29.6.1.tgz#c9e29a87a6edbf1e39e6dee2b4689b8a146679cb" +jest-util@^29.6.2: + version "29.6.2" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-29.6.2.tgz#8a052df8fff2eebe446769fd88814521a517664d" + integrity sha512-3eX1qb6L88lJNCFlEADKOkjpXJQyZRiavX1INZ4tRnrBVr2COd3RgcTLyUiEXMNBlDU/cgYq6taUS0fExrWW4w== dependencies: "@jest/types" "^29.6.1" "@types/node" "*" @@ -7994,7 +8952,8 @@ jest-util@^29.6.1: jest-validate@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-validate/-/jest-validate-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-27.5.1.tgz#9197d54dc0bdb52260b8db40b46ae668e04df067" + integrity sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ== dependencies: "@jest/types" "^27.5.1" camelcase "^6.2.0" @@ -8005,7 +8964,8 @@ jest-validate@^27.5.1: jest-watch-typeahead@^1.0.0: version "1.1.0" - resolved "https://registry.npmjs.org/jest-watch-typeahead/-/jest-watch-typeahead-1.1.0.tgz" + resolved "https://registry.yarnpkg.com/jest-watch-typeahead/-/jest-watch-typeahead-1.1.0.tgz#b4a6826dfb9c9420da2f7bc900de59dad11266a9" + integrity sha512-Va5nLSJTN7YFtC2jd+7wsoe1pNe5K4ShLux/E5iHEwlB9AxaxmggY7to9KUqKojhaJw3aXqt5WAb4jGPOolpEw== dependencies: ansi-escapes "^4.3.1" chalk "^4.0.0" @@ -8017,7 +8977,8 @@ jest-watch-typeahead@^1.0.0: jest-watcher@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-watcher/-/jest-watcher-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-27.5.1.tgz#71bd85fb9bde3a2c2ec4dc353437971c43c642a2" + integrity sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw== dependencies: "@jest/test-result" "^27.5.1" "@jest/types" "^27.5.1" @@ -8029,7 +8990,8 @@ jest-watcher@^27.5.1: jest-watcher@^28.0.0: version "28.1.3" - resolved "https://registry.npmjs.org/jest-watcher/-/jest-watcher-28.1.3.tgz" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-28.1.3.tgz#c6023a59ba2255e3b4c57179fc94164b3e73abd4" + integrity sha512-t4qcqj9hze+jviFPUN3YAtAEeFnr/azITXQEMARf5cMwKY2SMBRnCQTXLixTl20OR6mLh9KLMrgVJgJISym+1g== dependencies: "@jest/test-result" "^28.1.3" "@jest/types" "^28.1.3" @@ -8042,7 +9004,8 @@ jest-watcher@^28.0.0: jest-worker@^26.2.1: version "26.6.2" - resolved "https://registry.npmjs.org/jest-worker/-/jest-worker-26.6.2.tgz" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed" + integrity sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== dependencies: "@types/node" "*" merge-stream "^2.0.0" @@ -8050,7 +9013,8 @@ jest-worker@^26.2.1: jest-worker@^27.0.2, jest-worker@^27.4.5, jest-worker@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" + integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== 
dependencies: "@types/node" "*" merge-stream "^2.0.0" @@ -8058,7 +9022,8 @@ jest-worker@^27.0.2, jest-worker@^27.4.5, jest-worker@^27.5.1: jest-worker@^28.0.2: version "28.1.3" - resolved "https://registry.npmjs.org/jest-worker/-/jest-worker-28.1.3.tgz" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-28.1.3.tgz#7e3c4ce3fa23d1bb6accb169e7f396f98ed4bb98" + integrity sha512-CqRA220YV/6jCo8VWvAt1KKx6eek1VIHMPeLEbpcfSfkEeWyBNppynM/o6q+Wmw+sOhos2ml34wZbSX3G13//g== dependencies: "@types/node" "*" merge-stream "^2.0.0" @@ -8066,7 +9031,8 @@ jest-worker@^28.0.2: jest@^27.4.3: version "27.5.1" - resolved "https://registry.npmjs.org/jest/-/jest-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/jest/-/jest-27.5.1.tgz#dadf33ba70a779be7a6fc33015843b51494f63fc" + integrity sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ== dependencies: "@jest/core" "^27.5.1" import-local "^3.0.2" @@ -8075,31 +9041,37 @@ jest@^27.4.3: jiti@^1.18.2: version "1.19.1" resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.19.1.tgz#fa99e4b76a23053e0e7cde098efe1704a14c16f1" + integrity sha512-oVhqoRDaBXf7sjkll95LHVS6Myyyb1zaunVwk4Z0+WPSW4gjS0pl01zYKHScTuyEhQsFxV5L4DR5r+YqSyqyyg== js-cookie@^2.2.1: version "2.2.1" - resolved "https://registry.npmjs.org/js-cookie/-/js-cookie-2.2.1.tgz" + resolved "https://registry.yarnpkg.com/js-cookie/-/js-cookie-2.2.1.tgz#69e106dc5d5806894562902aa5baec3744e9b2b8" + integrity sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ== "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== js-yaml@^3.13.1: version "3.14.1" - resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== dependencies: argparse "^1.0.7" esprima "^4.0.0" js-yaml@^4.1.0: version "4.1.0" - resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== dependencies: argparse "^2.0.1" jsdom@^16.6.0: version "16.7.0" - resolved "https://registry.npmjs.org/jsdom/-/jsdom-16.7.0.tgz" + resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" + integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== dependencies: abab "^2.0.5" acorn "^8.2.4" @@ -8131,45 +9103,55 @@ jsdom@^16.6.0: jsesc@^2.5.1: version "2.5.2" - resolved "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== jsesc@~0.5.0: version "0.5.0" - resolved "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + integrity 
sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: version "2.3.1" - resolved "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz" + resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== json-schema-traverse@^0.4.1: version "0.4.1" - resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-schema-traverse@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== json-schema@^0.4.0: version "0.4.0" - resolved "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" + integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== json-stable-stringify-without-jsonify@^1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== json5@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" + integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== dependencies: minimist "^1.2.0" json5@^2.1.2, json5@^2.2.0, json5@^2.2.2: version "2.2.3" - resolved "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" + integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== jsonfile@^6.0.1: version "6.1.0" - resolved "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== dependencies: universalify "^2.0.0" optionalDependencies: @@ -8177,11 +9159,13 @@ jsonfile@^6.0.1: jsonpointer@^5.0.0: version "5.0.1" - resolved "https://registry.npmjs.org/jsonpointer/-/jsonpointer-5.0.1.tgz" + resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559" + integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ== jss-plugin-camel-case@^10.10.0: version 
"10.10.0" resolved "https://registry.yarnpkg.com/jss-plugin-camel-case/-/jss-plugin-camel-case-10.10.0.tgz#27ea159bab67eb4837fa0260204eb7925d4daa1c" + integrity sha512-z+HETfj5IYgFxh1wJnUAU8jByI48ED+v0fuTuhKrPR+pRBYS2EDwbusU8aFOpCdYhtRc9zhN+PJ7iNE8pAWyPw== dependencies: "@babel/runtime" "^7.3.1" hyphenate-style-name "^1.0.3" @@ -8190,6 +9174,7 @@ jss-plugin-camel-case@^10.10.0: jss-plugin-default-unit@^10.10.0: version "10.10.0" resolved "https://registry.yarnpkg.com/jss-plugin-default-unit/-/jss-plugin-default-unit-10.10.0.tgz#db3925cf6a07f8e1dd459549d9c8aadff9804293" + integrity sha512-SvpajxIECi4JDUbGLefvNckmI+c2VWmP43qnEy/0eiwzRUsafg5DVSIWSzZe4d2vFX1u9nRDP46WCFV/PXVBGQ== dependencies: "@babel/runtime" "^7.3.1" jss "10.10.0" @@ -8197,6 +9182,7 @@ jss-plugin-default-unit@^10.10.0: jss-plugin-global@^10.10.0: version "10.10.0" resolved "https://registry.yarnpkg.com/jss-plugin-global/-/jss-plugin-global-10.10.0.tgz#1c55d3c35821fab67a538a38918292fc9c567efd" + integrity sha512-icXEYbMufiNuWfuazLeN+BNJO16Ge88OcXU5ZDC2vLqElmMybA31Wi7lZ3lf+vgufRocvPj8443irhYRgWxP+A== dependencies: "@babel/runtime" "^7.3.1" jss "10.10.0" @@ -8204,6 +9190,7 @@ jss-plugin-global@^10.10.0: jss-plugin-nested@^10.10.0: version "10.10.0" resolved "https://registry.yarnpkg.com/jss-plugin-nested/-/jss-plugin-nested-10.10.0.tgz#db872ed8925688806e77f1fc87f6e62264513219" + integrity sha512-9R4JHxxGgiZhurDo3q7LdIiDEgtA1bTGzAbhSPyIOWb7ZubrjQe8acwhEQ6OEKydzpl8XHMtTnEwHXCARLYqYA== dependencies: "@babel/runtime" "^7.3.1" jss "10.10.0" @@ -8212,6 +9199,7 @@ jss-plugin-nested@^10.10.0: jss-plugin-props-sort@^10.10.0: version "10.10.0" resolved "https://registry.yarnpkg.com/jss-plugin-props-sort/-/jss-plugin-props-sort-10.10.0.tgz#67f4dd4c70830c126f4ec49b4b37ccddb680a5d7" + integrity sha512-5VNJvQJbnq/vRfje6uZLe/FyaOpzP/IH1LP+0fr88QamVrGJa0hpRRyAa0ea4U/3LcorJfBFVyC4yN2QC73lJg== dependencies: "@babel/runtime" "^7.3.1" jss "10.10.0" @@ -8219,6 +9207,7 @@ jss-plugin-props-sort@^10.10.0: jss-plugin-rule-value-function@^10.10.0: version "10.10.0" resolved "https://registry.yarnpkg.com/jss-plugin-rule-value-function/-/jss-plugin-rule-value-function-10.10.0.tgz#7d99e3229e78a3712f78ba50ab342e881d26a24b" + integrity sha512-uEFJFgaCtkXeIPgki8ICw3Y7VMkL9GEan6SqmT9tqpwM+/t+hxfMUdU4wQ0MtOiMNWhwnckBV0IebrKcZM9C0g== dependencies: "@babel/runtime" "^7.3.1" jss "10.10.0" @@ -8227,6 +9216,7 @@ jss-plugin-rule-value-function@^10.10.0: jss-plugin-vendor-prefixer@^10.10.0: version "10.10.0" resolved "https://registry.yarnpkg.com/jss-plugin-vendor-prefixer/-/jss-plugin-vendor-prefixer-10.10.0.tgz#c01428ef5a89f2b128ec0af87a314d0c767931c7" + integrity sha512-UY/41WumgjW8r1qMCO8l1ARg7NHnfRVWRhZ2E2m0DMYsr2DD91qIXLyNhiX83hHswR7Wm4D+oDYNC1zWCJWtqg== dependencies: "@babel/runtime" "^7.3.1" css-vendor "^2.0.8" @@ -8235,22 +9225,17 @@ jss-plugin-vendor-prefixer@^10.10.0: jss@10.10.0, jss@^10.10.0: version "10.10.0" resolved "https://registry.yarnpkg.com/jss/-/jss-10.10.0.tgz#a75cc85b0108c7ac8c7b7d296c520a3e4fbc6ccc" + integrity sha512-cqsOTS7jqPsPMjtKYDUpdFC0AbhYFLTcuGRqymgmdJIeQ8cH7+AgX7YSgQy79wXloZq2VvATYxUOUQEvS1V/Zw== dependencies: "@babel/runtime" "^7.3.1" csstype "^3.0.2" is-in-browser "^1.1.3" tiny-warning "^1.0.2" -"jsx-ast-utils@^2.4.1 || ^3.0.0": - version "3.3.3" - resolved "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz" - dependencies: - array-includes "^3.1.5" - object.assign "^4.1.3" - -jsx-ast-utils@^3.3.3: - version "3.3.4" - resolved 
"https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.3.4.tgz#b896535fed5b867650acce5a9bd4135ffc7b3bf9" +"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.3.3: + version "3.3.5" + resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz#4766bd05a8e2a11af222becd19e15575e52a853a" + integrity sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ== dependencies: array-includes "^3.1.6" array.prototype.flat "^1.3.1" @@ -8259,70 +9244,76 @@ jsx-ast-utils@^3.3.3: jwt-decode@^3.1.2: version "3.1.2" - resolved "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz" + resolved "https://registry.yarnpkg.com/jwt-decode/-/jwt-decode-3.1.2.tgz#3fb319f3675a2df0c2895c8f5e9fa4b67b04ed59" + integrity sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A== kind-of@^6.0.2: version "6.0.3" - resolved "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== kleur@^3.0.3: version "3.0.3" - resolved "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== klona@^2.0.4, klona@^2.0.5: version "2.0.6" resolved "https://registry.yarnpkg.com/klona/-/klona-2.0.6.tgz#85bffbf819c03b2f53270412420a4555ef882e22" + integrity sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA== language-subtag-registry@~0.3.2: version "0.3.22" resolved "https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz#2e1500861b2e457eba7e7ae86877cbd08fa1fd1d" + integrity sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w== language-tags@=1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/language-tags/-/language-tags-1.0.5.tgz#d321dbc4da30ba8bf3024e040fa5c14661f9193a" + integrity sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ== dependencies: language-subtag-registry "~0.3.2" launch-editor@^2.6.0: version "2.6.0" resolved "https://registry.yarnpkg.com/launch-editor/-/launch-editor-2.6.0.tgz#4c0c1a6ac126c572bd9ff9a30da1d2cae66defd7" + integrity sha512-JpDCcQnyAAzZZaZ7vEiSqL690w7dAEyLao+KC96zBplnYbJS7TYNjvM3M7y3dGz+v7aIsJk3hllWuc0kWAjyRQ== dependencies: picocolors "^1.0.0" shell-quote "^1.7.3" leven@^3.1.0: version "3.1.0" - resolved "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== levn@^0.4.1: version "0.4.1" - resolved "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== dependencies: prelude-ls "^1.2.1" type-check "~0.4.0" -levn@~0.3.0: - version "0.3.0" - resolved "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz" - dependencies: - prelude-ls "~1.1.2" - type-check "~0.3.2" - lilconfig@^2.0.3, lilconfig@^2.0.5, 
lilconfig@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.1.0.tgz#78e23ac89ebb7e1bfbf25b18043de756548e7f52" + integrity sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ== lines-and-columns@^1.1.6: version "1.2.4" - resolved "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== loader-runner@^4.2.0: version "4.3.0" - resolved "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz" + resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" + integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== loader-utils@^2.0.0, loader-utils@^2.0.4: version "2.0.4" - resolved "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.4.tgz#8b5cb38b5c34a9a018ee1fc0e6a066d1dfcc528c" + integrity sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw== dependencies: big.js "^5.2.2" emojis-list "^3.0.0" @@ -8330,219 +9321,276 @@ loader-utils@^2.0.0, loader-utils@^2.0.4: loader-utils@^3.2.0: version "3.2.1" - resolved "https://registry.npmjs.org/loader-utils/-/loader-utils-3.2.1.tgz" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-3.2.1.tgz#4fb104b599daafd82ef3e1a41fb9265f87e1f576" + integrity sha512-ZvFw1KWS3GVyYBYb7qkmRM/WwL2TQQBxgCK62rlvm4WpVQ23Nb4tYjApUlfjrEGvOs7KHEsmyUn75OHZrJMWPw== locate-path@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" + integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== dependencies: p-locate "^3.0.0" path-exists "^3.0.0" locate-path@^5.0.0: version "5.0.0" - resolved "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== dependencies: p-locate "^4.1.0" locate-path@^6.0.0: version "6.0.0" - resolved "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== dependencies: p-locate "^5.0.0" lodash-es@^4.17.21: version "4.17.21" - resolved "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz" + resolved "https://registry.yarnpkg.com/lodash-es/-/lodash-es-4.17.21.tgz#43e626c46e6591b7750beb2b50117390c609e3ee" + integrity sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw== lodash.debounce@^4.0.8: version "4.0.8" - resolved "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz" + resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" + integrity 
sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== lodash.memoize@^4.1.2: version "4.1.2" - resolved "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz" + resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== lodash.merge@^4.6.2: version "4.6.2" - resolved "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== lodash.sortby@^4.7.0: version "4.7.0" - resolved "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz" + resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" + integrity sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA== lodash.throttle@^4.1.1: version "4.1.1" - resolved "https://registry.npmjs.org/lodash.throttle/-/lodash.throttle-4.1.1.tgz" + resolved "https://registry.yarnpkg.com/lodash.throttle/-/lodash.throttle-4.1.1.tgz#c23e91b710242ac70c37f1e1cda9274cc39bf2f4" + integrity sha512-wIkUCfVKpVsWo3JSZlc+8MB5it+2AN5W8J7YVMST30UrvcQNZ1Okbj+rbVniijTWE6FGYy4XJq/rHkas8qJMLQ== lodash.uniq@^4.5.0: version "4.5.0" - resolved "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz" + resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" + integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0: version "4.17.21" - resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.4.0: version "1.4.0" - resolved "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz" + resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== dependencies: js-tokens "^3.0.0 || ^4.0.0" lower-case@^2.0.2: version "2.0.2" - resolved "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz" + resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" + integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== dependencies: tslib "^2.0.3" lru-cache@^5.1.1: version "5.1.1" - resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" + integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: yallist "^3.0.2" lru-cache@^6.0.0: version "6.0.0" - resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz" + resolved 
"https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== dependencies: yallist "^4.0.0" lz-string@^1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/lz-string/-/lz-string-1.5.0.tgz#c1ab50f77887b712621201ba9fd4e3a6ed099941" + integrity sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ== magic-string@^0.25.0, magic-string@^0.25.7: version "0.25.9" - resolved "https://registry.npmjs.org/magic-string/-/magic-string-0.25.9.tgz" + resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.9.tgz#de7f9faf91ef8a1c91d02c2e5314c8277dbcdd1c" + integrity sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ== dependencies: sourcemap-codec "^1.4.8" -make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: +make-dir@^3.0.2, make-dir@^3.1.0: version "3.1.0" - resolved "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== dependencies: semver "^6.0.0" +make-dir@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-4.0.0.tgz#c3c2307a771277cd9638305f915c29ae741b614e" + integrity sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw== + dependencies: + semver "^7.5.3" + makeerror@1.0.12: version "1.0.12" - resolved "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz" + resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== dependencies: tmpl "1.0.5" map-obj@^4.0.0: version "4.3.0" - resolved "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz" + resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-4.3.0.tgz#9304f906e93faae70880da102a9f1df0ea8bb05a" + integrity sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ== mdn-data@2.0.14: version "2.0.14" - resolved "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz" + resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50" + integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== mdn-data@2.0.4: version "2.0.4" - resolved "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.4.tgz" + resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.4.tgz#699b3c38ac6f1d728091a64650b65d388502fd5b" + integrity sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA== media-typer@0.3.0: version "0.3.0" - resolved "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz" + resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== memfs@^3.1.2, memfs@^3.4.3: version "3.6.0" resolved "https://registry.yarnpkg.com/memfs/-/memfs-3.6.0.tgz#d7a2110f86f79dd950a8b6df6d57bc984aa185f6" + integrity sha512-EGowvkkgbMcIChjMTMkESFDbZeSh8xZ7kNSF0hAiAN4Jh6jgHCRS0Ga/+C8y6Au+oqpezRHCfPsmJ2+DwAgiwQ== 
dependencies: fs-monkey "^1.0.4" merge-descriptors@1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== merge-stream@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== merge2@^1.3.0, merge2@^1.4.1: version "1.4.1" - resolved "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +merge@^1.2.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/merge/-/merge-1.2.1.tgz#38bebf80c3220a8a487b6fcfb3941bb11720c145" + integrity sha512-VjFo4P5Whtj4vsLzsYBu5ayHhoHJ0UqNm7ibvShmbmoz7tGi0vXaoJbGdB+GmDMLUdg8DpQXEIeVDAe8MaABvQ== methods@~1.1.2: version "1.1.2" - resolved "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" + resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: version "4.0.5" - resolved "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== dependencies: braces "^3.0.2" picomatch "^2.3.1" mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": version "1.52.0" - resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.24, mime-types@~2.1.34: version "2.1.35" - resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== dependencies: mime-db "1.52.0" mime@1.6.0: version "1.6.0" - resolved "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== mimic-fn@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== min-indent@^1.0.0: version "1.0.1" - resolved 
"https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" + integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== mini-css-extract-plugin@^2.4.5: version "2.7.6" resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-2.7.6.tgz#282a3d38863fddcd2e0c220aaed5b90bc156564d" + integrity sha512-Qk7HcgaPkGG6eD77mLvZS1nmxlao3j+9PkrT9Uc7HAE1id3F41+DdBRYRYkbyfNRGzm8/YWtzhw7nVPmwhqTQw== dependencies: schema-utils "^4.0.0" minimalistic-assert@^1.0.0: version "1.0.1" - resolved "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" + integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: version "3.1.2" - resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== dependencies: brace-expansion "^1.1.7" minimatch@^5.0.1: version "5.1.6" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.6.tgz#1cfcb8cf5522ea69952cd2af95ae09477f122a96" + integrity sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g== dependencies: brace-expansion "^2.0.1" minimist@^1.2.0, minimist@^1.2.6: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" + integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== mkdirp@~0.5.1: version "0.5.6" - resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" + integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== dependencies: minimist "^1.2.6" ms@2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== ms@2.1.2: version "2.1.2" - resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== ms@2.1.3, ms@^2.1.1: version "2.1.3" - resolved "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== multicast-dns@^7.2.5: version "7.2.5" - resolved "https://registry.npmjs.org/multicast-dns/-/multicast-dns-7.2.5.tgz" + resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-7.2.5.tgz#77eb46057f4d7adbd16d9290fa7299f6fa64cced" + integrity 
sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg== dependencies: dns-packet "^5.2.2" thunky "^1.0.2" @@ -8550,6 +9598,7 @@ multicast-dns@^7.2.5: mz@^2.7.0: version "2.7.0" resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" + integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== dependencies: any-promise "^1.0.0" object-assign "^4.0.1" @@ -8557,122 +9606,148 @@ mz@^2.7.0: nanoclone@^0.2.1: version "0.2.1" - resolved "https://registry.npmjs.org/nanoclone/-/nanoclone-0.2.1.tgz" + resolved "https://registry.yarnpkg.com/nanoclone/-/nanoclone-0.2.1.tgz#dd4090f8f1a110d26bb32c49ed2f5b9235209ed4" + integrity sha512-wynEP02LmIbLpcYw8uBKpcfF6dmg2vcpKqxeH5UcoKEYdExslsdUA4ugFauuaeYdTB76ez6gJW8XAZ6CgkXYxA== nanoid@^3.3.6: version "3.3.6" resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.6.tgz#443380c856d6e9f9824267d960b4236ad583ea4c" + integrity sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA== natural-compare-lite@^1.4.0: version "1.4.0" - resolved "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz" + resolved "https://registry.yarnpkg.com/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz#17b09581988979fddafe0201e931ba933c96cbb4" + integrity sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g== natural-compare@^1.4.0: version "1.4.0" - resolved "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== negotiator@0.6.3: version "0.6.3" - resolved "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== neo-async@^2.6.2: version "2.6.2" - resolved "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz" + resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== nice-try@^1.0.4: version "1.0.5" - resolved "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz" + resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" + integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== no-case@^3.0.4: version "3.0.4" - resolved "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz" + resolved "https://registry.yarnpkg.com/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" + integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== dependencies: lower-case "^2.0.2" tslib "^2.0.3" -node-fetch@^2.6.1, node-fetch@^2.6.11: - version "2.6.11" - resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.11.tgz" +node-fetch@^2.6.1, node-fetch@^2.6.12: + version "2.6.12" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.12.tgz#02eb8e22074018e3d5a83016649d04df0e348fba" + integrity 
sha512-C/fGU2E8ToujUivIO0H+tpQ6HWo4eEmchoPIoXtxCrVghxdKq+QOHqEZW7tuP3KlV3bC8FRMO5nMCC7Zm1VP6g== dependencies: whatwg-url "^5.0.0" node-forge@^1: version "1.3.1" - resolved "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz" + resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" + integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== node-int64@^0.4.0: version "0.4.0" - resolved "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz" + resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== -node-releases@^2.0.12: +node-releases@^2.0.13: version "2.0.13" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.13.tgz#d5ed1627c23e3461e819b02e57b75e4899b1c81d" + integrity sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ== normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== normalize-range@^0.1.2: version "0.1.2" - resolved "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz" + resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" + integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== normalize-url@^6.0.1: version "6.1.0" - resolved "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz" + resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" + integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== notistack@^2.0.3: version "2.0.8" resolved "https://registry.yarnpkg.com/notistack/-/notistack-2.0.8.tgz#78cdf34c64e311bf1d1d71c2123396bcdea5e95b" + integrity sha512-/IY14wkFp5qjPgKNvAdfL5Jp6q90+MjgKTPh4c81r/lW70KeuX6b9pE/4f8L4FG31cNudbN9siiFS5ql1aSLRw== dependencies: clsx "^1.1.0" hoist-non-react-statics "^3.3.0" npm-run-path@^4.0.1: version "4.0.1" - resolved "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== dependencies: path-key "^3.0.0" nprogress@^0.2.0: version "0.2.0" - resolved "https://registry.npmjs.org/nprogress/-/nprogress-0.2.0.tgz" + resolved "https://registry.yarnpkg.com/nprogress/-/nprogress-0.2.0.tgz#cb8f34c53213d895723fcbab907e9422adbcafb1" + integrity sha512-I19aIingLgR1fmhftnbWWO3dXc0hSxqHQHQb3H8m+K3TnEn/iSeTZZOyvKXWqQESMwuUVnatlCnZdLBZZt2VSA== nth-check@^1.0.2, nth-check@^2.0.1: version "2.1.1" - resolved "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz" + resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" + integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== 
dependencies: boolbase "^1.0.0" nwsapi@^2.2.0: version "2.2.7" resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.7.tgz#738e0707d3128cb750dddcfe90e4610482df0f30" + integrity sha512-ub5E4+FBPKwAZx0UwIQOjYWGHTEq5sPqHQNRN8Z9e4A7u3Tj1weLJsL59yH9vmvqEtBHaOmT6cYQKIZOxp35FQ== object-assign@^4.0.1, object-assign@^4.1.1: version "4.1.1" - resolved "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== object-hash@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9" + integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== object-inspect@^1.12.3, object-inspect@^1.9.0: version "1.12.3" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.3.tgz#ba62dffd67ee256c8c086dfae69e016cd1f198b9" + integrity sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g== object-is@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.5.tgz#b9deeaa5fc7f1846a0faecdceec138e5778f53ac" + integrity sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw== dependencies: call-bind "^1.0.2" define-properties "^1.1.3" object-keys@^1.1.1: version "1.1.1" - resolved "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== -object.assign@^4.0.4, object.assign@^4.1.3, object.assign@^4.1.4: +object.assign@^4.0.4, object.assign@^4.1.4: version "4.1.4" - resolved "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" + integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -8681,7 +9756,8 @@ object.assign@^4.0.4, object.assign@^4.1.3, object.assign@^4.1.4: object.entries@^1.1.6: version "1.1.6" - resolved "https://registry.npmjs.org/object.entries/-/object.entries-1.1.6.tgz" + resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.6.tgz#9737d0e5b8291edd340a3e3264bb8a3b00d5fa23" + integrity sha512-leTPzo4Zvg3pmbQ3rDK69Rl8GQvIqMWubrkxONG9/ojtFE2rD9fjMKfSI5BxW3osRH1m6VdzmqK8oAY9aT4x5w== dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -8689,7 +9765,8 @@ object.entries@^1.1.6: object.fromentries@^2.0.6: version "2.0.6" - resolved "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.6.tgz" + resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.6.tgz#cdb04da08c539cffa912dcd368b886e0904bfa73" + integrity sha512-VciD13dswC4j1Xt5394WR4MzmAQmlgN72phd/riNp9vtD7tp4QQWJ0R4wvclXcafgcYK8veHRed2W6XeGBvcfg== dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -8698,6 +9775,7 @@ object.fromentries@^2.0.6: object.getownpropertydescriptors@^2.1.0: version "2.1.6" resolved 
"https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.6.tgz#5e5c384dd209fa4efffead39e3a0512770ccc312" + integrity sha512-lq+61g26E/BgHv0ZTFgRvi7NMEPuAxLkFU7rukXjc/AlwH4Am5xXVnIXy3un1bg/JPbXHrixRkK1itUzzPiIjQ== dependencies: array.prototype.reduce "^1.0.5" call-bind "^1.0.2" @@ -8705,16 +9783,28 @@ object.getownpropertydescriptors@^2.1.0: es-abstract "^1.21.2" safe-array-concat "^1.0.0" +object.groupby@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/object.groupby/-/object.groupby-1.0.0.tgz#cb29259cf90f37e7bac6437686c1ea8c916d12a9" + integrity sha512-70MWG6NfRH9GnbZOikuhPPYzpUpof9iW2J9E4dW7FXTqPNb6rllE6u39SKwwiNh8lCwX3DDb5OgcKGiEBrTTyw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.2.0" + es-abstract "^1.21.2" + get-intrinsic "^1.2.1" + object.hasown@^1.1.2: version "1.1.2" - resolved "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.2.tgz" + resolved "https://registry.yarnpkg.com/object.hasown/-/object.hasown-1.1.2.tgz#f919e21fad4eb38a57bc6345b3afd496515c3f92" + integrity sha512-B5UIT3J1W+WuWIU55h0mjlwaqxiE5vYENJXIXZ4VFe05pNYrkKuK0U/6aFcb0pKywYJh7IhfoqUfKVmrJJHZHw== dependencies: define-properties "^1.1.4" es-abstract "^1.20.4" object.values@^1.1.0, object.values@^1.1.6: version "1.1.6" - resolved "https://registry.npmjs.org/object.values/-/object.values-1.1.6.tgz" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.6.tgz#4abbaa71eba47d63589d402856f908243eea9b1d" + integrity sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw== dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -8722,33 +9812,39 @@ object.values@^1.1.0, object.values@^1.1.6: obuf@^1.0.0, obuf@^1.1.2: version "1.1.2" - resolved "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz" + resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" + integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== on-finished@2.4.1: version "2.4.1" - resolved "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" + integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== dependencies: ee-first "1.1.1" on-headers@~1.0.2: version "1.0.2" - resolved "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" + integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== once@^1.3.0: version "1.4.0" - resolved "https://registry.npmjs.org/once/-/once-1.4.0.tgz" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== dependencies: wrappy "1" onetime@^5.1.2: version "5.1.2" - resolved "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== dependencies: mimic-fn "^2.1.0" open@^8.0.9, open@^8.4.0: version "8.4.2" resolved 
"https://registry.yarnpkg.com/open/-/open-8.4.2.tgz#5b5ffe2a8f793dcd2aad73e550cb87b59cb084f9" + integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ== dependencies: define-lazy-prop "^2.0.0" is-docker "^2.1.1" @@ -8756,31 +9852,24 @@ open@^8.0.9, open@^8.4.0: optimism@^0.10.0: version "0.10.3" - resolved "https://registry.npmjs.org/optimism/-/optimism-0.10.3.tgz" + resolved "https://registry.yarnpkg.com/optimism/-/optimism-0.10.3.tgz#163268fdc741dea2fb50f300bedda80356445fd7" + integrity sha512-9A5pqGoQk49H6Vhjb9kPgAeeECfUDF6aIICbMDL23kDLStBn1MWk3YvcZ4xWF9CsSf6XEgvRLkXy4xof/56vVw== dependencies: "@wry/context" "^0.4.0" -optimism@^0.16.2: - version "0.16.2" - resolved "https://registry.yarnpkg.com/optimism/-/optimism-0.16.2.tgz#519b0c78b3b30954baed0defe5143de7776bf081" +optimism@^0.17.5: + version "0.17.5" + resolved "https://registry.yarnpkg.com/optimism/-/optimism-0.17.5.tgz#a4c78b3ad12c58623abedbebb4f2f2c19b8e8816" + integrity sha512-TEcp8ZwK1RczmvMnvktxHSF2tKgMWjJ71xEFGX5ApLh67VsMSTy1ZUlipJw8W+KaqgOmQ+4pqwkeivY89j+4Vw== dependencies: "@wry/context" "^0.7.0" - "@wry/trie" "^0.3.0" - -optionator@^0.8.1: - version "0.8.3" - resolved "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz" - dependencies: - deep-is "~0.1.3" - fast-levenshtein "~2.0.6" - levn "~0.3.0" - prelude-ls "~1.1.2" - type-check "~0.3.2" - word-wrap "~1.2.3" + "@wry/trie" "^0.4.3" + tslib "^2.3.0" optionator@^0.9.3: version "0.9.3" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.3.tgz#007397d44ed1872fdc6ed31360190f81814e2c64" + integrity sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg== dependencies: "@aashutoshrathi/word-wrap" "^1.2.3" deep-is "^0.1.3" @@ -8791,65 +9880,76 @@ optionator@^0.9.3: p-limit@^2.0.0, p-limit@^2.2.0: version "2.3.0" - resolved "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== dependencies: p-try "^2.0.0" p-limit@^3.0.2: version "3.1.0" - resolved "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== dependencies: yocto-queue "^0.1.0" p-locate@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" + integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== dependencies: p-limit "^2.0.0" p-locate@^4.1.0: version "4.1.0" - resolved "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== dependencies: p-limit "^2.2.0" p-locate@^5.0.0: version "5.0.0" - resolved "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + integrity 
sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== dependencies: p-limit "^3.0.2" p-retry@^4.5.0: version "4.6.2" - resolved "https://registry.npmjs.org/p-retry/-/p-retry-4.6.2.tgz" + resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16" + integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== dependencies: "@types/retry" "0.12.0" retry "^0.13.1" p-try@^2.0.0: version "2.2.0" - resolved "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== pako@2.0.4: version "2.0.4" - resolved "https://registry.npmjs.org/pako/-/pako-2.0.4.tgz" + resolved "https://registry.yarnpkg.com/pako/-/pako-2.0.4.tgz#6cebc4bbb0b6c73b0d5b8d7e8476e2b2fbea576d" + integrity sha512-v8tweI900AUkZN6heMU/4Uy4cXRc2AYNRggVmTR+dEncawDJgCdLMximOVA2p4qO57WMynangsfGRb5WD6L1Bg== param-case@^3.0.4: version "3.0.4" - resolved "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz" + resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" + integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== dependencies: dot-case "^3.0.4" tslib "^2.0.3" parent-module@^1.0.0: version "1.0.1" - resolved "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== dependencies: callsites "^3.0.0" parse-json@^5.0.0, parse-json@^5.2.0: version "5.2.0" - resolved "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== dependencies: "@babel/code-frame" "^7.0.0" error-ex "^1.3.1" @@ -8858,139 +9958,163 @@ parse-json@^5.0.0, parse-json@^5.2.0: parse5@6.0.1: version "6.0.1" - resolved "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" + integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== parseurl@~1.3.2, parseurl@~1.3.3: version "1.3.3" - resolved "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz" + resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== pascal-case@^3.1.2: version "3.1.2" - resolved "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz" + resolved "https://registry.yarnpkg.com/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb" + integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== dependencies: no-case "^3.0.4" tslib "^2.0.3" path-exists@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz" + resolved 
"https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ== path-exists@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== path-is-absolute@^1.0.0: version "1.0.1" - resolved "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== path-key@^2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" + integrity sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw== path-key@^3.0.0, path-key@^3.1.0: version "3.1.1" - resolved "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== path-parse@^1.0.7: version "1.0.7" - resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-to-regexp@0.1.7: version "0.1.7" - resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== path-type@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== performance-now@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow== picocolors@^0.2.1: version "0.2.1" - resolved "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" + integrity sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA== picocolors@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity 
sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1: version "2.3.1" - resolved "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== pify@^2.3.0: version "2.3.0" - resolved "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz" + resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== -pirates@^4.0.1: +pirates@^4.0.1, pirates@^4.0.4: version "4.0.6" resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.6.tgz#3018ae32ecfcff6c29ba2267cbf21166ac1f36b9" - -pirates@^4.0.4: - version "4.0.5" - resolved "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz" + integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg== pkg-dir@^4.1.0, pkg-dir@^4.2.0: version "4.2.0" - resolved "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== dependencies: find-up "^4.0.0" pkg-up@^3.1.0: version "3.1.0" - resolved "https://registry.npmjs.org/pkg-up/-/pkg-up-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5" + integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== dependencies: find-up "^3.0.0" polished@^1.9.3: version "1.9.3" - resolved "https://registry.npmjs.org/polished/-/polished-1.9.3.tgz" + resolved "https://registry.yarnpkg.com/polished/-/polished-1.9.3.tgz#d61b8a0c4624efe31e2583ff24a358932b6b75e1" + integrity sha512-4NmSD7fMFlM8roNxs7YXPv7UFRbYzb0gufR5zBxJLRzY54+zFsavxBo6zsQzP9ep6Hh3pC2pTyrpSTBEaB6IkQ== postcss-attribute-case-insensitive@^5.0.2: version "5.0.2" - resolved "https://registry.npmjs.org/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-5.0.2.tgz" + resolved "https://registry.yarnpkg.com/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-5.0.2.tgz#03d761b24afc04c09e757e92ff53716ae8ea2741" + integrity sha512-XIidXV8fDr0kKt28vqki84fRK8VW8eTuIa4PChv2MqKuT6C9UjmSKzen6KaWhWEoYvwxFCa7n/tC1SZ3tyq4SQ== dependencies: postcss-selector-parser "^6.0.10" postcss-browser-comments@^4: version "4.0.0" - resolved "https://registry.npmjs.org/postcss-browser-comments/-/postcss-browser-comments-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-browser-comments/-/postcss-browser-comments-4.0.0.tgz#bcfc86134df5807f5d3c0eefa191d42136b5e72a" + integrity sha512-X9X9/WN3KIvY9+hNERUqX9gncsgBA25XaeR+jshHz2j8+sYyHktHw1JdKuMjeLpGktXidqDhA7b/qm1mrBDmgg== postcss-calc@^8.2.3: version "8.2.4" - resolved "https://registry.npmjs.org/postcss-calc/-/postcss-calc-8.2.4.tgz" + resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-8.2.4.tgz#77b9c29bfcbe8a07ff6693dc87050828889739a5" + integrity sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q== dependencies: postcss-selector-parser "^6.0.9" postcss-value-parser "^4.2.0" 
postcss-clamp@^4.1.0: version "4.1.0" - resolved "https://registry.npmjs.org/postcss-clamp/-/postcss-clamp-4.1.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-clamp/-/postcss-clamp-4.1.0.tgz#7263e95abadd8c2ba1bd911b0b5a5c9c93e02363" + integrity sha512-ry4b1Llo/9zz+PKC+030KUnPITTJAHeOwjfAyyB60eT0AorGLdzp52s31OsPRHRf8NchkgFoG2y6fCfn1IV1Ow== dependencies: postcss-value-parser "^4.2.0" postcss-color-functional-notation@^4.2.4: version "4.2.4" - resolved "https://registry.npmjs.org/postcss-color-functional-notation/-/postcss-color-functional-notation-4.2.4.tgz" + resolved "https://registry.yarnpkg.com/postcss-color-functional-notation/-/postcss-color-functional-notation-4.2.4.tgz#21a909e8d7454d3612d1659e471ce4696f28caec" + integrity sha512-2yrTAUZUab9s6CpxkxC4rVgFEVaR6/2Pipvi6qcgvnYiVqZcbDHEoBDhrXzyb7Efh2CCfHQNtcqWcIruDTIUeg== dependencies: postcss-value-parser "^4.2.0" postcss-color-hex-alpha@^8.0.4: version "8.0.4" - resolved "https://registry.npmjs.org/postcss-color-hex-alpha/-/postcss-color-hex-alpha-8.0.4.tgz" + resolved "https://registry.yarnpkg.com/postcss-color-hex-alpha/-/postcss-color-hex-alpha-8.0.4.tgz#c66e2980f2fbc1a63f5b079663340ce8b55f25a5" + integrity sha512-nLo2DCRC9eE4w2JmuKgVA3fGL3d01kGq752pVALF68qpGLmx2Qrk91QTKkdUqqp45T1K1XV8IhQpcu1hoAQflQ== dependencies: postcss-value-parser "^4.2.0" postcss-color-rebeccapurple@^7.1.1: version "7.1.1" - resolved "https://registry.npmjs.org/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-7.1.1.tgz" + resolved "https://registry.yarnpkg.com/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-7.1.1.tgz#63fdab91d878ebc4dd4b7c02619a0c3d6a56ced0" + integrity sha512-pGxkuVEInwLHgkNxUc4sdg4g3py7zUeCQ9sMfwyHAT+Ezk8a4OaaVZ8lIY5+oNqA/BXXgLyXv0+5wHP68R79hg== dependencies: postcss-value-parser "^4.2.0" postcss-colormin@^5.3.1: version "5.3.1" resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-5.3.1.tgz#86c27c26ed6ba00d96c79e08f3ffb418d1d1988f" + integrity sha512-UsWQG0AqTFQmpBegeLLc1+c3jIqBNB0zlDGRWR+dQ3pRKJL1oeMzyqmH3o2PIfn9MBdNrVPWhDbT769LxCTLJQ== dependencies: browserslist "^4.21.4" caniuse-api "^3.0.0" @@ -8999,97 +10123,115 @@ postcss-colormin@^5.3.1: postcss-convert-values@^5.1.3: version "5.1.3" - resolved "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-5.1.3.tgz" + resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-5.1.3.tgz#04998bb9ba6b65aa31035d669a6af342c5f9d393" + integrity sha512-82pC1xkJZtcJEfiLw6UXnXVXScgtBrjlO5CBmuDQc+dlb88ZYheFsjTn40+zBVi3DkfF7iezO0nJUPLcJK3pvA== dependencies: browserslist "^4.21.4" postcss-value-parser "^4.2.0" postcss-custom-media@^8.0.2: version "8.0.2" - resolved "https://registry.npmjs.org/postcss-custom-media/-/postcss-custom-media-8.0.2.tgz" + resolved "https://registry.yarnpkg.com/postcss-custom-media/-/postcss-custom-media-8.0.2.tgz#c8f9637edf45fef761b014c024cee013f80529ea" + integrity sha512-7yi25vDAoHAkbhAzX9dHx2yc6ntS4jQvejrNcC+csQJAXjj15e7VcWfMgLqBNAbOvqi5uIa9huOVwdHbf+sKqg== dependencies: postcss-value-parser "^4.2.0" postcss-custom-properties@^12.1.10: version "12.1.11" - resolved "https://registry.npmjs.org/postcss-custom-properties/-/postcss-custom-properties-12.1.11.tgz" + resolved "https://registry.yarnpkg.com/postcss-custom-properties/-/postcss-custom-properties-12.1.11.tgz#d14bb9b3989ac4d40aaa0e110b43be67ac7845cf" + integrity sha512-0IDJYhgU8xDv1KY6+VgUwuQkVtmYzRwu+dMjnmdMafXYv86SWqfxkc7qdDvWS38vsjaEtv8e0vGOUQrAiMBLpQ== dependencies: postcss-value-parser "^4.2.0" 
postcss-custom-selectors@^6.0.3: version "6.0.3" - resolved "https://registry.npmjs.org/postcss-custom-selectors/-/postcss-custom-selectors-6.0.3.tgz" + resolved "https://registry.yarnpkg.com/postcss-custom-selectors/-/postcss-custom-selectors-6.0.3.tgz#1ab4684d65f30fed175520f82d223db0337239d9" + integrity sha512-fgVkmyiWDwmD3JbpCmB45SvvlCD6z9CG6Ie6Iere22W5aHea6oWa7EM2bpnv2Fj3I94L3VbtvX9KqwSi5aFzSg== dependencies: postcss-selector-parser "^6.0.4" postcss-dir-pseudo-class@^6.0.5: version "6.0.5" - resolved "https://registry.npmjs.org/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-6.0.5.tgz" + resolved "https://registry.yarnpkg.com/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-6.0.5.tgz#2bf31de5de76added44e0a25ecf60ae9f7c7c26c" + integrity sha512-eqn4m70P031PF7ZQIvSgy9RSJ5uI2171O/OO/zcRNYpJbvaeKFUlar1aJ7rmgiQtbm0FSPsRewjpdS0Oew7MPA== dependencies: postcss-selector-parser "^6.0.10" postcss-discard-comments@^5.1.2: version "5.1.2" - resolved "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz" + resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz#8df5e81d2925af2780075840c1526f0660e53696" + integrity sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ== postcss-discard-duplicates@^5.1.0: version "5.1.0" - resolved "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz#9eb4fe8456706a4eebd6d3b7b777d07bad03e848" + integrity sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw== postcss-discard-empty@^5.1.1: version "5.1.1" - resolved "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz" + resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz#e57762343ff7f503fe53fca553d18d7f0c369c6c" + integrity sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A== postcss-discard-overridden@^5.1.0: version "5.1.0" - resolved "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz#7e8c5b53325747e9d90131bb88635282fb4a276e" + integrity sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw== postcss-double-position-gradients@^3.1.2: version "3.1.2" - resolved "https://registry.npmjs.org/postcss-double-position-gradients/-/postcss-double-position-gradients-3.1.2.tgz" + resolved "https://registry.yarnpkg.com/postcss-double-position-gradients/-/postcss-double-position-gradients-3.1.2.tgz#b96318fdb477be95997e86edd29c6e3557a49b91" + integrity sha512-GX+FuE/uBR6eskOK+4vkXgT6pDkexLokPaz/AbJna9s5Kzp/yl488pKPjhy0obB475ovfT1Wv8ho7U/cHNaRgQ== dependencies: "@csstools/postcss-progressive-custom-properties" "^1.1.0" postcss-value-parser "^4.2.0" postcss-env-function@^4.0.6: version "4.0.6" - resolved "https://registry.npmjs.org/postcss-env-function/-/postcss-env-function-4.0.6.tgz" + resolved "https://registry.yarnpkg.com/postcss-env-function/-/postcss-env-function-4.0.6.tgz#7b2d24c812f540ed6eda4c81f6090416722a8e7a" + integrity sha512-kpA6FsLra+NqcFnL81TnsU+Z7orGtDTxcOhl6pwXeEq1yFPpRMkCDpHhrz8CFQDr/Wfm0jLiNQ1OsGGPjlqPwA== dependencies: postcss-value-parser "^4.2.0" postcss-flexbugs-fixes@^5.0.2: version "5.0.2" - 
resolved "https://registry.npmjs.org/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-5.0.2.tgz" + resolved "https://registry.yarnpkg.com/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-5.0.2.tgz#2028e145313074fc9abe276cb7ca14e5401eb49d" + integrity sha512-18f9voByak7bTktR2QgDveglpn9DTbBWPUzSOe9g0N4WR/2eSt6Vrcbf0hmspvMI6YWGywz6B9f7jzpFNJJgnQ== postcss-focus-visible@^6.0.4: version "6.0.4" - resolved "https://registry.npmjs.org/postcss-focus-visible/-/postcss-focus-visible-6.0.4.tgz" + resolved "https://registry.yarnpkg.com/postcss-focus-visible/-/postcss-focus-visible-6.0.4.tgz#50c9ea9afa0ee657fb75635fabad25e18d76bf9e" + integrity sha512-QcKuUU/dgNsstIK6HELFRT5Y3lbrMLEOwG+A4s5cA+fx3A3y/JTq3X9LaOj3OC3ALH0XqyrgQIgey/MIZ8Wczw== dependencies: postcss-selector-parser "^6.0.9" postcss-focus-within@^5.0.4: version "5.0.4" - resolved "https://registry.npmjs.org/postcss-focus-within/-/postcss-focus-within-5.0.4.tgz" + resolved "https://registry.yarnpkg.com/postcss-focus-within/-/postcss-focus-within-5.0.4.tgz#5b1d2ec603195f3344b716c0b75f61e44e8d2e20" + integrity sha512-vvjDN++C0mu8jz4af5d52CB184ogg/sSxAFS+oUJQq2SuCe7T5U2iIsVJtsCp2d6R4j0jr5+q3rPkBVZkXD9fQ== dependencies: postcss-selector-parser "^6.0.9" postcss-font-variant@^5.0.0: version "5.0.0" - resolved "https://registry.npmjs.org/postcss-font-variant/-/postcss-font-variant-5.0.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-font-variant/-/postcss-font-variant-5.0.0.tgz#efd59b4b7ea8bb06127f2d031bfbb7f24d32fa66" + integrity sha512-1fmkBaCALD72CK2a9i468mA/+tr9/1cBxRRMXOUaZqO43oWPR5imcyPjXwuv7PXbCid4ndlP5zWhidQVVa3hmA== postcss-gap-properties@^3.0.5: version "3.0.5" - resolved "https://registry.npmjs.org/postcss-gap-properties/-/postcss-gap-properties-3.0.5.tgz" + resolved "https://registry.yarnpkg.com/postcss-gap-properties/-/postcss-gap-properties-3.0.5.tgz#f7e3cddcf73ee19e94ccf7cb77773f9560aa2fff" + integrity sha512-IuE6gKSdoUNcvkGIqdtjtcMtZIFyXZhmFd5RUlg97iVEvp1BZKV5ngsAjCjrVy+14uhGBQl9tzmi1Qwq4kqVOg== postcss-image-set-function@^4.0.7: version "4.0.7" - resolved "https://registry.npmjs.org/postcss-image-set-function/-/postcss-image-set-function-4.0.7.tgz" + resolved "https://registry.yarnpkg.com/postcss-image-set-function/-/postcss-image-set-function-4.0.7.tgz#08353bd756f1cbfb3b6e93182c7829879114481f" + integrity sha512-9T2r9rsvYzm5ndsBE8WgtrMlIT7VbtTfE7b3BQnudUqnBcBo7L758oc+o+pdj/dUV0l5wjwSdjeOH2DZtfv8qw== dependencies: postcss-value-parser "^4.2.0" postcss-import@^15.1.0: version "15.1.0" resolved "https://registry.yarnpkg.com/postcss-import/-/postcss-import-15.1.0.tgz#41c64ed8cc0e23735a9698b3249ffdbf704adc70" + integrity sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew== dependencies: postcss-value-parser "^4.0.0" read-cache "^1.0.0" @@ -9097,17 +10239,20 @@ postcss-import@^15.1.0: postcss-initial@^4.0.1: version "4.0.1" - resolved "https://registry.npmjs.org/postcss-initial/-/postcss-initial-4.0.1.tgz" + resolved "https://registry.yarnpkg.com/postcss-initial/-/postcss-initial-4.0.1.tgz#529f735f72c5724a0fb30527df6fb7ac54d7de42" + integrity sha512-0ueD7rPqX8Pn1xJIjay0AZeIuDoF+V+VvMt/uOnn+4ezUKhZM/NokDeP6DwMNyIoYByuN/94IQnt5FEkaN59xQ== postcss-js@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/postcss-js/-/postcss-js-4.0.1.tgz#61598186f3703bab052f1c4f7d805f3991bee9d2" + integrity sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw== dependencies: camelcase-css "^2.0.1" postcss-lab-function@^4.2.1: version "4.2.1" - resolved 
"https://registry.npmjs.org/postcss-lab-function/-/postcss-lab-function-4.2.1.tgz" + resolved "https://registry.yarnpkg.com/postcss-lab-function/-/postcss-lab-function-4.2.1.tgz#6fe4c015102ff7cd27d1bd5385582f67ebdbdc98" + integrity sha512-xuXll4isR03CrQsmxyz92LJB2xX9n+pZJ5jE9JgcnmsCammLyKdlzrBin+25dy6wIjfhJpKBAN80gsTlCgRk2w== dependencies: "@csstools/postcss-progressive-custom-properties" "^1.1.0" postcss-value-parser "^4.2.0" @@ -9115,13 +10260,15 @@ postcss-lab-function@^4.2.1: postcss-load-config@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-4.0.1.tgz#152383f481c2758274404e4962743191d73875bd" + integrity sha512-vEJIc8RdiBRu3oRAI0ymerOn+7rPuMvRXslTvZUKZonDHFIczxztIyJ1urxM1x9JXEikvpWWTUUqal5j/8QgvA== dependencies: lilconfig "^2.0.5" yaml "^2.1.1" postcss-loader@^6.2.1: version "6.2.1" - resolved "https://registry.npmjs.org/postcss-loader/-/postcss-loader-6.2.1.tgz" + resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-6.2.1.tgz#0895f7346b1702103d30fdc66e4d494a93c008ef" + integrity sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q== dependencies: cosmiconfig "^7.0.0" klona "^2.0.5" @@ -9129,15 +10276,18 @@ postcss-loader@^6.2.1: postcss-logical@^5.0.4: version "5.0.4" - resolved "https://registry.npmjs.org/postcss-logical/-/postcss-logical-5.0.4.tgz" + resolved "https://registry.yarnpkg.com/postcss-logical/-/postcss-logical-5.0.4.tgz#ec75b1ee54421acc04d5921576b7d8db6b0e6f73" + integrity sha512-RHXxplCeLh9VjinvMrZONq7im4wjWGlRJAqmAVLXyZaXwfDWP73/oq4NdIp+OZwhQUMj0zjqDfM5Fj7qby+B4g== postcss-media-minmax@^5.0.0: version "5.0.0" - resolved "https://registry.npmjs.org/postcss-media-minmax/-/postcss-media-minmax-5.0.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-media-minmax/-/postcss-media-minmax-5.0.0.tgz#7140bddec173e2d6d657edbd8554a55794e2a5b5" + integrity sha512-yDUvFf9QdFZTuCUg0g0uNSHVlJ5X1lSzDZjPSFaiCWvjgsvu8vEVxtahPrLMinIDEEGnx6cBe6iqdx5YWz08wQ== postcss-merge-longhand@^5.1.7: version "5.1.7" - resolved "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-5.1.7.tgz" + resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-5.1.7.tgz#24a1bdf402d9ef0e70f568f39bdc0344d568fb16" + integrity sha512-YCI9gZB+PLNskrK0BB3/2OzPnGhPkBEwmwhfYk1ilBHYVAZB7/tkTHFBAnCrvBBOmeYyMYw3DMjT55SyxMBzjQ== dependencies: postcss-value-parser "^4.2.0" stylehacks "^5.1.1" @@ -9145,6 +10295,7 @@ postcss-merge-longhand@^5.1.7: postcss-merge-rules@^5.1.4: version "5.1.4" resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-5.1.4.tgz#2f26fa5cacb75b1402e213789f6766ae5e40313c" + integrity sha512-0R2IuYpgU93y9lhVbO/OylTtKMVcHb67zjWIfCiKR9rWL3GUk1677LAqD/BcHizukdZEjT8Ru3oHRoAYoJy44g== dependencies: browserslist "^4.21.4" caniuse-api "^3.0.0" @@ -9153,13 +10304,15 @@ postcss-merge-rules@^5.1.4: postcss-minify-font-values@^5.1.0: version "5.1.0" - resolved "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz#f1df0014a726083d260d3bd85d7385fb89d1f01b" + integrity sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA== dependencies: postcss-value-parser "^4.2.0" postcss-minify-gradients@^5.1.1: version "5.1.1" - resolved "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz" + resolved 
"https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz#f1fe1b4f498134a5068240c2f25d46fcd236ba2c" + integrity sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw== dependencies: colord "^2.9.1" cssnano-utils "^3.1.0" @@ -9167,7 +10320,8 @@ postcss-minify-gradients@^5.1.1: postcss-minify-params@^5.1.4: version "5.1.4" - resolved "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-5.1.4.tgz" + resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-5.1.4.tgz#c06a6c787128b3208b38c9364cfc40c8aa5d7352" + integrity sha512-+mePA3MgdmVmv6g+30rn57USjOGSAyuxUmkfiWpzalZ8aiBkdPYjXWtHuwJGm1v5Ojy0Z0LaSYhHaLJQB0P8Jw== dependencies: browserslist "^4.21.4" cssnano-utils "^3.1.0" @@ -9175,17 +10329,20 @@ postcss-minify-params@^5.1.4: postcss-minify-selectors@^5.2.1: version "5.2.1" - resolved "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz" + resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz#d4e7e6b46147b8117ea9325a915a801d5fe656c6" + integrity sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg== dependencies: postcss-selector-parser "^6.0.5" postcss-modules-extract-imports@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" + integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== postcss-modules-local-by-default@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.3.tgz#b08eb4f083050708998ba2c6061b50c2870ca524" + integrity sha512-2/u2zraspoACtrbFRnTijMiQtb4GW4BvatjaG/bCjYQo8kLTdevCUlwuBHx2sCnSyrI3x3qj4ZK1j5LQBgzmwA== dependencies: icss-utils "^5.0.0" postcss-selector-parser "^6.0.2" @@ -9193,86 +10350,100 @@ postcss-modules-local-by-default@^4.0.3: postcss-modules-scope@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz#9ef3151456d3bbfa120ca44898dfca6f2fa01f06" + integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== dependencies: postcss-selector-parser "^6.0.4" postcss-modules-values@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c" + integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== dependencies: icss-utils "^5.0.0" postcss-nested@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/postcss-nested/-/postcss-nested-6.0.1.tgz#f83dc9846ca16d2f4fa864f16e9d9f7d0961662c" + integrity sha512-mEp4xPMi5bSWiMbsgoPfcP74lsWLHkQbZc3sY+jWYd65CUwXrUaTp0fmNpa01ZcETKlIgUdFN/MpS2xZtqL9dQ== dependencies: postcss-selector-parser "^6.0.11" postcss-nesting@^10.2.0: version "10.2.0" - resolved "https://registry.npmjs.org/postcss-nesting/-/postcss-nesting-10.2.0.tgz" + resolved 
"https://registry.yarnpkg.com/postcss-nesting/-/postcss-nesting-10.2.0.tgz#0b12ce0db8edfd2d8ae0aaf86427370b898890be" + integrity sha512-EwMkYchxiDiKUhlJGzWsD9b2zvq/r2SSubcRrgP+jujMXFzqvANLt16lJANC+5uZ6hjI7lpRmI6O8JIl+8l1KA== dependencies: "@csstools/selector-specificity" "^2.0.0" postcss-selector-parser "^6.0.10" postcss-normalize-charset@^5.1.0: version "5.1.0" - resolved "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz#9302de0b29094b52c259e9b2cf8dc0879879f0ed" + integrity sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg== postcss-normalize-display-values@^5.1.0: version "5.1.0" - resolved "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz#72abbae58081960e9edd7200fcf21ab8325c3da8" + integrity sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA== dependencies: postcss-value-parser "^4.2.0" postcss-normalize-positions@^5.1.1: version "5.1.1" - resolved "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz" + resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz#ef97279d894087b59325b45c47f1e863daefbb92" + integrity sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg== dependencies: postcss-value-parser "^4.2.0" postcss-normalize-repeat-style@^5.1.1: version "5.1.1" - resolved "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz" + resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz#e9eb96805204f4766df66fd09ed2e13545420fb2" + integrity sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g== dependencies: postcss-value-parser "^4.2.0" postcss-normalize-string@^5.1.0: version "5.1.0" - resolved "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz#411961169e07308c82c1f8c55f3e8a337757e228" + integrity sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w== dependencies: postcss-value-parser "^4.2.0" postcss-normalize-timing-functions@^5.1.0: version "5.1.0" - resolved "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz#d5614410f8f0b2388e9f240aa6011ba6f52dafbb" + integrity sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg== dependencies: postcss-value-parser "^4.2.0" postcss-normalize-unicode@^5.1.1: version "5.1.1" - resolved "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.1.tgz" + resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.1.tgz#f67297fca3fea7f17e0d2caa40769afc487aa030" + integrity sha512-qnCL5jzkNUmKVhZoENp1mJiGNPcsJCs1aaRmURmeJGES23Z/ajaln+EPTD+rBeNkSryI+2WTdW+lwcVdOikrpA== dependencies: browserslist "^4.21.4" 
postcss-value-parser "^4.2.0" postcss-normalize-url@^5.1.0: version "5.1.0" - resolved "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz#ed9d88ca82e21abef99f743457d3729a042adcdc" + integrity sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew== dependencies: normalize-url "^6.0.1" postcss-value-parser "^4.2.0" postcss-normalize-whitespace@^5.1.1: version "5.1.1" - resolved "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz" + resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz#08a1a0d1ffa17a7cc6efe1e6c9da969cc4493cfa" + integrity sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA== dependencies: postcss-value-parser "^4.2.0" postcss-normalize@^10.0.1: version "10.0.1" - resolved "https://registry.npmjs.org/postcss-normalize/-/postcss-normalize-10.0.1.tgz" + resolved "https://registry.yarnpkg.com/postcss-normalize/-/postcss-normalize-10.0.1.tgz#464692676b52792a06b06880a176279216540dd7" + integrity sha512-+5w18/rDev5mqERcG3W5GZNMJa1eoYYNGo8gB7tEwaos0ajk3ZXAI4mHGcNT47NE+ZnZD1pEpUOFLvltIwmeJA== dependencies: "@csstools/normalize.css" "*" postcss-browser-comments "^4" @@ -9280,34 +10451,40 @@ postcss-normalize@^10.0.1: postcss-opacity-percentage@^1.1.2: version "1.1.3" - resolved "https://registry.npmjs.org/postcss-opacity-percentage/-/postcss-opacity-percentage-1.1.3.tgz" + resolved "https://registry.yarnpkg.com/postcss-opacity-percentage/-/postcss-opacity-percentage-1.1.3.tgz#5b89b35551a556e20c5d23eb5260fbfcf5245da6" + integrity sha512-An6Ba4pHBiDtyVpSLymUUERMo2cU7s+Obz6BTrS+gxkbnSBNKSuD0AVUc+CpBMrpVPKKfoVz0WQCX+Tnst0i4A== postcss-ordered-values@^5.1.3: version "5.1.3" - resolved "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz" + resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz#b6fd2bd10f937b23d86bc829c69e7732ce76ea38" + integrity sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ== dependencies: cssnano-utils "^3.1.0" postcss-value-parser "^4.2.0" postcss-overflow-shorthand@^3.0.4: version "3.0.4" - resolved "https://registry.npmjs.org/postcss-overflow-shorthand/-/postcss-overflow-shorthand-3.0.4.tgz" + resolved "https://registry.yarnpkg.com/postcss-overflow-shorthand/-/postcss-overflow-shorthand-3.0.4.tgz#7ed6486fec44b76f0eab15aa4866cda5d55d893e" + integrity sha512-otYl/ylHK8Y9bcBnPLo3foYFLL6a6Ak+3EQBPOTR7luMYCOsiVTUk1iLvNf6tVPNGXcoL9Hoz37kpfriRIFb4A== dependencies: postcss-value-parser "^4.2.0" postcss-page-break@^3.0.4: version "3.0.4" - resolved "https://registry.npmjs.org/postcss-page-break/-/postcss-page-break-3.0.4.tgz" + resolved "https://registry.yarnpkg.com/postcss-page-break/-/postcss-page-break-3.0.4.tgz#7fbf741c233621622b68d435babfb70dd8c1ee5f" + integrity sha512-1JGu8oCjVXLa9q9rFTo4MbeeA5FMe00/9C7lN4va606Rdb+HkxXtXsmEDrIraQ11fGz/WvKWa8gMuCKkrXpTsQ== postcss-place@^7.0.5: version "7.0.5" - resolved "https://registry.npmjs.org/postcss-place/-/postcss-place-7.0.5.tgz" + resolved "https://registry.yarnpkg.com/postcss-place/-/postcss-place-7.0.5.tgz#95dbf85fd9656a3a6e60e832b5809914236986c4" + integrity sha512-wR8igaZROA6Z4pv0d+bvVrvGY4GVHihBCBQieXFY3kuSuMyOmEnnfFzHl/tQuqHZkfkIVBEbDvYcFfHmpSet9g== dependencies: postcss-value-parser "^4.2.0" 
postcss-preset-env@^7.0.1: version "7.8.3" - resolved "https://registry.npmjs.org/postcss-preset-env/-/postcss-preset-env-7.8.3.tgz" + resolved "https://registry.yarnpkg.com/postcss-preset-env/-/postcss-preset-env-7.8.3.tgz#2a50f5e612c3149cc7af75634e202a5b2ad4f1e2" + integrity sha512-T1LgRm5uEVFSEF83vHZJV2z19lHg4yJuZ6gXZZkqVsqv63nlr6zabMH3l4Pc01FQCyfWVrh2GaUeCVy9Po+Aag== dependencies: "@csstools/postcss-cascade-layers" "^1.1.1" "@csstools/postcss-color-function" "^1.1.1" @@ -9361,67 +10538,78 @@ postcss-preset-env@^7.0.1: postcss-pseudo-class-any-link@^7.1.6: version "7.1.6" - resolved "https://registry.npmjs.org/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-7.1.6.tgz" + resolved "https://registry.yarnpkg.com/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-7.1.6.tgz#2693b221902da772c278def85a4d9a64b6e617ab" + integrity sha512-9sCtZkO6f/5ML9WcTLcIyV1yz9D1rf0tWc+ulKcvV30s0iZKS/ONyETvoWsr6vnrmW+X+KmuK3gV/w5EWnT37w== dependencies: postcss-selector-parser "^6.0.10" postcss-reduce-initial@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-5.1.2.tgz#798cd77b3e033eae7105c18c9d371d989e1382d6" + integrity sha512-dE/y2XRaqAi6OvjzD22pjTUQ8eOfc6m/natGHgKFBK9DxFmIm69YmaRVQrGgFlEfc1HePIurY0TmDeROK05rIg== dependencies: browserslist "^4.21.4" caniuse-api "^3.0.0" postcss-reduce-transforms@^5.1.0: version "5.1.0" - resolved "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz#333b70e7758b802f3dd0ddfe98bb1ccfef96b6e9" + integrity sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ== dependencies: postcss-value-parser "^4.2.0" postcss-replace-overflow-wrap@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-4.0.0.tgz#d2df6bed10b477bf9c52fab28c568b4b29ca4319" + integrity sha512-KmF7SBPphT4gPPcKZc7aDkweHiKEEO8cla/GjcBK+ckKxiZslIu3C4GCRW3DNfL0o7yW7kMQu9xlZ1kXRXLXtw== postcss-selector-not@^6.0.1: version "6.0.1" - resolved "https://registry.npmjs.org/postcss-selector-not/-/postcss-selector-not-6.0.1.tgz" + resolved "https://registry.yarnpkg.com/postcss-selector-not/-/postcss-selector-not-6.0.1.tgz#8f0a709bf7d4b45222793fc34409be407537556d" + integrity sha512-1i9affjAe9xu/y9uqWH+tD4r6/hDaXJruk8xn2x1vzxC2U3J3LKO3zJW4CyxlNhA56pADJ/djpEwpH1RClI2rQ== dependencies: postcss-selector-parser "^6.0.10" postcss-selector-parser@^6.0.10, postcss-selector-parser@^6.0.11, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.9: version "6.0.13" resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.13.tgz#d05d8d76b1e8e173257ef9d60b706a8e5e99bf1b" + integrity sha512-EaV1Gl4mUEV4ddhDnv/xtj7sxwrwxdetHdWUGnT4VJQf+4d05v6lHYZr8N573k5Z0BViss7BDhfWtKS3+sfAqQ== dependencies: cssesc "^3.0.0" util-deprecate "^1.0.2" postcss-svgo@^5.1.0: version "5.1.0" - resolved "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-5.1.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-5.1.0.tgz#0a317400ced789f233a28826e77523f15857d80d" + integrity sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA== dependencies: 
postcss-value-parser "^4.2.0" svgo "^2.7.0" postcss-unique-selectors@^5.1.1: version "5.1.1" - resolved "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz" + resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz#a9f273d1eacd09e9aa6088f4b0507b18b1b541b6" + integrity sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA== dependencies: postcss-selector-parser "^6.0.5" postcss-value-parser@^4.0.0, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: version "4.2.0" - resolved "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz" + resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" + integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== postcss@^7.0.35: version "7.0.39" - resolved "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" + integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== dependencies: picocolors "^0.2.1" source-map "^0.6.1" postcss@^8.3.5, postcss@^8.4.21, postcss@^8.4.23, postcss@^8.4.4: - version "8.4.25" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.25.tgz#4a133f5e379eda7f61e906c3b1aaa9b81292726f" + version "8.4.28" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.28.tgz#c6cc681ed00109072816e1557f889ef51cf950a5" + integrity sha512-Z7V5j0cq8oEKyejIKfpD8b4eBy9cwW2JWPk0+fB1HOAMsfHbnAXLLS+PfVWlzMSLQaWttKDt607I0XHmpE67Vw== dependencies: nanoid "^3.3.6" picocolors "^1.0.0" @@ -9429,30 +10617,38 @@ postcss@^8.3.5, postcss@^8.4.21, postcss@^8.4.23, postcss@^8.4.4: prelude-ls@^1.2.1: version "1.2.1" - resolved "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== -prelude-ls@~1.1.2: - version "1.1.2" - resolved "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz" +prettier-linter-helpers@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz#d23d41fe1375646de2d0104d3454a3008802cf7b" + integrity sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w== + dependencies: + fast-diff "^1.1.2" -prettier@^2.6.1: - version "2.8.8" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da" +prettier@2.8.7: + version "2.8.7" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.7.tgz#bb79fc8729308549d28fe3a98fce73d2c0656450" + integrity sha512-yPngTo3aXUUmyuTjeTUT75txrf+aMh9FiD7q9ZE/i6r0bPb22g4FsE6Y338PQX1bmfy08i9QQCB7/rcUAVntfw== pretty-bytes@^5.3.0, pretty-bytes@^5.4.1: version "5.6.0" - resolved "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz" + resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb" + integrity sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg== pretty-error@^4.0.0: version "4.0.0" - resolved 
"https://registry.npmjs.org/pretty-error/-/pretty-error-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6" + integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== dependencies: lodash "^4.17.20" renderkid "^3.0.0" pretty-format@^27.0.2, pretty-format@^27.5.1: version "27.5.1" - resolved "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-27.5.1.tgz#2181879fdea51a7a5851fb39d920faa63f01d88e" + integrity sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ== dependencies: ansi-regex "^5.0.1" ansi-styles "^5.0.0" @@ -9460,16 +10656,18 @@ pretty-format@^27.0.2, pretty-format@^27.5.1: pretty-format@^28.1.3: version "28.1.3" - resolved "https://registry.npmjs.org/pretty-format/-/pretty-format-28.1.3.tgz" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-28.1.3.tgz#c9fba8cedf99ce50963a11b27d982a9ae90970d5" + integrity sha512-8gFb/To0OmxHR9+ZTb14Df2vNxdGCX8g1xWGUTqUw5TiZvcQf5sHKObd5UcPyLLyowNwDAMTF3XWOG1B6mxl1Q== dependencies: "@jest/schemas" "^28.1.3" ansi-regex "^5.0.1" ansi-styles "^5.0.0" react-is "^18.0.0" -pretty-format@^29.0.0, pretty-format@^29.6.1: - version "29.6.1" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.6.1.tgz#ec838c288850b7c4f9090b867c2d4f4edbfb0f3e" +pretty-format@^29.0.0, pretty-format@^29.6.2: + version "29.6.2" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.6.2.tgz#3d5829261a8a4d89d8b9769064b29c50ed486a47" + integrity sha512-1q0oC8eRveTg5nnBEWMXAU2qpv65Gnuf2eCQzSjxpWFkPaPARwqZZDGuNE0zPAZfTCHzIk3A8dIjwlQKKLphyg== dependencies: "@jest/schemas" "^29.6.0" ansi-styles "^5.0.0" @@ -9477,24 +10675,28 @@ pretty-format@^29.0.0, pretty-format@^29.6.1: process-nextick-args@~2.0.0: version "2.0.1" - resolved "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== promise@^8.1.0: version "8.3.0" - resolved "https://registry.npmjs.org/promise/-/promise-8.3.0.tgz" + resolved "https://registry.yarnpkg.com/promise/-/promise-8.3.0.tgz#8cb333d1edeb61ef23869fbb8a4ea0279ab60e0a" + integrity sha512-rZPNPKTOYVNEEKFaq1HqTgOwZD+4/YHS5ukLzQCypkj+OkYx7iv0mA91lJlpPPZ8vMau3IIGj5Qlwrx+8iiSmg== dependencies: asap "~2.0.6" prompts@^2.0.1, prompts@^2.4.2: version "2.4.2" - resolved "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== dependencies: kleur "^3.0.3" sisteransi "^1.0.5" -prop-types@^15.5.10, prop-types@^15.5.7, prop-types@^15.6.0, prop-types@^15.6.1, prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1: +prop-types@^15.5.10, prop-types@^15.6.0, prop-types@^15.6.1, prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1: version "15.8.1" - resolved "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz" + resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" + integrity 
sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== dependencies: loose-envify "^1.4.0" object-assign "^4.1.1" @@ -9502,76 +10704,104 @@ prop-types@^15.5.10, prop-types@^15.5.7, prop-types@^15.6.0, prop-types@^15.6.1, property-expr@^2.0.4: version "2.0.5" - resolved "https://registry.npmjs.org/property-expr/-/property-expr-2.0.5.tgz" + resolved "https://registry.yarnpkg.com/property-expr/-/property-expr-2.0.5.tgz#278bdb15308ae16af3e3b9640024524f4dc02cb4" + integrity sha512-IJUkICM5dP5znhCckHSv30Q4b5/JA5enCtkRHYaOVOAocnH/1BQEYTC5NMfT3AVl/iXKdr3aqQbQn9DxyWknwA== proxy-addr@~2.0.7: version "2.0.7" - resolved "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz" + resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== dependencies: forwarded "0.2.0" ipaddr.js "1.9.1" psl@^1.1.33: version "1.9.0" - resolved "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" + integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== punycode@1.3.2: version "1.3.2" - resolved "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" + integrity sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw== + +punycode@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== punycode@^2.1.0, punycode@^2.1.1: version "2.3.0" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" + integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== q@^1.1.2: version "1.5.1" - resolved "https://registry.npmjs.org/q/-/q-1.5.1.tgz" + resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" + integrity sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw== qs@6.11.0: version "6.11.0" - resolved "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" + integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== + dependencies: + side-channel "^1.0.4" + +qs@^6.11.0: + version "6.11.2" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.2.tgz#64bea51f12c1f5da1bc01496f48ffcff7c69d7d9" + integrity sha512-tDNIz22aBzCDxLtVH++VnTfzxlfeK5CbqohpSqpJgj1Wg/cQbStNAz3NuqCs5vV+pjBsK4x4pN9HlVh7rcYRiA== dependencies: side-channel "^1.0.4" querystring@0.2.0: version "0.2.0" - resolved "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz" + resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" + integrity sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g== querystring@^0.2.0: version "0.2.1" - resolved "https://registry.npmjs.org/querystring/-/querystring-0.2.1.tgz" + resolved 
"https://registry.yarnpkg.com/querystring/-/querystring-0.2.1.tgz#40d77615bb09d16902a85c3e38aa8b5ed761c2dd" + integrity sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg== querystringify@^2.1.1: version "2.2.0" - resolved "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz" + resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" + integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== queue-microtask@^1.2.2: version "1.2.3" - resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== quick-lru@^4.0.1: version "4.0.1" - resolved "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz" + resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-4.0.1.tgz#5b8878f113a58217848c6482026c73e1ba57727f" + integrity sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g== raf@^3.4.1: version "3.4.1" - resolved "https://registry.npmjs.org/raf/-/raf-3.4.1.tgz" + resolved "https://registry.yarnpkg.com/raf/-/raf-3.4.1.tgz#0742e99a4a6552f445d73e3ee0328af0ff1ede39" + integrity sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA== dependencies: performance-now "^2.1.0" randombytes@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" + integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" range-parser@^1.2.1, range-parser@~1.2.1: version "1.2.1" - resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz" + resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== raw-body@2.5.1: version "2.5.1" - resolved "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" + integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== dependencies: bytes "3.1.2" http-errors "2.0.0" @@ -9579,14 +10809,16 @@ raw-body@2.5.1: unpipe "1.0.0" react-apexcharts@^1.4.0: - version "1.4.0" - resolved "https://registry.npmjs.org/react-apexcharts/-/react-apexcharts-1.4.0.tgz" + version "1.4.1" + resolved "https://registry.yarnpkg.com/react-apexcharts/-/react-apexcharts-1.4.1.tgz#95ab31e4d2201308f59f3d2a4b65d10d9d0ea4bb" + integrity sha512-G14nVaD64Bnbgy8tYxkjuXEUp/7h30Q0U33xc3AwtGFijJB9nHqOt1a6eG0WBn055RgRg+NwqbKGtqPxy15d0Q== dependencies: - prop-types "^15.5.7" + prop-types "^15.8.1" react-app-polyfill@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz#95221e0a9bd259e5ca6b177c7bb1cb6768f68fd7" + integrity 
sha512-sZ41cxiU5llIB003yxxQBYrARBqe0repqPTTYBTmMqTz9szeBbE37BehCE891NZsmdZqqP+xWKdT3eo3vOzN8w== dependencies: core-js "^3.19.2" object-assign "^4.1.1" @@ -9598,19 +10830,22 @@ react-app-polyfill@^3.0.0: react-copy-to-clipboard@^5.0.4: version "5.1.0" resolved "https://registry.yarnpkg.com/react-copy-to-clipboard/-/react-copy-to-clipboard-5.1.0.tgz#09aae5ec4c62750ccb2e6421a58725eabc41255c" + integrity sha512-k61RsNgAayIJNoy9yDsYzDe/yAZAzEbEgcz3DZMhF686LEyukcE1hzurxe85JandPUG+yTfGVFzuEw3xt8WP/A== dependencies: copy-to-clipboard "^3.3.1" prop-types "^15.8.1" react-day-picker@^7.0.5: version "7.4.10" - resolved "https://registry.npmjs.org/react-day-picker/-/react-day-picker-7.4.10.tgz" + resolved "https://registry.yarnpkg.com/react-day-picker/-/react-day-picker-7.4.10.tgz#d3928fa65c04379ad28c76de22aa85374a8361e1" + integrity sha512-/QkK75qLKdyLmv0kcVzhL7HoJPazoZXS8a6HixbVoK6vWey1Od1WRLcxfyEiUsRfccAlIlf6oKHShqY2SM82rA== dependencies: prop-types "^15.6.2" react-dev-utils@^12.0.1: version "12.0.1" - resolved "https://registry.npmjs.org/react-dev-utils/-/react-dev-utils-12.0.1.tgz" + resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-12.0.1.tgz#ba92edb4a1f379bd46ccd6bcd4e7bc398df33e73" + integrity sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ== dependencies: "@babel/code-frame" "^7.16.0" address "^1.1.2" @@ -9639,7 +10874,8 @@ react-dev-utils@^12.0.1: react-dom@^17.0.2: version "17.0.2" - resolved "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz" + resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-17.0.2.tgz#ecffb6845e3ad8dbfcdc498f0d0a939736502c23" + integrity sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA== dependencies: loose-envify "^1.1.0" object-assign "^4.1.1" @@ -9648,6 +10884,7 @@ react-dom@^17.0.2: react-dropzone@^12.0.4: version "12.1.0" resolved "https://registry.yarnpkg.com/react-dropzone/-/react-dropzone-12.1.0.tgz#e097b37e9da6f9e324efc757b7434ebc6f3dc2cb" + integrity sha512-iBYHA1rbopIvtzokEX4QubO6qk5IF/x3BtKGu74rF2JkQDXnwC4uO/lHKpaw4PJIV6iIAYOlwLv2FpiGyqHNog== dependencies: attr-accept "^2.2.2" file-selector "^0.5.0" @@ -9655,19 +10892,23 @@ react-dropzone@^12.0.4: react-error-overlay@^6.0.11: version "6.0.11" - resolved "https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-6.0.11.tgz" + resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-6.0.11.tgz#92835de5841c5cf08ba00ddd2d677b6d17ff9adb" + integrity sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg== react-fast-compare@^2.0.1: version "2.0.4" - resolved "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-2.0.4.tgz" + resolved "https://registry.yarnpkg.com/react-fast-compare/-/react-fast-compare-2.0.4.tgz#e84b4d455b0fec113e0402c329352715196f81f9" + integrity sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw== react-fast-compare@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/react-fast-compare/-/react-fast-compare-3.2.2.tgz#929a97a532304ce9fee4bcae44234f1ce2c21d49" + integrity sha512-nsO+KSNgo1SbJqJEYRE9ERzo7YtYbou/OqjSQKxV7jcKox7+usiUVZOAC+XnDOABXggQTno0Y1CpVnuWEc1boQ== react-helmet-async@^1.2.3: version "1.3.0" resolved "https://registry.yarnpkg.com/react-helmet-async/-/react-helmet-async-1.3.0.tgz#7bd5bf8c5c69ea9f02f6083f14ce33ef545c222e" + integrity 
sha512-9jZ57/dAn9t3q6hneQS0wukqC2ENOBgMNVEhb/ZG9ZSxUetzVIw4iAmEU38IaVg3QGYauQPhSeUTuIUtFglWpg== dependencies: "@babel/runtime" "^7.12.5" invariant "^2.2.4" @@ -9678,38 +10919,46 @@ react-helmet-async@^1.2.3: react-icons@^4.3.1: version "4.10.1" resolved "https://registry.yarnpkg.com/react-icons/-/react-icons-4.10.1.tgz#3f3b5eec1f63c1796f6a26174a1091ca6437a500" + integrity sha512-/ngzDP/77tlCfqthiiGNZeYFACw85fUjZtLbedmJ5DTlNDIwETxhwBzdOJ21zj4iJdvc0J3y7yOsX3PpxAJzrw== react-if@^4.1.1: version "4.1.5" resolved "https://registry.yarnpkg.com/react-if/-/react-if-4.1.5.tgz#f23f49277779e07240c61bdc7ab12671ff3fc20f" + integrity sha512-Uk+Ub2gC83PAakuU4+7iLdTEP4LPi2ihNEPCtz/vr8SLGbzkMApbpYbkDZ5z9zYXurd0gg+EK/bpOLFFC1r1eQ== react-is@^16.13.1, react-is@^16.7.0, react-is@^16.8.2: version "16.13.1" - resolved "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" + integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== react-is@^17.0.1, react-is@^17.0.2: version "17.0.2" - resolved "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" + integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== react-is@^18.0.0, react-is@^18.2.0: version "18.2.0" - resolved "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" + integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== react-native-get-random-values@^1.4.0: version "1.9.0" - resolved "https://registry.npmjs.org/react-native-get-random-values/-/react-native-get-random-values-1.9.0.tgz" + resolved "https://registry.yarnpkg.com/react-native-get-random-values/-/react-native-get-random-values-1.9.0.tgz#6cb30511c406922e75fe73833dc1812a85bfb37e" + integrity sha512-+29IR2oxzxNVeaRwCqGZ9ABadzMI8SLTBidrIDXPOkKnm5+kEmLt34QKM4JV+d2usPErvKyS85le0OmGTHnyWQ== dependencies: fast-base64-decode "^1.0.0" react-native-url-polyfill@^1.3.0: version "1.3.0" - resolved "https://registry.npmjs.org/react-native-url-polyfill/-/react-native-url-polyfill-1.3.0.tgz" + resolved "https://registry.yarnpkg.com/react-native-url-polyfill/-/react-native-url-polyfill-1.3.0.tgz#c1763de0f2a8c22cc3e959b654c8790622b6ef6a" + integrity sha512-w9JfSkvpqqlix9UjDvJjm1EjSt652zVQ6iwCIj1cVVkwXf4jQhQgTNXY6EVTwuAmUjg6BC6k9RHCBynoLFo3IQ== dependencies: whatwg-url-without-unicode "8.0.0-3" react-redux@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/react-redux/-/react-redux-6.0.1.tgz#0d423e2c1cb10ada87293d47e7de7c329623ba4d" + integrity sha512-T52I52Kxhbqy/6TEfBv85rQSDz6+Y28V/pf52vDWs1YRXG19mcFOGfHnY2HsNFHyhP+ST34Aih98fvt6tqwVcQ== dependencies: "@babel/runtime" "^7.3.1" hoist-non-react-statics "^3.3.0" @@ -9721,6 +10970,7 @@ react-redux@^6.0.1: react-redux@^7.2.6: version "7.2.9" resolved "https://registry.yarnpkg.com/react-redux/-/react-redux-7.2.9.tgz#09488fbb9416a4efe3735b7235055442b042481d" + integrity sha512-Gx4L3uM182jEEayZfRbI/G11ZpYdNAnBs70lFVMNdHJI76XYtR+7m0MN+eAs7UHBPhWXcnFPaS+9owSCJQHNpQ== dependencies: "@babel/runtime" "^7.15.4" "@types/react-redux" "^7.1.20" @@ -9731,23 +10981,27 @@ react-redux@^7.2.6: react-refresh@^0.11.0: version "0.11.0" - resolved 
"https://registry.npmjs.org/react-refresh/-/react-refresh-0.11.0.tgz" + resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.11.0.tgz#77198b944733f0f1f1a90e791de4541f9f074046" + integrity sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A== react-router-dom@6.0.0: version "6.0.0" - resolved "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.0.0.tgz" + resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-6.0.0.tgz#8a55a9ab3cc12f7eb06472f6c8001515af651c6a" + integrity sha512-bPXyYipf0zu6K7mHSEmNO5YqLKq2q9N+Dsahw9Xh3oq1IirsI3vbnIYcVWin6A0zWyHmKhMGoV7Gr0j0kcuVFg== dependencies: react-router "6.0.0" react-router@6.0.0: version "6.0.0" - resolved "https://registry.npmjs.org/react-router/-/react-router-6.0.0.tgz" + resolved "https://registry.yarnpkg.com/react-router/-/react-router-6.0.0.tgz#a8803ac7b612c2a2d31cc466ceda656cf31fdfb9" + integrity sha512-FcTRCihYZvERMNbG54D9+Wkv2cj/OtoxNlA/87D7vxKYlmSmbF9J9XChI9Is44j/behEiOhbovgVZBhKQn+wgA== dependencies: history "^5.0.3" react-scripts@^5.0.1: version "5.0.1" - resolved "https://registry.npmjs.org/react-scripts/-/react-scripts-5.0.1.tgz" + resolved "https://registry.yarnpkg.com/react-scripts/-/react-scripts-5.0.1.tgz#6285dbd65a8ba6e49ca8d651ce30645a6d980003" + integrity sha512-8VAmEm/ZAwQzJ+GOMLbBsTdDKOpuZh7RPs0UymvBR2vRk4iZWCskjbFnxqjrzoIvlNNRZ3QJFx6/qDSi6zSnaQ== dependencies: "@babel/core" "^7.16.0" "@pmmmwh/react-refresh-webpack-plugin" "^0.5.3" @@ -9801,7 +11055,8 @@ react-scripts@^5.0.1: react-transition-group@^4.4.5: version "4.4.5" - resolved "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz" + resolved "https://registry.yarnpkg.com/react-transition-group/-/react-transition-group-4.4.5.tgz#e53d4e3f3344da8521489fbef8f2581d42becdd1" + integrity sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g== dependencies: "@babel/runtime" "^7.5.5" dom-helpers "^5.0.1" @@ -9810,20 +11065,23 @@ react-transition-group@^4.4.5: react@^17.0.2: version "17.0.2" - resolved "https://registry.npmjs.org/react/-/react-17.0.2.tgz" + resolved "https://registry.yarnpkg.com/react/-/react-17.0.2.tgz#d0b5cc516d29eb3eee383f75b62864cfb6800037" + integrity sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA== dependencies: loose-envify "^1.1.0" object-assign "^4.1.1" read-cache@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774" + integrity sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA== dependencies: pify "^2.3.0" readable-stream@^2.0.1: version "2.3.8" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b" + integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== dependencies: core-util-is "~1.0.0" inherits "~2.0.3" @@ -9836,6 +11094,7 @@ readable-stream@^2.0.1: readable-stream@^3.0.6: version "3.6.2" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== dependencies: inherits "^2.0.3" string_decoder "^1.1.1" @@ -9843,19 +11102,22 @@ readable-stream@^3.0.6: 
readdirp@~3.6.0: version "3.6.0" - resolved "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== dependencies: picomatch "^2.2.1" recursive-readdir@^2.2.2: version "2.2.3" - resolved "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.3.tgz" + resolved "https://registry.yarnpkg.com/recursive-readdir/-/recursive-readdir-2.2.3.tgz#e726f328c0d69153bcabd5c322d3195252379372" + integrity sha512-8HrF5ZsXk5FAH9dgsx3BlUer73nIhuj+9OrQwEbLTPOBzGkL1lsFCR01am+v+0m2Cmbs1nP12hLDl5FA7EszKA== dependencies: minimatch "^3.0.5" redent@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" + integrity sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg== dependencies: indent-string "^4.0.0" strip-indent "^3.0.0" @@ -9863,40 +11125,65 @@ redent@^3.0.0: redux-thunk@^2.3.0, redux-thunk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/redux-thunk/-/redux-thunk-2.4.2.tgz#b9d05d11994b99f7a91ea223e8b04cf0afa5ef3b" + integrity sha512-+P3TjtnP0k/FEjcBL5FZpoovtvrTNT/UXd4/sluaSyrURlSlhLSzEdfsTBW7WsKB6yPvgd7q/iZPICFjW4o57Q== redux@^4.0.0, redux@^4.2.1: version "4.2.1" resolved "https://registry.yarnpkg.com/redux/-/redux-4.2.1.tgz#c08f4306826c49b5e9dc901dee0452ea8fce6197" + integrity sha512-LAUYz4lc+Do8/g7aeRa8JkyDErK6ekstQaqWQrNRW//MY1TvCEpMtpTWvlQ+FPbWCx+Xixu/6SHt5N0HR+SB4w== dependencies: "@babel/runtime" "^7.9.2" +reflect.getprototypeof@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/reflect.getprototypeof/-/reflect.getprototypeof-1.0.3.tgz#2738fd896fcc3477ffbd4190b40c2458026b6928" + integrity sha512-TTAOZpkJ2YLxl7mVHWrNo3iDMEkYlva/kgFcXndqMgbo/AZUmmavEkdXV+hXtE4P8xdyEKRzalaFqZVuwIk/Nw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + get-intrinsic "^1.1.1" + globalthis "^1.0.3" + which-builtin-type "^1.1.3" + regenerate-unicode-properties@^10.1.0: version "10.1.0" - resolved "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz" + resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz#7c3192cab6dd24e21cb4461e5ddd7dd24fa8374c" + integrity sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ== dependencies: regenerate "^1.4.2" regenerate@^1.4.2: version "1.4.2" - resolved "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz" + resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" + integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== -regenerator-runtime@^0.13.11, regenerator-runtime@^0.13.9: +regenerator-runtime@^0.13.9: version "0.13.11" - resolved "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9" + integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg== + +regenerator-runtime@^0.14.0: + version "0.14.0" + resolved 
"https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.0.tgz#5e19d68eb12d486f797e15a3c6a918f7cec5eb45" + integrity sha512-srw17NI0TUWHuGa5CFGGmhfNIeja30WMBfbslPNhf6JrqQlLN5gcrvig1oqPxiVaXb0oW0XRKtH6Nngs5lKCIA== -regenerator-transform@^0.15.1: - version "0.15.1" - resolved "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.1.tgz" +regenerator-transform@^0.15.2: + version "0.15.2" + resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.15.2.tgz#5bbae58b522098ebdf09bca2f83838929001c7a4" + integrity sha512-hfMp2BoF0qOk3uc5V20ALGDS2ddjQaLrdl7xrGXvAIow7qeWRM2VA2HuCHkUKk9slq3VwEwLNK3DFBqDfPGYtg== dependencies: "@babel/runtime" "^7.8.4" regex-parser@^2.2.11: version "2.2.11" - resolved "https://registry.npmjs.org/regex-parser/-/regex-parser-2.2.11.tgz" + resolved "https://registry.yarnpkg.com/regex-parser/-/regex-parser-2.2.11.tgz#3b37ec9049e19479806e878cabe7c1ca83ccfe58" + integrity sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q== regexp.prototype.flags@^1.4.3, regexp.prototype.flags@^1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.0.tgz#fe7ce25e7e4cca8db37b6634c8a2c7009199b9cb" + integrity sha512-0SutC3pNudRKgquxGoRGIz946MZVHqbNfPjBdxeOhBrdgDKlRoXmYLQN9xRbrR09ZXWeGAdPuif7egofn6v5LA== dependencies: call-bind "^1.0.2" define-properties "^1.2.0" @@ -9905,6 +11192,7 @@ regexp.prototype.flags@^1.4.3, regexp.prototype.flags@^1.5.0: regexpu-core@^5.3.1: version "5.3.2" resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.3.2.tgz#11a2b06884f3527aec3e93dbbf4a3b958a95546b" + integrity sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ== dependencies: "@babel/regjsgen" "^0.8.0" regenerate "^1.4.2" @@ -9915,17 +11203,20 @@ regexpu-core@^5.3.1: regjsparser@^0.9.1: version "0.9.1" - resolved "https://registry.npmjs.org/regjsparser/-/regjsparser-0.9.1.tgz" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709" + integrity sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ== dependencies: jsesc "~0.5.0" relateurl@^0.2.7: version "0.2.7" - resolved "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz" + resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" + integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog== renderkid@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/renderkid/-/renderkid-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a" + integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== dependencies: css-select "^4.1.3" dom-converter "^0.2.0" @@ -9935,37 +11226,45 @@ renderkid@^3.0.0: require-directory@^2.1.1: version "2.1.1" - resolved "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== require-from-string@^2.0.2: version "2.0.2" - resolved "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz" + resolved 
"https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== requires-port@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" + integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== reselect@^4.1.6, reselect@^4.1.8: version "4.1.8" resolved "https://registry.yarnpkg.com/reselect/-/reselect-4.1.8.tgz#3f5dc671ea168dccdeb3e141236f69f02eaec524" + integrity sha512-ab9EmR80F/zQTMNeneUr4cv+jSwPJgIlvEmVwLerwrWVbpLlBuls9XHzIeTFy4cegU2NHBp3va0LKOzU5qFEYQ== resolve-cwd@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== dependencies: resolve-from "^5.0.0" resolve-from@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== resolve-from@^5.0.0: version "5.0.0" - resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== resolve-url-loader@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/resolve-url-loader/-/resolve-url-loader-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/resolve-url-loader/-/resolve-url-loader-4.0.0.tgz#d50d4ddc746bb10468443167acf800dcd6c3ad57" + integrity sha512-05VEMczVREcbtT7Bz+C+96eUO5HDNvdthIiMB34t7FcF8ehcu4wC0sSgPUubs3XW2Q3CNLJk/BJrCU9wVRymiA== dependencies: adjust-sourcemap-loader "^4.0.0" convert-source-map "^1.7.0" @@ -9976,18 +11275,21 @@ resolve-url-loader@^4.0.0: resolve.exports@^1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.1.tgz#05cfd5b3edf641571fd46fa608b610dda9ead999" + integrity sha512-/NtpHNDN7jWhAaQ9BvBUYZ6YTXsRBgfqWFWP7BZBaoMJO/I3G5OFzvTuWNlZC3aPjins1F+TNrLKsGbH4rfsRQ== -resolve@^1.1.7, resolve@^1.12.0, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.1, resolve@^1.22.2: - version "1.22.2" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.2.tgz#0ed0943d4e301867955766c9f3e1ae6d01c6845f" +resolve@^1.1.7, resolve@^1.12.0, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.2, resolve@^1.22.3, resolve@^1.22.4: + version "1.22.4" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.4.tgz#1dc40df46554cdaf8948a486a10f6ba1e2026c34" + integrity sha512-PXNdCiPqDqeUou+w1C2eTQbNfxKSuMxqTCuvlmmMsk1NWHL5fRrhY6Pl0qEYYc6+QqGClco1Qj8XnjPego4wfg== dependencies: - is-core-module "^2.11.0" + is-core-module "^2.13.0" path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" resolve@^2.0.0-next.4: version "2.0.0-next.4" resolved 
"https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.4.tgz#3d37a113d6429f496ec4752d2a2e58efb1fd4660" + integrity sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ== dependencies: is-core-module "^2.9.0" path-parse "^1.0.7" @@ -9996,28 +11298,34 @@ resolve@^2.0.0-next.4: response-iterator@^0.2.6: version "0.2.6" resolved "https://registry.yarnpkg.com/response-iterator/-/response-iterator-0.2.6.tgz#249005fb14d2e4eeb478a3f735a28fd8b4c9f3da" + integrity sha512-pVzEEzrsg23Sh053rmDUvLSkGXluZio0qu8VT6ukrYuvtjVfCbDZH9d6PGXb8HZfzdNZt8feXv/jvUzlhRgLnw== retry@^0.13.1: version "0.13.1" - resolved "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz" + resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" + integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== reusify@^1.0.4: version "1.0.4" - resolved "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== rifm@^0.12.1: version "0.12.1" resolved "https://registry.yarnpkg.com/rifm/-/rifm-0.12.1.tgz#8fa77f45b7f1cda2a0068787ac821f0593967ac4" + integrity sha512-OGA1Bitg/dSJtI/c4dh90svzaUPt228kzFsUkJbtA2c964IqEAwWXeL9ZJi86xWv3j5SMqRvGULl7bA6cK0Bvg== rimraf@^3.0.0, rimraf@^3.0.2: version "3.0.2" - resolved "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== dependencies: glob "^7.1.3" rollup-plugin-terser@^7.0.0: version "7.0.2" - resolved "https://registry.npmjs.org/rollup-plugin-terser/-/rollup-plugin-terser-7.0.2.tgz" + resolved "https://registry.yarnpkg.com/rollup-plugin-terser/-/rollup-plugin-terser-7.0.2.tgz#e8fbba4869981b2dc35ae7e8a502d5c6c04d324d" + integrity sha512-w3iIaU4OxcF52UUXiZNsNeuXIMDvFrr+ZXK6bFZ0Q60qyVfq4uLptoS4bbq3paG3x216eQllFZX7zt6TIImguQ== dependencies: "@babel/code-frame" "^7.10.4" jest-worker "^26.2.1" @@ -10026,19 +11334,22 @@ rollup-plugin-terser@^7.0.0: rollup@^2.43.1: version "2.79.1" - resolved "https://registry.npmjs.org/rollup/-/rollup-2.79.1.tgz" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.79.1.tgz#bedee8faef7c9f93a2647ac0108748f497f081c7" + integrity sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw== optionalDependencies: fsevents "~2.3.2" run-parallel@^1.1.9: version "1.2.0" - resolved "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== dependencies: queue-microtask "^1.2.2" safe-array-concat@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/safe-array-concat/-/safe-array-concat-1.0.0.tgz#2064223cba3c08d2ee05148eedbc563cd6d84060" + integrity sha512-9dVEFruWIsnie89yym+xWTAYASdpw3CJV7Li/6zBewGf9z2i1j31rP6jnY0pHEO4QZh6N0K11bFjWmdR8UGdPQ== dependencies: call-bind "^1.0.2" get-intrinsic "^1.2.0" @@ -10047,15 +11358,18 @@ safe-array-concat@^1.0.0: safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" - resolved 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0: version "5.2.1" - resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== safe-regex-test@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.0.tgz#793b874d524eb3640d1873aad03596db2d4f2295" + integrity sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== dependencies: call-bind "^1.0.2" get-intrinsic "^1.1.3" @@ -10063,39 +11377,46 @@ safe-regex-test@^1.0.0: "safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0": version "2.1.2" - resolved "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sanitize.css@*: version "13.0.0" - resolved "https://registry.npmjs.org/sanitize.css/-/sanitize.css-13.0.0.tgz" + resolved "https://registry.yarnpkg.com/sanitize.css/-/sanitize.css-13.0.0.tgz#2675553974b27964c75562ade3bd85d79879f173" + integrity sha512-ZRwKbh/eQ6w9vmTjkuG0Ioi3HBwPFce0O+v//ve+aOq1oeCy7jMV2qzzAlpsNuqpqCBjjriM1lbtZbF/Q8jVyA== sass-loader@^12.3.0: version "12.6.0" - resolved "https://registry.npmjs.org/sass-loader/-/sass-loader-12.6.0.tgz" + resolved "https://registry.yarnpkg.com/sass-loader/-/sass-loader-12.6.0.tgz#5148362c8e2cdd4b950f3c63ac5d16dbfed37bcb" + integrity sha512-oLTaH0YCtX4cfnJZxKSLAyglED0naiYfNG1iXfU5w1LNZ+ukoA5DtyDIN5zmKVZwYNJP4KRc5Y3hkWga+7tYfA== dependencies: klona "^2.0.4" neo-async "^2.6.2" sax@~1.2.4: version "1.2.4" - resolved "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== saxes@^5.0.1: version "5.0.1" - resolved "https://registry.npmjs.org/saxes/-/saxes-5.0.1.tgz" + resolved "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" + integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== dependencies: xmlchars "^2.2.0" scheduler@^0.20.2: version "0.20.2" - resolved "https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz" + resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.20.2.tgz#4baee39436e34aa93b4874bddcbf0fe8b8b50e91" + integrity sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ== dependencies: loose-envify "^1.1.0" object-assign "^4.1.1" schema-utils@2.7.0: version "2.7.0" - resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.0.tgz#17151f76d8eae67fbbf77960c33c676ad9f4efc7" + integrity 
sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== dependencies: "@types/json-schema" "^7.0.4" ajv "^6.12.2" @@ -10103,7 +11424,8 @@ schema-utils@2.7.0: schema-utils@^2.6.5: version "2.7.1" - resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7" + integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== dependencies: "@types/json-schema" "^7.0.5" ajv "^6.12.4" @@ -10112,6 +11434,7 @@ schema-utils@^2.6.5: schema-utils@^3.0.0, schema-utils@^3.1.1, schema-utils@^3.2.0: version "3.3.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.3.0.tgz#f50a88877c3c01652a15b622ae9e9795df7a60fe" + integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== dependencies: "@types/json-schema" "^7.0.8" ajv "^6.12.5" @@ -10120,6 +11443,7 @@ schema-utils@^3.0.0, schema-utils@^3.1.1, schema-utils@^3.2.0: schema-utils@^4.0.0: version "4.2.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.2.0.tgz#70d7c93e153a273a805801882ebd3bff20d89c8b" + integrity sha512-L0jRsrPpjdckP3oPug3/VxNKt2trR8TcabrM6FOAAlvC/9Phcmm+cuAgTlxBqdBR1WJx7Naj9WHw+aOmheSVbw== dependencies: "@types/json-schema" "^7.0.9" ajv "^8.9.0" @@ -10128,31 +11452,37 @@ schema-utils@^4.0.0: select-hose@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" + integrity sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg== selfsigned@^2.1.1: version "2.1.1" - resolved "https://registry.npmjs.org/selfsigned/-/selfsigned-2.1.1.tgz" + resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-2.1.1.tgz#18a7613d714c0cd3385c48af0075abf3f266af61" + integrity sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ== dependencies: node-forge "^1" semver@^5.5.0: version "5.7.2" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" + integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== -semver@^6.0.0, semver@^6.3.0: +semver@^6.0.0, semver@^6.3.0, semver@^6.3.1: version "6.3.1" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.3.2, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8: +semver@^7.3.2, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3: version "7.5.4" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" + integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== dependencies: lru-cache "^6.0.0" send@0.18.0: version "0.18.0" - resolved "https://registry.npmjs.org/send/-/send-0.18.0.tgz" + resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" + integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== dependencies: debug "2.6.9" depd "2.0.0" @@ -10170,19 +11500,22 @@ send@0.18.0: serialize-javascript@^4.0.0: version "4.0.0" 
- resolved "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa" + integrity sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw== dependencies: randombytes "^2.1.0" serialize-javascript@^6.0.0, serialize-javascript@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.1.tgz#b206efb27c3da0b0ab6b52f48d170b7996458e5c" + integrity sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w== dependencies: randombytes "^2.1.0" serve-index@^1.9.1: version "1.9.1" - resolved "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz" + resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" + integrity sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw== dependencies: accepts "~1.3.4" batch "0.6.1" @@ -10194,7 +11527,8 @@ serve-index@^1.9.1: serve-static@1.15.0: version "1.15.0" - resolved "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" + integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== dependencies: encodeurl "~1.0.2" escape-html "~1.0.3" @@ -10203,43 +11537,52 @@ serve-static@1.15.0: setprototypeof@1.1.0: version "1.1.0" - resolved "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" + integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== setprototypeof@1.2.0: version "1.2.0" - resolved "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== shallowequal@^1.1.0: version "1.1.0" - resolved "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz" + resolved "https://registry.yarnpkg.com/shallowequal/-/shallowequal-1.1.0.tgz#188d521de95b9087404fd4dcb68b13df0ae4e7f8" + integrity sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ== shebang-command@^1.2.0: version "1.2.0" - resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" + integrity sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg== dependencies: shebang-regex "^1.0.0" shebang-command@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== dependencies: shebang-regex "^3.0.0" shebang-regex@^1.0.0: version "1.0.0" - resolved 
"https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" + integrity sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ== shebang-regex@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== shell-quote@^1.7.3: version "1.8.1" resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.8.1.tgz#6dbf4db75515ad5bac63b4f1894c3a154c766680" + integrity sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA== side-channel@^1.0.4: version "1.0.4" - resolved "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== dependencies: call-bind "^1.0.0" get-intrinsic "^1.0.2" @@ -10247,11 +11590,13 @@ side-channel@^1.0.4: signal-exit@^3.0.2, signal-exit@^3.0.3: version "3.0.7" - resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== simplebar-react@^2.3.6: version "2.4.3" resolved "https://registry.yarnpkg.com/simplebar-react/-/simplebar-react-2.4.3.tgz#79c830711c23a5ae457ef73420f5752d4a1b3133" + integrity sha512-Ep8gqAUZAS5IC2lT5RE4t1ZFUIVACqbrSRQvFV9a6NbVUzXzOMnc4P82Hl8Ak77AnPQvmgUwZS7aUKLyBoMAcg== dependencies: prop-types "^15.6.1" simplebar "^5.3.9" @@ -10259,6 +11604,7 @@ simplebar-react@^2.3.6: simplebar@^5.3.6, simplebar@^5.3.9: version "5.3.9" resolved "https://registry.yarnpkg.com/simplebar/-/simplebar-5.3.9.tgz#168ea0eb6d52f29f03960e40d9b69a1b28cf6318" + integrity sha512-1vIIpjDvY9sVH14e0LGeiCiTFU3ILqAghzO6OI9axeG+mvU/vMSrvXeAXkBolqFFz3XYaY8n5ahH9MeP3sp2Ag== dependencies: "@juggle/resize-observer" "^3.3.1" can-use-dom "^0.1.0" @@ -10269,23 +11615,28 @@ simplebar@^5.3.6, simplebar@^5.3.9: sisteransi@^1.0.5: version "1.0.5" - resolved "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz" + resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== size-sensor@^1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/size-sensor/-/size-sensor-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/size-sensor/-/size-sensor-1.0.1.tgz#f84e46206d3e259faff1d548e4b3beca93219dbb" + integrity sha512-QTy7MnuugCFXIedXRpUSk9gUnyNiaxIdxGfUjr8xxXOqIB3QvBUYP9+b51oCg2C4dnhaeNk/h57TxjbvoJrJUA== slash@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== slash@^4.0.0: version "4.0.0" - resolved 
"https://registry.npmjs.org/slash/-/slash-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" + integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== sockjs@^0.3.24: version "0.3.24" - resolved "https://registry.npmjs.org/sockjs/-/sockjs-0.3.24.tgz" + resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.24.tgz#c9bc8995f33a111bea0395ec30aa3206bdb5ccce" + integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== dependencies: faye-websocket "^0.11.3" uuid "^8.3.2" @@ -10293,15 +11644,18 @@ sockjs@^0.3.24: source-list-map@^2.0.0, source-list-map@^2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/source-list-map/-/source-list-map-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" + integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== source-map-js@^1.0.1, source-map-js@^1.0.2: version "1.0.2" - resolved "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== source-map-loader@^3.0.0: version "3.0.2" - resolved "https://registry.npmjs.org/source-map-loader/-/source-map-loader-3.0.2.tgz" + resolved "https://registry.yarnpkg.com/source-map-loader/-/source-map-loader-3.0.2.tgz#af23192f9b344daa729f6772933194cc5fa54fee" + integrity sha512-BokxPoLjyl3iOrgkWaakaxqnelAJSS+0V+De0kKIq6lyWrXuiPgYTGp6z3iHmqljKAaLXwZa+ctD8GccRJeVvg== dependencies: abab "^2.0.5" iconv-lite "^0.6.3" @@ -10309,36 +11663,43 @@ source-map-loader@^3.0.0: source-map-support@^0.5.6, source-map-support@~0.5.20: version "0.5.21" - resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" + integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" source-map@0.6.1, source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: version "0.6.1" - resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== source-map@^0.5.7: version "0.5.7" - resolved "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== source-map@^0.7.3: version "0.7.4" - resolved "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" + integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== source-map@^0.8.0-beta.0: version "0.8.0-beta.0" - resolved 
"https://registry.npmjs.org/source-map/-/source-map-0.8.0-beta.0.tgz" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11" + integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA== dependencies: whatwg-url "^7.0.0" sourcemap-codec@^1.4.8: version "1.4.8" - resolved "https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz" + resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" + integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== spdy-transport@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/spdy-transport/-/spdy-transport-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" + integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== dependencies: debug "^4.1.0" detect-node "^2.0.4" @@ -10349,7 +11710,8 @@ spdy-transport@^3.0.0: spdy@^4.0.2: version "4.0.2" - resolved "https://registry.npmjs.org/spdy/-/spdy-4.0.2.tgz" + resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.2.tgz#b74f466203a3eda452c02492b91fb9e84a27677b" + integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA== dependencies: debug "^4.1.0" handle-thing "^2.0.0" @@ -10359,61 +11721,73 @@ spdy@^4.0.2: sprintf-js@~1.0.2: version "1.0.3" - resolved "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== stable@^0.1.8: version "0.1.8" - resolved "https://registry.npmjs.org/stable/-/stable-0.1.8.tgz" + resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" + integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== stack-utils@^2.0.3: version "2.0.6" - resolved "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz" + resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.6.tgz#aaf0748169c02fc33c8232abccf933f54a1cc34f" + integrity sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ== dependencies: escape-string-regexp "^2.0.0" stackframe@^1.3.4: version "1.3.4" - resolved "https://registry.npmjs.org/stackframe/-/stackframe-1.3.4.tgz" + resolved "https://registry.yarnpkg.com/stackframe/-/stackframe-1.3.4.tgz#b881a004c8c149a5e8efef37d51b16e412943310" + integrity sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw== state-local@^1.0.6: version "1.0.7" - resolved "https://registry.npmjs.org/state-local/-/state-local-1.0.7.tgz" + resolved "https://registry.yarnpkg.com/state-local/-/state-local-1.0.7.tgz#da50211d07f05748d53009bee46307a37db386d5" + integrity sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w== statuses@2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + integrity 
sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== "statuses@>= 1.4.0 < 2": version "1.5.0" - resolved "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" + integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== stop-iteration-iterator@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz#6a60be0b4ee757d1ed5254858ec66b10c49285e4" + integrity sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ== dependencies: internal-slot "^1.0.4" string-length@^4.0.1: version "4.0.2" - resolved "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" + integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== dependencies: char-regex "^1.0.2" strip-ansi "^6.0.0" string-length@^5.0.1: version "5.0.1" - resolved "https://registry.npmjs.org/string-length/-/string-length-5.0.1.tgz" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-5.0.1.tgz#3d647f497b6e8e8d41e422f7e0b23bc536c8381e" + integrity sha512-9Ep08KAMUn0OadnVaBuRdE2l615CQ508kr0XMadjClfYpdCyvrbFp6Taebo8yyxokQ4viUd/xPPUA4FGgUa0ow== dependencies: char-regex "^2.0.0" strip-ansi "^7.0.1" string-natural-compare@^3.0.1: version "3.0.1" - resolved "https://registry.npmjs.org/string-natural-compare/-/string-natural-compare-3.0.1.tgz" + resolved "https://registry.yarnpkg.com/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" + integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== string-width@^4.1.0, string-width@^4.2.0: version "4.2.3" - resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== dependencies: emoji-regex "^8.0.0" is-fullwidth-code-point "^3.0.0" @@ -10421,7 +11795,8 @@ string-width@^4.1.0, string-width@^4.2.0: string.prototype.matchall@^4.0.6, string.prototype.matchall@^4.0.8: version "4.0.8" - resolved "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.8.tgz" + resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.8.tgz#3bf85722021816dcd1bf38bb714915887ca79fd3" + integrity sha512-6zOCOcJ+RJAQshcTvXPHoxoQGONa3e/Lqx90wUA+wEzX78sg5Bo+1tQo4N0pohS0erG9qtCqJDjNCQBjeWVxyg== dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -10435,6 +11810,7 @@ string.prototype.matchall@^4.0.6, string.prototype.matchall@^4.0.8: string.prototype.trim@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz#a68352740859f6893f14ce3ef1bb3037f7a90533" + integrity sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg== dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -10442,7 +11818,8 @@ string.prototype.trim@^1.2.7: string.prototype.trimend@^1.0.6: version "1.0.6" - resolved 
"https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz#c4a27fa026d979d79c04f17397f250a462944533" + integrity sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ== dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -10450,7 +11827,8 @@ string.prototype.trimend@^1.0.6: string.prototype.trimstart@^1.0.6: version "1.0.6" - resolved "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz#e90ab66aa8e4007d92ef591bbf3cd422c56bdcf4" + integrity sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA== dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -10458,19 +11836,22 @@ string.prototype.trimstart@^1.0.6: string_decoder@^1.1.1: version "1.3.0" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== dependencies: safe-buffer "~5.2.0" string_decoder@~1.1.1: version "1.1.1" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" stringify-object@^3.3.0: version "3.3.0" - resolved "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz" + resolved "https://registry.yarnpkg.com/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629" + integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw== dependencies: get-own-enumerable-property-symbols "^3.0.0" is-obj "^1.0.1" @@ -10478,64 +11859,77 @@ stringify-object@^3.3.0: strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" - resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== dependencies: ansi-regex "^5.0.1" strip-ansi@^7.0.1: version "7.1.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" + integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== dependencies: ansi-regex "^6.0.1" strip-bom@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== strip-bom@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + integrity 
sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== strip-comments@^2.0.1: version "2.0.1" - resolved "https://registry.npmjs.org/strip-comments/-/strip-comments-2.0.1.tgz" + resolved "https://registry.yarnpkg.com/strip-comments/-/strip-comments-2.0.1.tgz#4ad11c3fbcac177a67a40ac224ca339ca1c1ba9b" + integrity sha512-ZprKx+bBLXv067WTCALv8SSz5l2+XhpYCsVtSqlMnkAXMWDq+/ekVbl1ghqP9rUHTzv6sm/DwCOiYutU/yp1fw== strip-final-newline@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== strip-indent@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" + integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== dependencies: min-indent "^1.0.0" -strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: +strip-json-comments@^3.1.1: version "3.1.1" - resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== strnum@^1.0.5: version "1.0.5" - resolved "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz" + resolved "https://registry.yarnpkg.com/strnum/-/strnum-1.0.5.tgz#5c4e829fe15ad4ff0d20c3db5ac97b73c9b072db" + integrity sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA== style-loader@^3.3.1: version "3.3.3" resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-3.3.3.tgz#bba8daac19930169c0c9c96706749a597ae3acff" + integrity sha512-53BiGLXAcll9maCYtZi2RCQZKa8NQQai5C4horqKyRmHj9H7QmcUyucrH+4KW/gBQbXM2AsB0axoEcFZPlfPcw== stylehacks@^5.1.1: version "5.1.1" - resolved "https://registry.npmjs.org/stylehacks/-/stylehacks-5.1.1.tgz" + resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-5.1.1.tgz#7934a34eb59d7152149fa69d6e9e56f2fc34bcc9" + integrity sha512-sBpcd5Hx7G6seo7b1LkpttvTz7ikD0LlH5RmdcBNb6fFR0Fl7LQwHDFr300q4cwUqi+IYrFGmsIHieMBfnN/Bw== dependencies: browserslist "^4.21.4" postcss-selector-parser "^6.0.4" stylis@4.2.0: version "4.2.0" - resolved "https://registry.npmjs.org/stylis/-/stylis-4.2.0.tgz" + resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.2.0.tgz#79daee0208964c8fe695a42fcffcac633a211a51" + integrity sha512-Orov6g6BB1sDfYgzWfTHDOxamtX1bE/zo104Dh9e6fqJ3PooipYyfJ0pUmrZO2wAvO8YbEyeFrkV91XTsGMSrw== sucrase@^3.32.0: - version "3.32.0" - resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.32.0.tgz#c4a95e0f1e18b6847127258a75cf360bc568d4a7" + version "3.34.0" + resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.34.0.tgz#1e0e2d8fcf07f8b9c3569067d92fbd8690fb576f" + integrity sha512-70/LQEZ07TEcxiU2dz51FKaE6hCTWC6vr7FOk3Gr0U60C3shtAN+H+BFr9XlYe5xqf3RA8nrc+VIwzCfnxuXJw== dependencies: "@jridgewell/gen-mapping" "^0.3.2" commander "^4.0.0" @@ -10547,87 +11941,102 @@ sucrase@^3.32.0: supports-color@^5.3.0: version "5.5.0" - resolved 
"https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" supports-color@^7.0.0, supports-color@^7.1.0: version "7.2.0" - resolved "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== dependencies: has-flag "^4.0.0" supports-color@^8.0.0: version "8.1.1" - resolved "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== dependencies: has-flag "^4.0.0" supports-hyperlinks@^2.0.0: version "2.3.0" - resolved "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz" + resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz#3943544347c1ff90b15effb03fc14ae45ec10624" + integrity sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA== dependencies: has-flag "^4.0.0" supports-color "^7.0.0" supports-preserve-symlinks-flag@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== svg-parser@^2.0.2: version "2.0.4" - resolved "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz" + resolved "https://registry.yarnpkg.com/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5" + integrity sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== svg.draggable.js@^2.2.2: version "2.2.2" - resolved "https://registry.npmjs.org/svg.draggable.js/-/svg.draggable.js-2.2.2.tgz" + resolved "https://registry.yarnpkg.com/svg.draggable.js/-/svg.draggable.js-2.2.2.tgz#c514a2f1405efb6f0263e7958f5b68fce50603ba" + integrity sha512-JzNHBc2fLQMzYCZ90KZHN2ohXL0BQJGQimK1kGk6AvSeibuKcIdDX9Kr0dT9+UJ5O8nYA0RB839Lhvk4CY4MZw== dependencies: svg.js "^2.0.1" svg.easing.js@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/svg.easing.js/-/svg.easing.js-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/svg.easing.js/-/svg.easing.js-2.0.0.tgz#8aa9946b0a8e27857a5c40a10eba4091e5691f12" + integrity sha512-//ctPdJMGy22YoYGV+3HEfHbm6/69LJUTAqI2/5qBvaNHZ9uUFVC82B0Pl299HzgH13rKrBgi4+XyXXyVWWthA== dependencies: svg.js ">=2.3.x" svg.filter.js@^2.0.2: version "2.0.2" - resolved "https://registry.npmjs.org/svg.filter.js/-/svg.filter.js-2.0.2.tgz" + resolved "https://registry.yarnpkg.com/svg.filter.js/-/svg.filter.js-2.0.2.tgz#91008e151389dd9230779fcbe6e2c9a362d1c203" + integrity sha512-xkGBwU+dKBzqg5PtilaTb0EYPqPfJ9Q6saVldX+5vCRy31P6TlRCP3U9NxH3HEufkKkpNgdTLBJnmhDHeTqAkw== dependencies: svg.js "^2.2.5" svg.js@>=2.3.x, svg.js@^2.0.1, svg.js@^2.2.5, svg.js@^2.4.0, 
svg.js@^2.6.5: version "2.7.1" - resolved "https://registry.npmjs.org/svg.js/-/svg.js-2.7.1.tgz" + resolved "https://registry.yarnpkg.com/svg.js/-/svg.js-2.7.1.tgz#eb977ed4737001eab859949b4a398ee1bb79948d" + integrity sha512-ycbxpizEQktk3FYvn/8BH+6/EuWXg7ZpQREJvgacqn46gIddG24tNNe4Son6omdXCnSOaApnpZw6MPCBA1dODA== svg.pathmorphing.js@^0.1.3: version "0.1.3" - resolved "https://registry.npmjs.org/svg.pathmorphing.js/-/svg.pathmorphing.js-0.1.3.tgz" + resolved "https://registry.yarnpkg.com/svg.pathmorphing.js/-/svg.pathmorphing.js-0.1.3.tgz#c25718a1cc7c36e852ecabc380e758ac09bb2b65" + integrity sha512-49HWI9X4XQR/JG1qXkSDV8xViuTLIWm/B/7YuQELV5KMOPtXjiwH4XPJvr/ghEDibmLQ9Oc22dpWpG0vUDDNww== dependencies: svg.js "^2.4.0" svg.resize.js@^1.4.3: version "1.4.3" - resolved "https://registry.npmjs.org/svg.resize.js/-/svg.resize.js-1.4.3.tgz" + resolved "https://registry.yarnpkg.com/svg.resize.js/-/svg.resize.js-1.4.3.tgz#885abd248e0cd205b36b973c4b578b9a36f23332" + integrity sha512-9k5sXJuPKp+mVzXNvxz7U0uC9oVMQrrf7cFsETznzUDDm0x8+77dtZkWdMfRlmbkEEYvUn9btKuZ3n41oNA+uw== dependencies: svg.js "^2.6.5" svg.select.js "^2.1.2" svg.select.js@^2.1.2: version "2.1.2" - resolved "https://registry.npmjs.org/svg.select.js/-/svg.select.js-2.1.2.tgz" + resolved "https://registry.yarnpkg.com/svg.select.js/-/svg.select.js-2.1.2.tgz#e41ce13b1acff43a7441f9f8be87a2319c87be73" + integrity sha512-tH6ABEyJsAOVAhwcCjF8mw4crjXSI1aa7j2VQR8ZuJ37H2MBUbyeqYr5nEO7sSN3cy9AR9DUwNg0t/962HlDbQ== dependencies: svg.js "^2.2.5" svg.select.js@^3.0.1: version "3.0.1" - resolved "https://registry.npmjs.org/svg.select.js/-/svg.select.js-3.0.1.tgz" + resolved "https://registry.yarnpkg.com/svg.select.js/-/svg.select.js-3.0.1.tgz#a4198e359f3825739226415f82176a90ea5cc917" + integrity sha512-h5IS/hKkuVCbKSieR9uQCj9w+zLHoPh+ce19bBYyqF53g6mnPB8sAtIbe1s9dh2S2fCmYX2xel1Ln3PJBbK4kw== dependencies: svg.js "^2.6.5" svgo@^1.2.2: version "1.3.2" - resolved "https://registry.npmjs.org/svgo/-/svgo-1.3.2.tgz" + resolved "https://registry.yarnpkg.com/svgo/-/svgo-1.3.2.tgz#b6dc511c063346c9e415b81e43401145b96d4167" + integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== dependencies: chalk "^2.4.1" coa "^2.0.2" @@ -10645,7 +12054,8 @@ svgo@^1.2.2: svgo@^2.7.0: version "2.8.0" - resolved "https://registry.npmjs.org/svgo/-/svgo-2.8.0.tgz" + resolved "https://registry.yarnpkg.com/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24" + integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== dependencies: "@trysound/sax" "0.2.0" commander "^7.2.0" @@ -10657,19 +12067,23 @@ svgo@^2.7.0: symbol-observable@^1.0.2: version "1.2.0" - resolved "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz" + resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804" + integrity sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ== symbol-observable@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/symbol-observable/-/symbol-observable-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-4.0.0.tgz#5b425f192279e87f2f9b937ac8540d1984b39205" + integrity sha512-b19dMThMV4HVFynSAM1++gBHAbk2Tc/osgLIBZMKsyqh34jb2e8Os7T6ZW/Bt3pJFdBTd2JwAnAAEQV7rSNvcQ== symbol-tree@^3.2.4: version "3.2.4" - resolved "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz" + resolved 
"https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" + integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== tailwindcss@^3.0.2: - version "3.3.2" - resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.3.2.tgz#2f9e35d715fdf0bbf674d90147a0684d7054a2d3" + version "3.3.3" + resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.3.3.tgz#90da807393a2859189e48e9e7000e6880a736daf" + integrity sha512-A0KgSkef7eE4Mf+nKJ83i75TMyq8HqY3qmFIJSWy8bNt0v1lG7jUcpGpoTFxAwYcWOphcTBLPPJg+bDfhDf52w== dependencies: "@alloc/quick-lru" "^5.2.0" arg "^5.0.2" @@ -10691,25 +12105,28 @@ tailwindcss@^3.0.2: postcss-load-config "^4.0.1" postcss-nested "^6.0.1" postcss-selector-parser "^6.0.11" - postcss-value-parser "^4.2.0" resolve "^1.22.2" sucrase "^3.32.0" tapable@^1.0.0: version "1.1.3" - resolved "https://registry.npmjs.org/tapable/-/tapable-1.1.3.tgz" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" + integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: version "2.2.1" - resolved "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== temp-dir@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/temp-dir/-/temp-dir-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-2.0.0.tgz#bde92b05bdfeb1516e804c9c00ad45177f31321e" + integrity sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg== tempy@^0.6.0: version "0.6.0" - resolved "https://registry.npmjs.org/tempy/-/tempy-0.6.0.tgz" + resolved "https://registry.yarnpkg.com/tempy/-/tempy-0.6.0.tgz#65e2c35abc06f1124a97f387b08303442bde59f3" + integrity sha512-G13vtMYPT/J8A4X2SjdtBTphZlrp1gKv6hZiOjw14RCWg6GbHuQBGtjlx75xLbYV/wEc0D7G5K4rxKP/cXk8Bw== dependencies: is-stream "^2.0.0" temp-dir "^2.0.0" @@ -10718,7 +12135,8 @@ tempy@^0.6.0: terminal-link@^2.0.0: version "2.1.1" - resolved "https://registry.npmjs.org/terminal-link/-/terminal-link-2.1.1.tgz" + resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" + integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== dependencies: ansi-escapes "^4.2.1" supports-hyperlinks "^2.0.0" @@ -10726,6 +12144,7 @@ terminal-link@^2.0.0: terser-webpack-plugin@^5.2.5, terser-webpack-plugin@^5.3.7: version "5.3.9" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.9.tgz#832536999c51b46d468067f9e37662a3b96adfe1" + integrity sha512-ZuXsqE07EcggTWQjXUj+Aot/OMcD0bMKGgF63f7UxYcu5/AJF53aIpK1YoP5xR9l6s/Hy2b+t1AM0bLNPRuhwA== dependencies: "@jridgewell/trace-mapping" "^0.3.17" jest-worker "^27.4.5" @@ -10734,8 +12153,9 @@ terser-webpack-plugin@^5.2.5, terser-webpack-plugin@^5.3.7: terser "^5.16.8" terser@^5.0.0, terser@^5.10.0, terser@^5.16.8: - version "5.18.2" - resolved "https://registry.yarnpkg.com/terser/-/terser-5.18.2.tgz#ff3072a0faf21ffd38f99acc9a0ddf7b5f07b948" + version "5.19.2" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.19.2.tgz#bdb8017a9a4a8de4663a7983f45c506534f9234e" + integrity 
sha512-qC5+dmecKJA4cpYxRa5aVkKehYsQKc+AHeKl0Oe62aYjBL8ZA33tTljktDHJSaxxMnbI5ZYw+o/S2DxxLu8OfA== dependencies: "@jridgewell/source-map" "^0.3.3" acorn "^8.8.2" @@ -10744,7 +12164,8 @@ terser@^5.0.0, terser@^5.10.0, terser@^5.16.8: test-exclude@^6.0.0: version "6.0.0" - resolved "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== dependencies: "@istanbuljs/schema" "^0.1.2" glob "^7.1.4" @@ -10752,61 +12173,74 @@ test-exclude@^6.0.0: text-table@^0.2.0: version "0.2.0" - resolved "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== thenify-all@^1.0.0: version "1.6.0" resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" + integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA== dependencies: thenify ">= 3.1.0 < 4" "thenify@>= 3.1.0 < 4": version "3.3.1" resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f" + integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== dependencies: any-promise "^1.0.0" throat@^6.0.1: version "6.0.2" - resolved "https://registry.npmjs.org/throat/-/throat-6.0.2.tgz" + resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.2.tgz#51a3fbb5e11ae72e2cf74861ed5c8020f89f29fe" + integrity sha512-WKexMoJj3vEuK0yFEapj8y64V0A6xcuPuK9Gt1d0R+dzCSJc0lHqQytAbSB4cDAK0dWh4T0E2ETkoLE2WZ41OQ== thunky@^1.0.2: version "1.1.0" - resolved "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz" + resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" + integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== tiny-warning@^1.0.2: version "1.0.3" - resolved "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz" + resolved "https://registry.yarnpkg.com/tiny-warning/-/tiny-warning-1.0.3.tgz#94a30db453df4c643d0fd566060d60a875d84754" + integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA== tmpl@1.0.5: version "1.0.5" - resolved "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== to-fast-properties@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== to-regex-range@^5.0.1: version "5.0.1" - resolved "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity 
sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== dependencies: is-number "^7.0.0" toggle-selection@^1.0.6: version "1.0.6" - resolved "https://registry.npmjs.org/toggle-selection/-/toggle-selection-1.0.6.tgz" + resolved "https://registry.yarnpkg.com/toggle-selection/-/toggle-selection-1.0.6.tgz#6e45b1263f2017fa0acc7d89d78b15b8bf77da32" + integrity sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ== toidentifier@1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== toposort@^2.0.2: version "2.0.2" - resolved "https://registry.npmjs.org/toposort/-/toposort-2.0.2.tgz" + resolved "https://registry.yarnpkg.com/toposort/-/toposort-2.0.2.tgz#ae21768175d1559d48bef35420b2f4962f09c330" + integrity sha512-0a5EOkAUp8D4moMi2W8ZF8jcga7BgZd91O/yabJCFY8az+XSzeGyTKs0Aoo897iV1Nj6guFq8orWDS96z91oGg== tough-cookie@^4.0.0: version "4.1.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.3.tgz#97b9adb0728b42280aa3d814b6b999b2ff0318bf" + integrity sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw== dependencies: psl "^1.1.33" punycode "^2.1.1" @@ -10815,43 +12249,51 @@ tough-cookie@^4.0.0: tr46@^1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" + integrity sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA== dependencies: punycode "^2.1.0" tr46@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/tr46/-/tr46-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" + integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== dependencies: punycode "^2.1.1" tr46@~0.0.3: version "0.0.3" - resolved "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== tryer@^1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/tryer/-/tryer-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/tryer/-/tryer-1.0.1.tgz#f2c85406800b9b0f74c9f7465b81eaad241252f8" + integrity sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA== ts-interface-checker@^0.1.9: version "0.1.13" resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699" + integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA== ts-invariant@^0.10.3: version "0.10.3" resolved "https://registry.yarnpkg.com/ts-invariant/-/ts-invariant-0.10.3.tgz#3e048ff96e91459ffca01304dbc7f61c1f642f6c" + integrity sha512-uivwYcQaxAucv1CzRp2n/QdYPo4ILf9VXgH19zEIjFx2EJufV16P0JtJVpYHy89DItG6Kwj2oIUjrcK5au+4tQ== dependencies: tslib "^2.1.0" ts-invariant@^0.4.0: version "0.4.4" - resolved "https://registry.npmjs.org/ts-invariant/-/ts-invariant-0.4.4.tgz" + resolved 
"https://registry.yarnpkg.com/ts-invariant/-/ts-invariant-0.4.4.tgz#97a523518688f93aafad01b0e80eb803eb2abd86" + integrity sha512-uEtWkFM/sdZvRNNDL3Ehu4WVpwaulhwQszV8mrtcdeE8nN00BV9mAmQ88RkrBhFgl9gMgvjJLAQcZbnPXI9mlA== dependencies: tslib "^1.9.3" -tsconfig-paths@^3.14.1: +tsconfig-paths@^3.14.2: version "3.14.2" resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz#6e32f1f79412decd261f92d633a9dc1cfa99f088" + integrity sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g== dependencies: "@types/json5" "^0.0.29" json5 "^1.0.2" @@ -10860,64 +12302,95 @@ tsconfig-paths@^3.14.1: tslib@2.3.0: version "2.3.0" - resolved "https://registry.npmjs.org/tslib/-/tslib-2.3.0.tgz" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.0.tgz#803b8cdab3e12ba581a4ca41c8839bbb0dacb09e" + integrity sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg== tslib@^1.10.0, tslib@^1.11.1, tslib@^1.8.0, tslib@^1.8.1, tslib@^1.9.3: version "1.14.1" - resolved "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslib@^2.0.0, tslib@^2.3.1, tslib@^2.5.0: - version "2.5.3" - resolved "https://registry.npmjs.org/tslib/-/tslib-2.5.3.tgz" - -tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.0: - version "2.6.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.0.tgz#b295854684dbda164e181d259a22cd779dcd7bc3" +tslib@^2.0.0, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.0, tslib@^2.3.1, tslib@^2.5.0: + version "2.6.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.1.tgz#fd8c9a0ff42590b25703c0acb3de3d3f4ede0410" + integrity sha512-t0hLfiEKfMUoqhG+U1oid7Pva4bbDPHYfJNiB7BiIjRkj1pyC++4N3huJfqY6aRH6VTB0rvtzQwjM4K6qpfOig== tsutils@^3.21.0: version "3.21.0" - resolved "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz" + resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== dependencies: tslib "^1.8.1" type-check@^0.4.0, type-check@~0.4.0: version "0.4.0" - resolved "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== dependencies: prelude-ls "^1.2.1" -type-check@~0.3.2: - version "0.3.2" - resolved "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz" - dependencies: - prelude-ls "~1.1.2" - type-detect@4.0.8: version "4.0.8" - resolved "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== type-fest@^0.16.0: version "0.16.0" - resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.16.0.tgz" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.16.0.tgz#3240b891a78b0deae910dbeb86553e552a148860" + integrity sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg== type-fest@^0.20.2: version "0.20.2" - resolved 
"https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== type-fest@^0.21.3: version "0.21.3" - resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== type-is@~1.6.18: version "1.6.18" - resolved "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz" + resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== dependencies: media-typer "0.3.0" mime-types "~2.1.24" +typed-array-buffer@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz#18de3e7ed7974b0a729d3feecb94338d1472cd60" + integrity sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.2.1" + is-typed-array "^1.1.10" + +typed-array-byte-length@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz#d787a24a995711611fb2b87a4052799517b230d0" + integrity sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA== + dependencies: + call-bind "^1.0.2" + for-each "^0.3.3" + has-proto "^1.0.1" + is-typed-array "^1.1.10" + +typed-array-byte-offset@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz#cbbe89b51fdef9cd6aaf07ad4707340abbc4ea0b" + integrity sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg== + dependencies: + available-typed-arrays "^1.0.5" + call-bind "^1.0.2" + for-each "^0.3.3" + has-proto "^1.0.1" + is-typed-array "^1.1.10" + typed-array-length@^1.0.4: version "1.0.4" - resolved "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz" + resolved "https://registry.yarnpkg.com/typed-array-length/-/typed-array-length-1.0.4.tgz#89d83785e5c4098bec72e08b319651f0eac9c1bb" + integrity sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng== dependencies: call-bind "^1.0.2" for-each "^0.3.3" @@ -10925,17 +12398,20 @@ typed-array-length@^1.0.4: typedarray-to-buffer@^3.1.5: version "3.1.5" - resolved "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz" + resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" + integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== dependencies: is-typedarray "^1.0.0" ulid@2.3.0: version "2.3.0" - resolved "https://registry.npmjs.org/ulid/-/ulid-2.3.0.tgz" + resolved "https://registry.yarnpkg.com/ulid/-/ulid-2.3.0.tgz#93063522771a9774121a84d126ecd3eb9804071f" + integrity sha512-keqHubrlpvT6G2wH0OEfSW4mquYRcbe/J8NMmveoQOjUqmo+hXtO+ORCpWhdbZ7k72UtY61BL7haGxW6enBnjw== unbox-primitive@^1.0.2: version "1.0.2" - resolved "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz" + 
resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== dependencies: call-bind "^1.0.2" has-bigints "^1.0.2" @@ -10944,102 +12420,130 @@ unbox-primitive@^1.0.2: unfetch@^4.2.0: version "4.2.0" - resolved "https://registry.npmjs.org/unfetch/-/unfetch-4.2.0.tgz" + resolved "https://registry.yarnpkg.com/unfetch/-/unfetch-4.2.0.tgz#7e21b0ef7d363d8d9af0fb929a5555f6ef97a3be" + integrity sha512-F9p7yYCn6cIW9El1zi0HI6vqpeIvBsr3dSuRO6Xuppb1u5rXpCPmMvLSyECLhybr9isec8Ohl0hPekMVrEinDA== unicode-canonical-property-names-ecmascript@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" + integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== unicode-match-property-ecmascript@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" + integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== dependencies: unicode-canonical-property-names-ecmascript "^2.0.0" unicode-property-aliases-ecmascript "^2.0.0" unicode-match-property-value-ecmascript@^2.1.0: version "2.1.0" - resolved "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz#cb5fffdcd16a05124f5a4b0bf7c3770208acbbe0" + integrity sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA== unicode-property-aliases-ecmascript@^2.0.0: version "2.1.0" - resolved "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz" + resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz#43d41e3be698bd493ef911077c9b131f827e8ccd" + integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== unique-string@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" + integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== dependencies: crypto-random-string "^2.0.0" universal-cookie@^4.0.4: version "4.0.4" - resolved "https://registry.npmjs.org/universal-cookie/-/universal-cookie-4.0.4.tgz" + resolved "https://registry.yarnpkg.com/universal-cookie/-/universal-cookie-4.0.4.tgz#06e8b3625bf9af049569ef97109b4bb226ad798d" + integrity sha512-lbRVHoOMtItjWbM7TwDLdl8wug7izB0tq3/YVKhT/ahB4VDvWMyvnADfnJI8y6fSvsjh51Ix7lTGC6Tn4rMPhw== dependencies: "@types/cookie" "^0.3.3" cookie "^0.4.0" universalify@^0.2.0: version "0.2.0" - resolved 
"https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" + integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg== universalify@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" + integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== unquote@~1.1.1: version "1.1.1" - resolved "https://registry.npmjs.org/unquote/-/unquote-1.1.1.tgz" + resolved "https://registry.yarnpkg.com/unquote/-/unquote-1.1.1.tgz#8fded7324ec6e88a0ff8b905e7c098cdc086d544" + integrity sha512-vRCqFv6UhXpWxZPyGDh/F3ZpNv8/qo7w6iufLpQg9aKnQ71qM4B5KiI7Mia9COcjEhrO9LueHpMYjYzsWH3OIg== upath@^1.2.0: version "1.2.0" - resolved "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz" + resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" + integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== update-browserslist-db@^1.0.11: version "1.0.11" resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz#9a2a641ad2907ae7b3616506f4b977851db5b940" + integrity sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA== dependencies: escalade "^3.1.1" picocolors "^1.0.0" uri-js@^4.2.2: version "4.4.1" - resolved "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== dependencies: punycode "^2.1.0" url-parse@^1.5.3: version "1.5.10" - resolved "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz" + resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" + integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== dependencies: querystringify "^2.1.1" requires-port "^1.0.0" url-parser-lite@^0.1.0: version "0.1.0" - resolved "https://registry.npmjs.org/url-parser-lite/-/url-parser-lite-0.1.0.tgz" + resolved "https://registry.yarnpkg.com/url-parser-lite/-/url-parser-lite-0.1.0.tgz#4679720fd7448d42357d1c8c0a6ece95174e864e" + integrity sha512-k8eUA7I5qfH6c1ZI9CvdHEk+YH1KroX2ry+FF9k6yJBl7AmDWen2WI+xNzbCBAek6JEvgPBoHub4v8aZIM7Jqw== url-search-params-polyfill@^7.0.0: version "7.0.1" - resolved "https://registry.npmjs.org/url-search-params-polyfill/-/url-search-params-polyfill-7.0.1.tgz" + resolved "https://registry.yarnpkg.com/url-search-params-polyfill/-/url-search-params-polyfill-7.0.1.tgz#b900cd9a0d9d2ff757d500135256f2344879cbff" + integrity sha512-bAw7L2E+jn9XHG5P9zrPnHdO0yJub4U+yXJOdpcpkr7OBd9T8oll4lUos0iSGRcDvfZoLUKfx9a6aNmIhJ4+mQ== -url@0.11.0, url@^0.11.0: +url@0.11.0: version "0.11.0" - resolved 
"https://registry.npmjs.org/url/-/url-0.11.0.tgz" + resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" + integrity sha512-kbailJa29QrtXnxgq+DdCEGlbTeYM2eJUxsz6vjZavrCYPMIFHMKQmSKYAIuUK2i7hgPm28a8piX5NTUtM/LKQ== dependencies: punycode "1.3.2" querystring "0.2.0" +url@^0.11.0: + version "0.11.1" + resolved "https://registry.yarnpkg.com/url/-/url-0.11.1.tgz#26f90f615427eca1b9f4d6a28288c147e2302a32" + integrity sha512-rWS3H04/+mzzJkv0eZ7vEDGiQbgquI1fGfOad6zKvgYQi1SzMmhl7c/DdRGxhaWrVH6z0qWITo8rpnxK/RfEhA== + dependencies: + punycode "^1.4.1" + qs "^6.11.0" + util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: version "1.0.2" - resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== util.promisify@~1.0.0: version "1.0.1" - resolved "https://registry.npmjs.org/util.promisify/-/util.promisify-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.1.tgz#6baf7774b80eeb0f7520d8b81d07982a59abbaee" + integrity sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA== dependencies: define-properties "^1.1.3" es-abstract "^1.17.2" @@ -11048,23 +12552,28 @@ util.promisify@~1.0.0: utila@~0.4: version "0.4.0" - resolved "https://registry.npmjs.org/utila/-/utila-0.4.0.tgz" + resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" + integrity sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA== utils-merge@1.0.1: version "1.0.1" - resolved "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== uuid@3.4.0, uuid@^3.0.0, uuid@^3.2.1: version "3.4.0" - resolved "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" + integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== uuid@^8.3.2: version "8.3.2" - resolved "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" + integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== v8-to-istanbul@^8.1.0: version "8.1.1" - resolved "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz" + resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz#77b752fd3975e31bbcef938f85e9bd1c7a8d60ed" + integrity sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w== dependencies: "@types/istanbul-lib-coverage" "^2.0.1" convert-source-map "^1.6.0" @@ -11072,62 +12581,82 @@ v8-to-istanbul@^8.1.0: vary@~1.1.2: version "1.1.2" - resolved "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz" + resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== 
w3c-hr-time@^1.0.2: version "1.0.2" - resolved "https://registry.npmjs.org/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" + integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== dependencies: browser-process-hrtime "^1.0.0" w3c-xmlserializer@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" + integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== dependencies: xml-name-validator "^3.0.0" walker@^1.0.7: version "1.0.8" - resolved "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz" + resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== dependencies: makeerror "1.0.12" +watch@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/watch/-/watch-1.0.2.tgz#340a717bde765726fa0aa07d721e0147a551df0c" + integrity sha512-1u+Z5n9Jc1E2c7qDO8SinPoZuHj7FgbgU1olSFoyaklduDvvtX7GMMtlE6OC9FTXq4KvNAOfj6Zu4vI1e9bAKA== + dependencies: + exec-sh "^0.2.0" + minimist "^1.2.0" + watchpack@^2.4.0: version "2.4.0" - resolved "https://registry.npmjs.org/watchpack/-/watchpack-2.4.0.tgz" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" + integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== dependencies: glob-to-regexp "^0.4.1" graceful-fs "^4.1.2" wbuf@^1.1.0, wbuf@^1.7.3: version "1.7.3" - resolved "https://registry.npmjs.org/wbuf/-/wbuf-1.7.3.tgz" + resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" + integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== dependencies: minimalistic-assert "^1.0.0" web-vitals@^2.1.4: version "2.1.4" - resolved "https://registry.npmjs.org/web-vitals/-/web-vitals-2.1.4.tgz" + resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-2.1.4.tgz#76563175a475a5e835264d373704f9dde718290c" + integrity sha512-sVWcwhU5mX6crfI5Vd2dC4qchyTqxV8URinzt25XqVh+bHEPGH4C3NPrNionCP7Obx59wrYEbNlw4Z8sjALzZg== webidl-conversions@^3.0.0: version "3.0.1" - resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== webidl-conversions@^4.0.2: version "4.0.2" - resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" + integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== webidl-conversions@^5.0.0: version "5.0.0" - resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-5.0.0.tgz" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" + 
integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== webidl-conversions@^6.1.0: version "6.1.0" - resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-6.1.0.tgz" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" + integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== webpack-dev-middleware@^5.3.1: version "5.3.3" - resolved "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz" + resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz#efae67c2793908e7311f1d9b06f2a08dcc97e51f" + integrity sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA== dependencies: colorette "^2.0.10" memfs "^3.4.3" @@ -11138,6 +12667,7 @@ webpack-dev-middleware@^5.3.1: webpack-dev-server@^4.6.0: version "4.15.1" resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-4.15.1.tgz#8944b29c12760b3a45bdaa70799b17cb91b03df7" + integrity sha512-5hbAst3h3C3L8w6W4P96L5vaV0PxSmJhxZvWKYIdgxOQm8pNZ5dEOmmSLBVpP85ReeyRt6AS1QJNyo/oFFPeVA== dependencies: "@types/bonjour" "^3.5.9" "@types/connect-history-api-fallback" "^1.3.5" @@ -11172,32 +12702,37 @@ webpack-dev-server@^4.6.0: webpack-manifest-plugin@^4.0.2: version "4.1.1" - resolved "https://registry.npmjs.org/webpack-manifest-plugin/-/webpack-manifest-plugin-4.1.1.tgz" + resolved "https://registry.yarnpkg.com/webpack-manifest-plugin/-/webpack-manifest-plugin-4.1.1.tgz#10f8dbf4714ff93a215d5a45bcc416d80506f94f" + integrity sha512-YXUAwxtfKIJIKkhg03MKuiFAD72PlrqCiwdwO4VEXdRO5V0ORCNwaOwAZawPZalCbmH9kBDmXnNeQOw+BIEiow== dependencies: tapable "^2.0.0" webpack-sources "^2.2.0" webpack-sources@^1.4.3: version "1.4.3" - resolved "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.4.3.tgz" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" + integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== dependencies: source-list-map "^2.0.0" source-map "~0.6.1" webpack-sources@^2.2.0: version "2.3.1" - resolved "https://registry.npmjs.org/webpack-sources/-/webpack-sources-2.3.1.tgz" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-2.3.1.tgz#570de0af163949fe272233c2cefe1b56f74511fd" + integrity sha512-y9EI9AO42JjEcrTJFOYmVywVZdKVUfOvDUPsJea5GIr1JOEGFVqwlY2K098fFoIjOkDzHn2AjRvM8dsBZu+gCA== dependencies: source-list-map "^2.0.1" source-map "^0.6.1" webpack-sources@^3.2.3: version "3.2.3" - resolved "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" + integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== webpack@^5.64.4: - version "5.88.1" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.88.1.tgz#21eba01e81bd5edff1968aea726e2fbfd557d3f8" + version "5.88.2" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.88.2.tgz#f62b4b842f1c6ff580f3fcb2ed4f0b579f4c210e" + integrity sha512-JmcgNZ1iKj+aiR0OvTYtWQqJwq37Pf683dY9bVORwVbUrDhLhdn/PlO2sHsFHPkj7sHNQF3JwaAkp49V+Sq1tQ== dependencies: "@types/eslint-scope" "^3.7.3" "@types/estree" "^1.0.0" @@ -11226,7 +12761,8 @@ webpack@^5.64.4: 
websocket-driver@>=0.5.1, websocket-driver@^0.7.4: version "0.7.4" - resolved "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz" + resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760" + integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== dependencies: http-parser-js ">=0.5.1" safe-buffer ">=5.1.0" @@ -11234,25 +12770,30 @@ websocket-driver@>=0.5.1, websocket-driver@^0.7.4: websocket-extensions@>=0.1.1: version "0.1.4" - resolved "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz" + resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" + integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== whatwg-encoding@^1.0.5: version "1.0.5" - resolved "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz" + resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" + integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== dependencies: iconv-lite "0.4.24" whatwg-fetch@^3.6.2: - version "3.6.2" - resolved "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz" + version "3.6.17" + resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.17.tgz#009bbbfc122b227b74ba1ff31536b3a1a0e0e212" + integrity sha512-c4ghIvG6th0eudYwKZY5keb81wtFz9/WeAHAoy8+r18kcWlitUIrmGFQ2rWEl4UCKUilD3zCLHOIPheHx5ypRQ== whatwg-mimetype@^2.3.0: version "2.3.0" - resolved "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz" + resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" + integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== whatwg-url-without-unicode@8.0.0-3: version "8.0.0-3" - resolved "https://registry.npmjs.org/whatwg-url-without-unicode/-/whatwg-url-without-unicode-8.0.0-3.tgz" + resolved "https://registry.yarnpkg.com/whatwg-url-without-unicode/-/whatwg-url-without-unicode-8.0.0-3.tgz#ab6df4bf6caaa6c85a59f6e82c026151d4bb376b" + integrity sha512-HoKuzZrUlgpz35YO27XgD28uh/WJH4B0+3ttFqRo//lmq+9T/mIOJ6kqmINI9HpUpz1imRC/nR/lxKpJiv0uig== dependencies: buffer "^5.4.3" punycode "^2.1.1" @@ -11260,14 +12801,16 @@ whatwg-url-without-unicode@8.0.0-3: whatwg-url@^5.0.0: version "5.0.0" - resolved "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== dependencies: tr46 "~0.0.3" webidl-conversions "^3.0.0" whatwg-url@^7.0.0: version "7.1.0" - resolved "https://registry.npmjs.org/whatwg-url/-/whatwg-url-7.1.0.tgz" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" + integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== dependencies: lodash.sortby "^4.7.0" tr46 "^1.0.1" @@ -11275,7 +12818,8 @@ whatwg-url@^7.0.0: whatwg-url@^8.0.0, whatwg-url@^8.5.0: version "8.7.0" - resolved "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.7.0.tgz" + resolved 
"https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" + integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg== dependencies: lodash "^4.7.0" tr46 "^2.1.0" @@ -11283,7 +12827,8 @@ whatwg-url@^8.0.0, whatwg-url@^8.5.0: which-boxed-primitive@^1.0.2: version "1.0.2" - resolved "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== dependencies: is-bigint "^1.0.1" is-boolean-object "^1.1.0" @@ -11291,45 +12836,63 @@ which-boxed-primitive@^1.0.2: is-string "^1.0.5" is-symbol "^1.0.3" +which-builtin-type@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/which-builtin-type/-/which-builtin-type-1.1.3.tgz#b1b8443707cc58b6e9bf98d32110ff0c2cbd029b" + integrity sha512-YmjsSMDBYsM1CaFiayOVT06+KJeXf0o5M/CAd4o1lTadFAtacTUM49zoYxr/oroopFDfhvN6iEcBxUyc3gvKmw== + dependencies: + function.prototype.name "^1.1.5" + has-tostringtag "^1.0.0" + is-async-function "^2.0.0" + is-date-object "^1.0.5" + is-finalizationregistry "^1.0.2" + is-generator-function "^1.0.10" + is-regex "^1.1.4" + is-weakref "^1.0.2" + isarray "^2.0.5" + which-boxed-primitive "^1.0.2" + which-collection "^1.0.1" + which-typed-array "^1.1.9" + which-collection@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/which-collection/-/which-collection-1.0.1.tgz#70eab71ebbbd2aefaf32f917082fc62cdcb70906" + integrity sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A== dependencies: is-map "^2.0.1" is-set "^2.0.1" is-weakmap "^2.0.1" is-weakset "^2.0.1" -which-typed-array@^1.1.9: - version "1.1.9" - resolved "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.9.tgz" +which-typed-array@^1.1.10, which-typed-array@^1.1.11, which-typed-array@^1.1.9: + version "1.1.11" + resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.11.tgz#99d691f23c72aab6768680805a271b69761ed61a" + integrity sha512-qe9UWWpkeG5yzZ0tNYxDmd7vo58HDBc39mZ0xWWpolAGADdFOzkfamWLDxkOWcvHQKVmdTyQdLD4NOfjLWTKew== dependencies: available-typed-arrays "^1.0.5" call-bind "^1.0.2" for-each "^0.3.3" gopd "^1.0.1" has-tostringtag "^1.0.0" - is-typed-array "^1.1.10" which@^1.2.9, which@^1.3.1: version "1.3.1" - resolved "https://registry.npmjs.org/which/-/which-1.3.1.tgz" + resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" which@^2.0.1: version "2.0.2" - resolved "https://registry.npmjs.org/which/-/which-2.0.2.tgz" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== dependencies: isexe "^2.0.0" -word-wrap@~1.2.3: - version "1.2.3" - resolved "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz" - workbox-background-sync@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-background-sync/-/workbox-background-sync-6.6.1.tgz#08d603a33717ce663e718c30cc336f74909aff2f" + integrity 
sha512-trJd3ovpWCvzu4sW0E8rV3FUyIcC0W8G+AZ+VcqzzA890AsWZlUGOTSxIMmIHVusUw/FDq1HFWfy/kC/WTRqSg== dependencies: idb "^7.0.1" workbox-core "6.6.1" @@ -11337,12 +12900,14 @@ workbox-background-sync@6.6.1: workbox-broadcast-update@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-broadcast-update/-/workbox-broadcast-update-6.6.1.tgz#0fad9454cf8e4ace0c293e5617c64c75d8a8c61e" + integrity sha512-fBhffRdaANdeQ1V8s692R9l/gzvjjRtydBOvR6WCSB0BNE2BacA29Z4r9/RHd9KaXCPl6JTdI9q0bR25YKP8TQ== dependencies: workbox-core "6.6.1" workbox-build@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-build/-/workbox-build-6.6.1.tgz#6010e9ce550910156761448f2dbea8cfcf759cb0" + integrity sha512-INPgDx6aRycAugUixbKgiEQBWD0MPZqU5r0jyr24CehvNuLPSXp/wGOpdRJmts656lNiXwqV7dC2nzyrzWEDnw== dependencies: "@apideck/better-ajv-errors" "^0.3.1" "@babel/core" "^7.11.1" @@ -11385,16 +12950,19 @@ workbox-build@6.6.1: workbox-cacheable-response@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-cacheable-response/-/workbox-cacheable-response-6.6.1.tgz#284c2b86be3f4fd191970ace8c8e99797bcf58e9" + integrity sha512-85LY4veT2CnTCDxaVG7ft3NKaFbH6i4urZXgLiU4AiwvKqS2ChL6/eILiGRYXfZ6gAwDnh5RkuDbr/GMS4KSag== dependencies: workbox-core "6.6.1" workbox-core@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-core/-/workbox-core-6.6.1.tgz#7184776d4134c5ed2f086878c882728fc9084265" + integrity sha512-ZrGBXjjaJLqzVothoE12qTbVnOAjFrHDXpZe7coCb6q65qI/59rDLwuFMO4PcZ7jcbxY+0+NhUVztzR/CbjEFw== workbox-expiration@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-expiration/-/workbox-expiration-6.6.1.tgz#a841fa36676104426dbfb9da1ef6a630b4f93739" + integrity sha512-qFiNeeINndiOxaCrd2DeL1Xh1RFug3JonzjxUHc5WkvkD2u5abY3gZL1xSUNt3vZKsFFGGORItSjVTVnWAZO4A== dependencies: idb "^7.0.1" workbox-core "6.6.1" @@ -11402,6 +12970,7 @@ workbox-expiration@6.6.1: workbox-google-analytics@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-google-analytics/-/workbox-google-analytics-6.6.1.tgz#a07a6655ab33d89d1b0b0a935ffa5dea88618c5d" + integrity sha512-1TjSvbFSLmkpqLcBsF7FuGqqeDsf+uAXO/pjiINQKg3b1GN0nBngnxLcXDYo1n/XxK4N7RaRrpRlkwjY/3ocuA== dependencies: workbox-background-sync "6.6.1" workbox-core "6.6.1" @@ -11411,12 +12980,14 @@ workbox-google-analytics@6.6.1: workbox-navigation-preload@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-navigation-preload/-/workbox-navigation-preload-6.6.1.tgz#61a34fe125558dd88cf09237f11bd966504ea059" + integrity sha512-DQCZowCecO+wRoIxJI2V6bXWK6/53ff+hEXLGlQL4Rp9ZaPDLrgV/32nxwWIP7QpWDkVEtllTAK5h6cnhxNxDA== dependencies: workbox-core "6.6.1" workbox-precaching@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-precaching/-/workbox-precaching-6.6.1.tgz#dedeeba10a2d163d990bf99f1c2066ac0d1a19e2" + integrity sha512-K4znSJ7IKxCnCYEdhNkMr7X1kNh8cz+mFgx9v5jFdz1MfI84pq8C2zG+oAoeE5kFrUf7YkT5x4uLWBNg0DVZ5A== dependencies: workbox-core "6.6.1" workbox-routing "6.6.1" @@ -11425,12 +12996,14 @@ workbox-precaching@6.6.1: workbox-range-requests@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-range-requests/-/workbox-range-requests-6.6.1.tgz#ddaf7e73af11d362fbb2f136a9063a4c7f507a39" + integrity sha512-4BDzk28govqzg2ZpX0IFkthdRmCKgAKreontYRC5YsAPB2jDtPNxqx3WtTXgHw1NZalXpcH/E4LqUa9+2xbv1g== dependencies: workbox-core "6.6.1" workbox-recipes@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-recipes/-/workbox-recipes-6.6.1.tgz#ea70d2b2b0b0bce8de0a9d94f274d4a688e69fae" 
+ integrity sha512-/oy8vCSzromXokDA+X+VgpeZJvtuf8SkQ8KL0xmRivMgJZrjwM3c2tpKTJn6PZA6TsbxGs3Sc7KwMoZVamcV2g== dependencies: workbox-cacheable-response "6.6.1" workbox-core "6.6.1" @@ -11442,18 +13015,21 @@ workbox-recipes@6.6.1: workbox-routing@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-routing/-/workbox-routing-6.6.1.tgz#cba9a1c7e0d1ea11e24b6f8c518840efdc94f581" + integrity sha512-j4ohlQvfpVdoR8vDYxTY9rA9VvxTHogkIDwGdJ+rb2VRZQ5vt1CWwUUZBeD/WGFAni12jD1HlMXvJ8JS7aBWTg== dependencies: workbox-core "6.6.1" workbox-strategies@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-strategies/-/workbox-strategies-6.6.1.tgz#38d0f0fbdddba97bd92e0c6418d0b1a2ccd5b8bf" + integrity sha512-WQLXkRnsk4L81fVPkkgon1rZNxnpdO5LsO+ws7tYBC6QQQFJVI6v98klrJEjFtZwzw/mB/HT5yVp7CcX0O+mrw== dependencies: workbox-core "6.6.1" workbox-streams@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-streams/-/workbox-streams-6.6.1.tgz#b2f7ba7b315c27a6e3a96a476593f99c5d227d26" + integrity sha512-maKG65FUq9e4BLotSKWSTzeF0sgctQdYyTMq529piEN24Dlu9b6WhrAfRpHdCncRS89Zi2QVpW5V33NX8PgH3Q== dependencies: workbox-core "6.6.1" workbox-routing "6.6.1" @@ -11461,10 +13037,12 @@ workbox-streams@6.6.1: workbox-sw@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-sw/-/workbox-sw-6.6.1.tgz#d4c4ca3125088e8b9fd7a748ed537fa0247bd72c" + integrity sha512-R7whwjvU2abHH/lR6kQTTXLHDFU2izht9kJOvBRYK65FbwutT4VvnUAJIgHvfWZ/fokrOPhfoWYoPCMpSgUKHQ== workbox-webpack-plugin@^6.4.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-webpack-plugin/-/workbox-webpack-plugin-6.6.1.tgz#4f81cc1ad4e5d2cd7477a86ba83c84ee2d187531" + integrity sha512-zpZ+ExFj9NmiI66cFEApyjk7hGsfJ1YMOaLXGXBoZf0v7Iu6hL0ZBe+83mnDq3YYWAfA3fnyFejritjOHkFcrA== dependencies: fast-json-stable-stringify "^2.1.0" pretty-bytes "^5.4.1" @@ -11475,13 +13053,15 @@ workbox-webpack-plugin@^6.4.1: workbox-window@6.6.1: version "6.6.1" resolved "https://registry.yarnpkg.com/workbox-window/-/workbox-window-6.6.1.tgz#f22a394cbac36240d0dadcbdebc35f711bb7b89e" + integrity sha512-wil4nwOY58nTdCvif/KEZjQ2NP8uk3gGeRNy2jPBbzypU4BT4D9L8xiwbmDBpZlSgJd2xsT9FvSNU0gsxV51JQ== dependencies: "@types/trusted-types" "^2.0.2" workbox-core "6.6.1" wrap-ansi@^7.0.0: version "7.0.0" - resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== dependencies: ansi-styles "^4.0.0" string-width "^4.1.0" @@ -11489,11 +13069,13 @@ wrap-ansi@^7.0.0: wrappy@1: version "1.0.2" - resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== write-file-atomic@^3.0.0: version "3.0.3" - resolved "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" + integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== dependencies: imurmurhash "^0.1.4" is-typedarray "^1.0.0" @@ -11502,54 +13084,66 @@ write-file-atomic@^3.0.0: ws@^7.4.6: version "7.5.9" - resolved "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz" + 
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" + integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== ws@^8.13.0: version "8.13.0" resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0" + integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA== xml-name-validator@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" + integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== xmlchars@^2.2.0: version "2.2.0" - resolved "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz" + resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" + integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== xss@^1.0.11: version "1.0.14" - resolved "https://registry.npmjs.org/xss/-/xss-1.0.14.tgz" + resolved "https://registry.yarnpkg.com/xss/-/xss-1.0.14.tgz#4f3efbde75ad0d82e9921cc3c95e6590dd336694" + integrity sha512-og7TEJhXvn1a7kzZGQ7ETjdQVS2UfZyTlsEdDOqvQF7GoxNfY+0YLCzBy1kPdsDDx4QuNAonQPddpsn6Xl/7sw== dependencies: commander "^2.20.3" cssfilter "0.0.10" y18n@^5.0.5: version "5.0.8" - resolved "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== yallist@^3.0.2: version "3.1.1" - resolved "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" + integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yallist@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: version "1.10.2" - resolved "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== yaml@^2.1.1: version "2.3.1" resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.1.tgz#02fe0975d23cd441242aa7204e09fc28ac2ac33b" + integrity sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ== yargs-parser@^20.2.2: version "20.2.9" - resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" + integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== yargs@^16.2.0: version "16.2.0" - resolved "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz" + resolved 
"https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" + integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== dependencies: cliui "^7.0.2" escalade "^3.1.1" @@ -11561,11 +13155,13 @@ yargs@^16.2.0: yocto-queue@^0.1.0: version "0.1.0" - resolved "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz" + resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== yup@^0.32.11: version "0.32.11" - resolved "https://registry.npmjs.org/yup/-/yup-0.32.11.tgz" + resolved "https://registry.yarnpkg.com/yup/-/yup-0.32.11.tgz#d67fb83eefa4698607982e63f7ca4c5ed3cf18c5" + integrity sha512-Z2Fe1bn+eLstG8DRR6FTavGD+MeAwyfmouhHsIUgaADz8jvFKbO/fXc2trJKZg+5EBjh4gGm3iU/t3onKlXHIg== dependencies: "@babel/runtime" "^7.15.4" "@types/lodash" "^4.14.175" @@ -11577,14 +13173,16 @@ yup@^0.32.11: zen-observable-ts@0.8.19: version "0.8.19" - resolved "https://registry.npmjs.org/zen-observable-ts/-/zen-observable-ts-0.8.19.tgz" + resolved "https://registry.yarnpkg.com/zen-observable-ts/-/zen-observable-ts-0.8.19.tgz#c094cd20e83ddb02a11144a6e2a89706946b5694" + integrity sha512-u1a2rpE13G+jSzrg3aiCqXU5tN2kw41b+cBZGmnc+30YimdkKiDj9bTowcB41eL77/17RF/h+393AuVgShyheQ== dependencies: tslib "^1.9.3" zen-observable "^0.8.0" zen-observable-ts@^0.8.21: version "0.8.21" - resolved "https://registry.npmjs.org/zen-observable-ts/-/zen-observable-ts-0.8.21.tgz" + resolved "https://registry.yarnpkg.com/zen-observable-ts/-/zen-observable-ts-0.8.21.tgz#85d0031fbbde1eba3cd07d3ba90da241215f421d" + integrity sha512-Yj3yXweRc8LdRMrCC8nIc4kkjWecPAUVh0TI0OUrWXx6aX790vLcDlWca6I4vsyCGH3LpWxq0dJRcMOFoVqmeg== dependencies: tslib "^1.9.3" zen-observable "^0.8.0" @@ -11592,25 +13190,30 @@ zen-observable-ts@^0.8.21: zen-observable-ts@^1.2.5: version "1.2.5" resolved "https://registry.yarnpkg.com/zen-observable-ts/-/zen-observable-ts-1.2.5.tgz#6c6d9ea3d3a842812c6e9519209365a122ba8b58" + integrity sha512-QZWQekv6iB72Naeake9hS1KxHlotfRpe+WGNbNx5/ta+R3DNjVO2bswf63gXlWDcs+EMd7XY8HfVQyP1X6T4Zg== dependencies: zen-observable "0.8.15" zen-observable@0.8.15, zen-observable@^0.8.0: version "0.8.15" - resolved "https://registry.npmjs.org/zen-observable/-/zen-observable-0.8.15.tgz" + resolved "https://registry.yarnpkg.com/zen-observable/-/zen-observable-0.8.15.tgz#96415c512d8e3ffd920afd3889604e30b9eaac15" + integrity sha512-PQ2PC7R9rslx84ndNBZB/Dkv8V8fZEpk83RLgXtYd0fwUgEjseMn1Dgajh2x6S8QbZAFa9p2qVCEuYZNgve0dQ== zen-observable@^0.7.0: version "0.7.1" - resolved "https://registry.npmjs.org/zen-observable/-/zen-observable-0.7.1.tgz" + resolved "https://registry.yarnpkg.com/zen-observable/-/zen-observable-0.7.1.tgz#f84075c0ee085594d3566e1d6454207f126411b3" + integrity sha512-OI6VMSe0yeqaouIXtedC+F55Sr6r9ppS7+wTbSexkYdHbdt4ctTuPNXP/rwm7GTVI63YBc+EBT0b0tl7YnJLRg== zen-push@0.2.1: version "0.2.1" - resolved "https://registry.npmjs.org/zen-push/-/zen-push-0.2.1.tgz" + resolved "https://registry.yarnpkg.com/zen-push/-/zen-push-0.2.1.tgz#ddc33b90f66f9a84237d5f1893970f6be60c3c28" + integrity sha512-Qv4qvc8ZIue51B/0zmeIMxpIGDVhz4GhJALBvnKs/FRa2T7jy4Ori9wFwaHVt0zWV7MIFglKAHbgnVxVTw7U1w== dependencies: zen-observable "^0.7.0" -zrender@5.4.3: - version "5.4.3" - resolved "https://registry.npmjs.org/zrender/-/zrender-5.4.3.tgz" +zrender@5.4.4: + version "5.4.4" + resolved 
"https://registry.yarnpkg.com/zrender/-/zrender-5.4.4.tgz#8854f1d95ecc82cf8912f5a11f86657cb8c9e261" + integrity sha512-0VxCNJ7AGOMCWeHVyTrGzUgrK4asT4ml9PEkeGirAkKNYXYzoPJCLvmyfdoOXcjTHPs10OZVMfD1Rwg16AZyYw== dependencies: tslib "2.3.0" diff --git a/template_cdk.json b/template_cdk.json index 9cbd28eb2..2b93ff8a5 100644 --- a/template_cdk.json +++ b/template_cdk.json @@ -2,7 +2,7 @@ "app": "python ./deploy/app.py", "context": { "@aws-cdk/aws-apigateway:usagePlanKeyOrderInsensitiveId": false, - "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": false, + "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": true, "@aws-cdk/aws-rds:lowercaseDbIdentifier": false, "@aws-cdk/core:stackRelativeExports": false, "tooling_region": "string_TOOLING_REGION|DEFAULT=eu-west-1", diff --git a/tests/api/client.py b/tests/api/client.py deleted file mode 100644 index 20dbb8c22..000000000 --- a/tests/api/client.py +++ /dev/null @@ -1,121 +0,0 @@ -import random -import typing -import json -import pytest -from ariadne import graphql_sync -from ariadne.constants import PLAYGROUND_HTML -from flask import Flask, request, jsonify, Response -from munch import DefaultMunch -import dataall - - -class ClientWrapper: - def __init__(self, cli): - self.client = cli - - def query( - self, - query: str, - username: str = 'test', - groups: typing.List[str] = ['-'], - **variables, - ): - response: Response = self.client.post( - '/graphql', - json={'query': f""" {query} """, 'variables': variables}, - headers={'groups': json.dumps(groups), 'username': username}, - ) - return DefaultMunch.fromDict(response.get_json()) - - -@pytest.fixture(scope='module', autouse=True) -def app(db, es): - app = Flask('tests') - schema = dataall.api.get_executable_schema() - - @app.route('/', methods=['OPTIONS']) - def opt(): - # On GET request serve GraphQL Playground - # You don't need to provide Playground if you don't want to - # but keep on mind this will not prohibit clients from - # exploring your API using desktop GraphQL Playground app. - return '
Hello
', 200 - - @app.route('/graphql', methods=['GET']) - def graphql_playground(): - # On GET request serve GraphQL Playground - # You don't need to provide Playground if you don't want to - # but keep on mind this will not prohibit clients from - # exploring your API using desktop GraphQL Playground app. - return PLAYGROUND_HTML, 200 - - @app.route('/graphql', methods=['POST']) - def graphql_server(): - # GraphQL queries are always sent as POST - # Note: Passing the request to the context is optional. - # In Flask, the current request is always accessible as flask.request - data = request.get_json() - - username = request.headers.get('Username', 'anonym') - groups = json.loads(request.headers.get('Groups', '[]')) - success, result = graphql_sync( - schema, - data, - context_value={ - 'schema': None, - 'engine': db, - 'username': username, - 'groups': groups, - 'es': es, - 'cdkproxyurl': 'cdkproxyurl', - }, - debug=app.debug, - ) - - status_code = 200 if success else 400 - return jsonify(result), status_code - - yield app - - -@pytest.fixture(scope='module') -def client(app) -> ClientWrapper: - with app.test_client() as client: - yield ClientWrapper(client) - - -def deprecated(fn): - def wrapper(*args, **kwargs): - print(fn.__name__, 'is deprecated') - - return wrapper - - -def random_email(): - names = ['andy', 'bill', 'satya', 'sundar'] - corps = ['google.com', 'amazon.com', 'microsoft.com'] - return f'{random.choice(names)}@{random.choice(corps)}' - - -def random_emails(): - emails = [] - for i in range(1, 2 + random.choice([2, 3, 4])): - emails.append(random_email()) - return emails - - -def random_group(): - prefixes = ['big', 'small', 'pretty', 'shiny'] - names = ['team', 'people', 'group'] - lands = ['snow', 'ice', 'green', 'high'] - return f'{random.choice(prefixes).capitalize()}{random.choice(names).capitalize()}From{random.choice(lands).capitalize()}land' - - -def random_tag(): - return random.choice( - ['sales', 'finances', 'sites', 'people', 'products', 'partners', 'operations'] - ) - - -def random_tags(): - return [random_tag() for i in range(1, random.choice([2, 3, 4, 5]))] diff --git a/tests/api/conftest.py b/tests/api/conftest.py deleted file mode 100644 index cde8a5a25..000000000 --- a/tests/api/conftest.py +++ /dev/null @@ -1,816 +0,0 @@ -from .client import * -from dataall.db import models -from dataall.api import constants - - -@pytest.fixture(scope='module', autouse=True) -def patch_request(module_mocker): - """we will mock requests.post so no call to cdk proxy will be made""" - module_mocker.patch('requests.post', return_value=True) - - -@pytest.fixture(scope='module', autouse=True) -def patch_check_env(module_mocker): - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', - return_value='CDKROLENAME', - ) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.get_pivot_role_as_part_of_environment', return_value=False - ) - - -@pytest.fixture(scope='module', autouse=True) -def patch_check_dataset(module_mocker): - module_mocker.patch( - 'dataall.api.Objects.Dataset.resolvers.check_dataset_account', return_value=True - ) - - -@pytest.fixture(scope='module', autouse=True) -def patch_es(module_mocker): - module_mocker.patch('dataall.searchproxy.connect', return_value={}) - module_mocker.patch('dataall.searchproxy.search', return_value={}) - module_mocker.patch('dataall.searchproxy.upsert', return_value={}) - module_mocker.patch('dataall.searchproxy.indexers.upsert_dataset_tables', return_value={}) - 
module_mocker.patch('dataall.searchproxy.indexers.upsert_dataset', return_value={}) - module_mocker.patch('dataall.searchproxy.indexers.upsert_table', return_value={}) - module_mocker.patch('dataall.searchproxy.indexers.upsert_folder', return_value={}) - module_mocker.patch('dataall.searchproxy.indexers.upsert_dashboard', return_value={}) - module_mocker.patch('dataall.searchproxy.indexers.delete_doc', return_value={}) - - -@pytest.fixture(scope='module', autouse=True) -def patch_stack_tasks(module_mocker): - module_mocker.patch( - 'dataall.aws.handlers.ecs.Ecs.is_task_running', - return_value=False, - ) - module_mocker.patch( - 'dataall.aws.handlers.ecs.Ecs.run_cdkproxy_task', - return_value='arn:aws:eu-west-1:xxxxxxxx:ecs:task/1222222222', - ) - module_mocker.patch( - 'dataall.aws.handlers.cloudformation.CloudFormation.describe_stack_resources', - return_value=True, - ) - - -@pytest.fixture(scope='module', autouse=True) -def permissions(db): - with db.scoped_session() as session: - yield dataall.db.api.Permission.init_permissions(session) - - -@pytest.fixture(scope='module', autouse=True) -def user(db): - with db.scoped_session() as session: - user = dataall.db.models.User(userId='alice@test.com', userName='alice') - session.add(user) - yield user - - -@pytest.fixture(scope='module') -def group(db, user): - with db.scoped_session() as session: - group = dataall.db.models.Group(name='testadmins', label='testadmins', owner='alice') - session.add(group) - session.commit() - member = dataall.db.models.GroupMember( - userName=user.userName, - groupUri=group.groupUri, - ) - session.add(member) - session.commit() - yield group - - -@pytest.fixture(scope='module', autouse=True) -def user2(db): - with db.scoped_session() as session: - user = dataall.db.models.User(userId='bob@test.com', userName='bob') - session.add(user) - yield user - - -@pytest.fixture(scope='module') -def group2(db, user2): - with db.scoped_session() as session: - group = dataall.db.models.Group(name='dataengineers', label='dataengineers', owner=user2.userName) - session.add(group) - session.commit() - member = dataall.db.models.GroupMember( - userName=user2.userName, - groupUri=group.groupUri, - ) - session.add(member) - session.commit() - yield group - - -@pytest.fixture(scope='module', autouse=True) -def user3(db): - with db.scoped_session() as session: - user = dataall.db.models.User(userId='david@test.com', userName='david') - session.add(user) - yield user - - -@pytest.fixture(scope='module') -def group3(db, user3): - with db.scoped_session() as session: - group = dataall.db.models.Group(name='datascientists', label='datascientists', owner=user3.userName) - session.add(group) - session.commit() - member = dataall.db.models.GroupMember( - userName=user3.userName, - groupUri=group.groupUri, - ) - session.add(member) - session.commit() - yield group - - -@pytest.fixture(scope='module') -def group4(db, user3): - with db.scoped_session() as session: - group = dataall.db.models.Group(name='externals', label='externals', owner=user3.userName) - session.add(group) - session.commit() - member = dataall.db.models.GroupMember( - userName=user3.userName, - groupUri=group.groupUri, - ) - session.add(member) - session.commit() - yield group - - -@pytest.fixture(scope='module') -def tenant(db, group, group2, permissions, user, user2, user3, group3, group4): - with db.scoped_session() as session: - tenant = dataall.db.api.Tenant.save_tenant(session, name='dataall', description='Tenant dataall') - 
dataall.db.api.TenantPolicy.attach_group_tenant_policy( - session=session, - group=group.name, - permissions=dataall.db.permissions.TENANT_ALL, - tenant_name='dataall', - ) - dataall.db.api.TenantPolicy.attach_group_tenant_policy( - session=session, - group=group2.name, - permissions=dataall.db.permissions.TENANT_ALL, - tenant_name='dataall', - ) - dataall.db.api.TenantPolicy.attach_group_tenant_policy( - session=session, - group=group3.name, - permissions=dataall.db.permissions.TENANT_ALL, - tenant_name='dataall', - ) - dataall.db.api.TenantPolicy.attach_group_tenant_policy( - session=session, - group=group4.name, - permissions=dataall.db.permissions.TENANT_ALL, - tenant_name='dataall', - ) - yield tenant - - -@pytest.fixture(scope='module', autouse=True) -def dataset(client, patch_es): - cache = {} - - def factory( - org: models.Organization, - env: models.Environment, - name: str, - owner: str, - group: str, - confidentiality: str = None - ) -> models.Dataset: - key = f'{org.organizationUri}-{env.environmentUri}-{name}-{group}' - if cache.get(key): - print('found in cache ', cache[key]) - return cache.get(key) - response = client.query( - """ - mutation CreateDataset($input:NewDatasetInput){ - createDataset( - input:$input - ){ - datasetUri - label - description - AwsAccountId - S3BucketName - GlueDatabaseName - owner - region, - businessOwnerEmail - businessOwnerDelegationEmails - SamlAdminGroupName - GlueCrawlerName - tables{ - nodes{ - tableUri - } - } - locations{ - nodes{ - locationUri - } - } - stack{ - stack - status - stackUri - targetUri - accountid - region - stackid - link - outputs - resources - - } - topics - language - confidentiality - organization{ - organizationUri - label - } - shares{ - nodes{ - shareUri - } - } - terms{ - count - nodes{ - __typename - ...on Term { - nodeUri - path - label - } - } - } - environment{ - environmentUri - label - region - subscriptionsEnabled - subscriptionsProducersTopicImported - subscriptionsConsumersTopicImported - subscriptionsConsumersTopicName - subscriptionsProducersTopicName - organization{ - organizationUri - label - } - } - statistics{ - tables - locations - upvotes - } - } - } - """, - username=owner, - groups=[group], - input={ - 'owner': owner, - 'label': f'{name}', - 'description': 'test dataset {name}', - 'businessOwnerEmail': 'jeff@amazon.com', - 'tags': random_tags(), - 'businessOwnerDelegationEmails': random_emails(), - 'environmentUri': env.environmentUri, - 'SamlAdminGroupName': group or random_group(), - 'organizationUri': org.organizationUri, - 'confidentiality': confidentiality or dataall.api.constants.ConfidentialityClassification.Unclassified.value - }, - ) - print('==>', response) - return response.data.createDataset - - yield factory - - -@pytest.fixture(scope='module', autouse=True) -def env(client): - cache = {} - - def factory(org, envname, owner, group, account, region, desc='test'): - key = f"{org.organizationUri}{envname}{owner}{''.join(group or '-')}{account}{region}" - if cache.get(key): - return cache[key] - response = client.query( - """mutation CreateEnv($input:NewEnvironmentInput){ - createEnvironment(input:$input){ - organization{ - organizationUri - } - environmentUri - label - AwsAccountId - SamlGroupName - region - name - owner - } - }""", - username=f'{owner}', - groups=[group], - input={ - 'label': f'{envname}', - 'description': f'{desc}', - 'organizationUri': org.organizationUri, - 'AwsAccountId': account, - 'tags': ['a', 'b', 'c'], - 'region': f'{region}', - 'SamlGroupName': f'{group}', - 
'dashboardsEnabled': True, - 'vpcId': 'vpc-123456', - }, - ) - cache[key] = response.data.createEnvironment - return cache[key] - - yield factory - - -@pytest.fixture(scope="module") -def environment(db): - def factory( - organization: models.Organization, - awsAccountId: str, - label: str, - owner: str, - samlGroupName: str, - environmentDefaultIAMRoleName: str, - dashboardsEnabled: bool = False, - ) -> models.Environment: - with db.scoped_session() as session: - env = models.Environment( - organizationUri=organization.organizationUri, - AwsAccountId=awsAccountId, - region="eu-central-1", - label=label, - owner=owner, - tags=[], - description="desc", - SamlGroupName=samlGroupName, - EnvironmentDefaultIAMRoleName=environmentDefaultIAMRoleName, - EnvironmentDefaultIAMRoleArn=f"arn:aws:iam::{awsAccountId}:role/{environmentDefaultIAMRoleName}", - CDKRoleArn=f"arn:aws::{awsAccountId}:role/EnvRole", - dashboardsEnabled=dashboardsEnabled, - ) - session.add(env) - session.commit() - return env - - yield factory - -@pytest.fixture(scope="module") -def dataset_model(db): - def factory( - organization: models.Organization, - environment: models.Environment, - label: str, - ) -> models.Dataset: - with db.scoped_session() as session: - dataset = models.Dataset( - organizationUri=organization.organizationUri, - environmentUri=environment.environmentUri, - label=label, - owner=environment.owner, - stewards=environment.SamlGroupName, - SamlAdminGroupName=environment.SamlGroupName, - businessOwnerDelegationEmails=["foo@amazon.com"], - name=label, - S3BucketName=label, - GlueDatabaseName="gluedatabase", - KmsAlias="kmsalias", - AwsAccountId=environment.AwsAccountId, - region=environment.region, - IAMDatasetAdminUserArn=f"arn:aws:iam::{environment.AwsAccountId}:user/dataset", - IAMDatasetAdminRoleArn=f"arn:aws:iam::{environment.AwsAccountId}:role/dataset", - ) - session.add(dataset) - session.commit() - return dataset - - yield factory - - -@pytest.fixture(scope="module") -def environment_group(db): - def factory( - environment: models.Environment, - group: models.Group, - ) -> models.EnvironmentGroup: - with db.scoped_session() as session: - - env_group = models.EnvironmentGroup( - environmentUri=environment.environmentUri, - groupUri=group.name, - environmentIAMRoleArn=environment.EnvironmentDefaultIAMRoleArn, - environmentIAMRoleName=environment.EnvironmentDefaultIAMRoleName, - environmentAthenaWorkGroup="workgroup", - ) - session.add(env_group) - dataall.db.api.ResourcePolicy.attach_resource_policy( - session=session, - resource_uri=environment.environmentUri, - group=group.name, - permissions=dataall.db.permissions.ENVIRONMENT_ALL, - resource_type=dataall.db.models.Environment.__name__, - ) - session.commit() - return env_group - - yield factory - - -@pytest.fixture(scope="module") -def share(db): - def factory( - dataset: models.Dataset, - environment: models.Environment, - env_group: models.EnvironmentGroup, - owner: str, - status: str - ) -> models.ShareObject: - with db.scoped_session() as session: - share = models.ShareObject( - datasetUri=dataset.datasetUri, - environmentUri=environment.environmentUri, - owner=owner, - groupUri=env_group.groupUri, - principalId=env_group.groupUri, - principalType=constants.PrincipalType.Group.value, - principalIAMRoleName=env_group.environmentIAMRoleName, - status=status, - ) - session.add(share) - session.commit() - dataall.db.api.ResourcePolicy.attach_resource_policy( - session=session, - group=env_group.groupUri, - 
permissions=dataall.db.permissions.SHARE_OBJECT_REQUESTER, - resource_uri=share.shareUri, - resource_type=models.ShareObject.__name__, - ) - dataall.db.api.ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.SamlAdminGroupName, - permissions=dataall.db.permissions.SHARE_OBJECT_APPROVER, - resource_uri=share.shareUri, - resource_type=dataall.db.models.ShareObject.__name__, - ) - dataall.db.api.ResourcePolicy.attach_resource_policy( - session=session, - group=dataset.stewards, - permissions=dataall.db.permissions.SHARE_OBJECT_APPROVER, - resource_uri=share.shareUri, - resource_type=dataall.db.models.ShareObject.__name__, - ) - session.commit() - return share - - yield factory - - -@pytest.fixture(scope="module") -def share_item(db): - def factory( - share: models.ShareObject, - table: models.DatasetTable, - status: str - ) -> models.ShareObjectItem: - with db.scoped_session() as session: - share_item = models.ShareObjectItem( - shareUri=share.shareUri, - owner="alice", - itemUri=table.tableUri, - itemType=constants.ShareableType.Table.value, - itemName=table.name, - status=status, - ) - session.add(share_item) - session.commit() - return share_item - - yield factory - - -@pytest.fixture(scope='module', autouse=True) -def location(db): - cache = {} - - def factory(dataset: models.Dataset, name, username) -> models.DatasetStorageLocation: - key = f'{dataset.datasetUri}-{name}' - if cache.get(key): - return cache.get(key) - with db.scoped_session() as session: - ds_location = models.DatasetStorageLocation( - name=name, - label=name, - owner=username, - datasetUri=dataset.datasetUri, - S3BucketName=dataset.S3BucketName, - region=dataset.region, - AWSAccountId=dataset.AwsAccountId, - S3Prefix=f'{name}', - ) - session.add(ds_location) - return ds_location - - yield factory - - -@pytest.fixture(scope='module', autouse=True) -def table(db): - cache = {} - - def factory(dataset: models.Dataset, name, username) -> models.DatasetTable: - key = f'{dataset.datasetUri}-{name}' - if cache.get(key): - return cache.get(key) - with db.scoped_session() as session: - table = models.DatasetTable( - name=name, - label=name, - owner=username, - datasetUri=dataset.datasetUri, - GlueDatabaseName=dataset.GlueDatabaseName, - GlueTableName=name, - region=dataset.region, - AWSAccountId=dataset.AwsAccountId, - S3BucketName=dataset.S3BucketName, - S3Prefix=f'{name}', - ) - session.add(table) - return table - - yield factory - - -@pytest.fixture(scope='module', autouse=True) -def table_with_permission(client, patch_es): - cache = {} - - def factory( - dataset: models.Dataset, - name: str, - owner: str, - group: str, - ) -> models.DatasetTable: - key = f'{dataset.datasetUri}-{name}' - if cache.get(key): - print('found in cache ', cache[key]) - return cache.get(key) - response = client.query( - """ - mutation CreateDatasetTable( - $datasetUri: String - $input: NewDatasetTableInput - ) { - createDatasetTable(datasetUri: $datasetUri, input: $input) { - tableUri - name - } - } - """, - username=owner, - groups=[group], - datasetUri=dataset.datasetUri, - input={ - 'label': f'{name}', - 'name': name, - 'description': f'test table {name}', - 'tags': random_tags(), - 'region': dataset.region - }, - ) - print('==>', response) - return response.data.createDatasetTable - - yield factory - - -@pytest.fixture(scope='module', autouse=True) -def org(client): - cache = {} - - def factory(orgname, owner, group): - key = orgname + owner + group - if cache.get(key): - print(f'returning item from cached key {key}') - 
return cache.get(key) - response = client.query( - """mutation CreateOrganization($input:NewOrganizationInput){ - createOrganization(input:$input){ - organizationUri - label - name - owner - SamlGroupName - } - }""", - username=f'{owner}', - groups=[group], - input={ - 'label': f'{orgname}', - 'description': f'test', - 'tags': ['a', 'b', 'c'], - 'SamlGroupName': f'{group}', - }, - ) - cache[key] = response.data.createOrganization - return cache[key] - - yield factory - - -@pytest.fixture(scope='module') -def org_fixture(org, user, group, tenant): - org1 = org('testorg', user.userName, group.name) - yield org1 - - -@pytest.fixture(scope='module') -def env_fixture(env, org_fixture, user, group, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch('dataall.api.Objects.Environment.resolvers.check_environment', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.get_pivot_role_as_part_of_environment', return_value=False - ) - env1 = env(org_fixture, 'dev', 'alice', 'testadmins', '111111111111', 'eu-west-1') - yield env1 - - -@pytest.fixture(scope='module') -def dataset_fixture(env_fixture, org_fixture, dataset, group, module_mocker) -> dataall.db.models.Dataset: - module_mocker.patch( - 'dataall.api.Objects.Dataset.resolvers.check_dataset_account', return_value=True - ) - yield dataset( - org=org_fixture, - env=env_fixture, - name='dataset1', - owner=env_fixture.owner, - group=group.name, - ) - - -@pytest.fixture(scope='module') -def cluster(env_fixture, org_fixture, client, group): - ouri = org_fixture.organizationUri - euri = env_fixture.environmentUri - group_name = group.name - res = client.query( - """ - mutation createRedshiftCluster { - createRedshiftCluster( - environmentUri:"%(euri)s", - clusterInput:{ - label : "mycluster", - description:"a test cluster", - vpc: "vpc-12345", - databaseName: "mydb", - masterDatabaseName: "masterDatabaseName", - masterUsername:"masterUsername", - nodeType: "multi-node", - numberOfNodes: 2, - subnetIds: ["subnet-1","subnet-2"], - securityGroupIds: ["sg-1","sg-2"], - tags:["test"], - SamlGroupName: "%(group_name)s" - } - ){ - clusterUri - label - description - tags - databaseName - masterDatabaseName - masterUsername - nodeType - numberOfNodes - subnetIds - securityGroupIds - userRoleForCluster - userRoleInEnvironment - owner - - } - } - """ - % vars(), - 'alice', - groups=[group_name], - ) - print(res) - yield res.data.createRedshiftCluster - - -@pytest.fixture(scope='module') -def sgm_notebook(client, tenant, group, env_fixture) -> dataall.db.models.SagemakerNotebook: - response = client.query( - """ - mutation createSagemakerNotebook($input:NewSagemakerNotebookInput){ - createSagemakerNotebook(input:$input){ - notebookUri - label - description - tags - owner - userRoleForNotebook - SamlAdminGroupName - VpcId - SubnetId - VolumeSizeInGB - InstanceType - } - } - """, - input={ - 'label': 'my pipeline', - 'SamlAdminGroupName': group.name, - 'tags': [group.name], - 'environmentUri': env_fixture.environmentUri, - 'VpcId': 'vpc-123567', - 'SubnetId': 'subnet-123567', - 'VolumeSizeInGB': 32, - 'InstanceType': 'ml.m5.xlarge', - }, - username='alice', - groups=[group.name], - ) - yield response.data.createSagemakerNotebook - - -@pytest.fixture(scope='module') -def pipeline(client, tenant, group, env_fixture) -> models.DataPipeline: - response = client.query( - """ - mutation createDataPipeline ($input:NewDataPipelineInput){ - createDataPipeline(input:$input){ - DataPipelineUri 
- label - description - tags - owner - repo - userRoleForPipeline - } - } - """, - input={ - 'label': 'my pipeline', - 'SamlGroupName': group.name, - 'tags': [group.name], - 'environmentUri': env_fixture.environmentUri, - 'devStrategy': 'trunk', - }, - username='alice', - groups=[group.name], - ) - yield response.data.createDataPipeline - - -@pytest.fixture(scope='module') -def sgm_studio(client, tenant, group, env_fixture, module_mocker) -> models.SagemakerStudioUserProfile: - module_mocker.patch( - 'dataall.aws.handlers.sagemaker_studio.SagemakerStudio.get_sagemaker_studio_domain', - return_value={'DomainId': 'test'}, - ) - response = client.query( - """ - mutation createSagemakerStudioUserProfile($input:NewSagemakerStudioUserProfileInput){ - createSagemakerStudioUserProfile(input:$input){ - sagemakerStudioUserProfileUri - name - label - created - description - SamlAdminGroupName - environmentUri - tags - } - } - """, - input={ - 'label': f'test1', - 'SamlAdminGroupName': group.name, - 'environmentUri': env_fixture.environmentUri, - }, - username='alice', - groups=[group.name], - ) - yield response.data.createSagemakerStudioUserProfile diff --git a/tests/api/test_client.py b/tests/api/test_client.py deleted file mode 100644 index e97f9c780..000000000 --- a/tests/api/test_client.py +++ /dev/null @@ -1,68 +0,0 @@ -def test_fine(db): - assert True - - -def test_app(app): - print(app) - - -def test_client(client): - response = client.query( - """ query Up { - up { - _ts - message - username - groups - } - }""" - ) - assert response.data.up.message == "server is up" - response = client.query( - """query Up { - up{ - _ts - message - username - groups - } - }""", - username="testuser", - ) - assert response.data.up.message == "server is up" - assert response.data.up.username == "testuser" - - response = client.query( - """query Up { - up { - _ts - message - username - groups - } - }""", - username="testuser", - groups=["a", "b"], - ) - assert response.data.up.message == "server is up" - assert response.data.up.username == "testuser" - assert str(response.data.up.groups) == str(["a", "b"]) - - response = client.query( - """query Up ($arg:String){ - up (arg:$arg){ - _ts - message - username - groups - arg - } - }""", - username="testuser", - groups=["a", "b"], - arg="argument1", - ) - assert response.data.up.message == "server is up" - assert response.data.up.username == "testuser" - assert str(response.data.up.groups) == str(["a", "b"]) - assert str(response.data.up.arg) == "argument1" diff --git a/tests/api/test_dashboards.py b/tests/api/test_dashboards.py deleted file mode 100644 index b275bb72c..000000000 --- a/tests/api/test_dashboards.py +++ /dev/null @@ -1,414 +0,0 @@ -import typing -import pytest - -import dataall - - -@pytest.fixture(scope='module', autouse=True) -def org1(org, user, group, tenant): - org1 = org('testorg', user.userName, group.name) - yield org1 - - -@pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant, module_mocker): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.check_environment', return_value=True - ) - module_mocker.patch( - 'dataall.api.Objects.Environment.resolvers.get_pivot_role_as_part_of_environment', return_value=False - ) - env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') - yield env1 - - -@pytest.fixture(scope='module') -def dataset1( - org1: dataall.db.models.Organization, - env1: dataall.db.models.Environment, - 
dataset: typing.Callable, -) -> dataall.db.models.Dataset: - yield dataset( - org=org1, env=env1, name='dataset1', owner=env1.owner, group='dataset1admins' - ) - - -@pytest.fixture(scope='module') -def dashboard(client, env1, org1, group, module_mocker, patch_es): - module_mocker.patch( - 'dataall.aws.handlers.quicksight.Quicksight.can_import_dashboard', - return_value=True, - ) - response = client.query( - """ - mutation importDashboard( - $input:ImportDashboardInput, - ){ - importDashboard(input:$input){ - dashboardUri - name - label - DashboardId - created - owner - SamlGroupName - upvotes - userRoleForDashboard - } - } - """, - input={ - 'dashboardId': f'1234', - 'label': f'1234', - 'environmentUri': env1.environmentUri, - 'SamlGroupName': group.name, - 'terms': ['term'], - }, - username='alice', - groups=[group.name], - ) - assert response.data.importDashboard.owner == 'alice' - assert response.data.importDashboard.SamlGroupName == group.name - yield response.data.importDashboard - - -def test_update_dashboard( - client, env1, org1, group, module_mocker, patch_es, dashboard -): - module_mocker.patch( - 'dataall.aws.handlers.quicksight.Quicksight.can_import_dashboard', - return_value=True, - ) - response = client.query( - """ - mutation updateDashboard( - $input:UpdateDashboardInput, - ){ - updateDashboard(input:$input){ - dashboardUri - name - label - DashboardId - created - owner - SamlGroupName - } - } - """, - input={ - 'dashboardUri': dashboard.dashboardUri, - 'label': f'1234', - 'terms': ['term2'], - }, - username='alice', - groups=[group.name], - ) - assert response.data.updateDashboard.owner == 'alice' - assert response.data.updateDashboard.SamlGroupName == group.name - - -def test_list_dashboards(client, env1, db, org1, dashboard): - response = client.query( - """ - query searchDashboards($filter:DashboardFilter!){ - searchDashboards(filter:$filter){ - count - nodes{ - dashboardUri - } - } - } - """, - filter={}, - username='alice', - ) - assert len(response.data.searchDashboards['nodes']) == 1 - - -def test_nopermissions_list_dashboards(client, env1, db, org1, dashboard): - response = client.query( - """ - query searchDashboards($filter:DashboardFilter!){ - searchDashboards(filter:$filter){ - count - nodes{ - dashboardUri - } - } - } - """, - filter={}, - username='bob', - ) - assert len(response.data.searchDashboards['nodes']) == 0 - - -def test_get_dashboard(client, env1, db, org1, dashboard, group): - response = client.query( - """ - query GetDashboard($dashboardUri:String!){ - getDashboard(dashboardUri:$dashboardUri){ - dashboardUri - name - owner - SamlGroupName - description - label - created - tags - environment{ - label - region - } - organization{ - organizationUri - label - name - } - } - } - """, - dashboardUri=dashboard.dashboardUri, - username='alice', - groups=[group.name], - ) - assert response.data.getDashboard.owner == 'alice' - assert response.data.getDashboard.SamlGroupName == group.name - - -def test_request_dashboard_share( - client, - env1, - db, - org1, - user, - group, - module_mocker, - dashboard, - patch_es, - group2, - user2, -): - module_mocker.patch( - 'dataall.aws.handlers.service_handlers.Worker.queue', return_value=True - ) - response = client.query( - """ - mutation requestDashboardShare($dashboardUri:String!, $principalId:String!){ - requestDashboardShare(dashboardUri:$dashboardUri, principalId:$principalId){ - shareUri - status - } - } - """, - dashboardUri=dashboard.dashboardUri, - principalId=group2.name, - username=user2.userName, - 
groups=[group2.name], - ) - share = response.data.requestDashboardShare - assert share.shareUri - assert share.status == 'REQUESTED' - - response = client.query( - """ - query searchDashboards($filter:DashboardFilter!){ - searchDashboards(filter:$filter){ - count - nodes{ - dashboardUri - userRoleForDashboard - } - } - } - """, - filter={}, - username=user2.userName, - groups=[group2.name], - ) - assert len(response.data.searchDashboards['nodes']) == 0 - - response = client.query( - """ - mutation approveDashboardShare($shareUri:String!){ - approveDashboardShare(shareUri:$shareUri){ - shareUri - status - } - } - """, - shareUri=share.shareUri, - username=user.userName, - groups=[group.name], - ) - assert response.data.approveDashboardShare.status == 'APPROVED' - - response = client.query( - """ - query searchDashboards($filter:DashboardFilter!){ - searchDashboards(filter:$filter){ - count - nodes{ - dashboardUri - userRoleForDashboard - } - } - } - """, - filter={}, - username=user2.userName, - groups=[group2.name], - ) - assert len(response.data.searchDashboards['nodes']) == 1 - - response = client.query( - """ - query listDashboardShares($dashboardUri:String!,$filter:DashboardShareFilter!){ - listDashboardShares(dashboardUri:$dashboardUri,filter:$filter){ - count - nodes{ - dashboardUri - shareUri - } - } - } - """, - filter={}, - dashboardUri=dashboard.dashboardUri, - username=user.userName, - groups=[group.name], - ) - assert len(response.data.listDashboardShares['nodes']) == 1 - - response = client.query( - """ - query GetDashboard($dashboardUri:String!){ - getDashboard(dashboardUri:$dashboardUri){ - dashboardUri - name - owner - SamlGroupName - description - label - created - tags - environment{ - label - region - } - organization{ - organizationUri - label - name - } - } - } - """, - dashboardUri=dashboard.dashboardUri, - username=user2.userName, - groups=[group2.name], - ) - assert response.data.getDashboard.owner == 'alice' - assert response.data.getDashboard.SamlGroupName == group.name - - response = client.query( - """ - mutation rejectDashboardShare($shareUri:String!){ - rejectDashboardShare(shareUri:$shareUri){ - shareUri - status - } - } - """, - shareUri=share.shareUri, - username=user.userName, - groups=[group.name], - ) - assert response.data.rejectDashboardShare.status == 'REJECTED' - - response = client.query( - """ - query searchDashboards($filter:DashboardFilter!){ - searchDashboards(filter:$filter){ - count - nodes{ - dashboardUri - userRoleForDashboard - } - } - } - """, - filter={}, - username=user2.userName, - groups=[group2.name], - ) - assert len(response.data.searchDashboards['nodes']) == 0 - - response = client.query( - """ - mutation shareDashboard($dashboardUri:String!, $principalId:String!){ - shareDashboard(dashboardUri:$dashboardUri, principalId:$principalId){ - shareUri - status - } - } - """, - dashboardUri=dashboard.dashboardUri, - principalId=group2.name, - username=user.userName, - groups=[group.name], - ) - assert response.data.shareDashboard.shareUri - - response = client.query( - """ - query searchDashboards($filter:DashboardFilter!){ - searchDashboards(filter:$filter){ - count - nodes{ - dashboardUri - userRoleForDashboard - } - } - } - """, - filter={}, - username=user2.userName, - groups=[group2.name], - ) - assert len(response.data.searchDashboards['nodes']) == 1 - - -def test_delete_dashboard( - client, env1, db, org1, user, group, module_mocker, dashboard, patch_es -): - module_mocker.patch( - 
'dataall.aws.handlers.service_handlers.Worker.queue', return_value=True - ) - response = client.query( - """ - mutation deleteDashboard($dashboardUri:String!){ - deleteDashboard(dashboardUri:$dashboardUri) - } - """, - dashboardUri=dashboard.dashboardUri, - username=user.userName, - groups=[group.name], - ) - assert response.data.deleteDashboard - response = client.query( - """ - query searchDashboards($filter:DashboardFilter!){ - searchDashboards(filter:$filter){ - count - nodes{ - dashboardUri - } - } - } - """, - filter={}, - username='alice', - ) - assert len(response.data.searchDashboards['nodes']) == 0 diff --git a/tests/api/test_datapipelines.py b/tests/api/test_datapipelines.py deleted file mode 100644 index 9dcfd1446..000000000 --- a/tests/api/test_datapipelines.py +++ /dev/null @@ -1,234 +0,0 @@ -import pytest - - -@pytest.fixture(scope='module') -def org1(org, user, group, tenant): - org1 = org('testorg', user.userName, group.name) - yield org1 - - -@pytest.fixture(scope='module') -def env1(env, org1, user, group, tenant): - env1 = env(org1, 'cicd', user.userName, group.name, '111111111111', 'eu-west-1') - yield env1 - -@pytest.fixture(scope='module') -def env2(env, org1, user, group): - env2 = env(org1, 'dev', user.userName, group.name, '222222222222', 'eu-west-1') - yield env2 - - -@pytest.fixture(scope='module', autouse=True) -def pipeline(client, tenant, group, env1): - response = client.query( - """ - mutation createDataPipeline ($input:NewDataPipelineInput){ - createDataPipeline(input:$input){ - DataPipelineUri - label - description - tags - owner - repo - userRoleForPipeline - } - } - """, - input={ - 'label': 'my pipeline', - 'SamlGroupName': group.name, - 'tags': [group.name], - 'environmentUri': env1.environmentUri, - 'devStrategy': 'trunk', - }, - username='alice', - groups=[group.name], - ) - assert response.data.createDataPipeline.repo - assert response.data.createDataPipeline.DataPipelineUri - return response.data.createDataPipeline - - -def test_create_pipeline_environment(client, tenant, group, env2, pipeline): - response = client.query( - """ - mutation createDataPipelineEnvironment($input: NewDataPipelineEnvironmentInput) { - createDataPipelineEnvironment(input: $input) { - envPipelineUri - environmentUri - environmentLabel - pipelineUri - pipelineLabel - stage - region - AwsAccountId - samlGroupName - } - } - """, - input={ - 'stage': 'dev', - 'order': 1, - 'pipelineUri': pipeline.DataPipelineUri, - 'environmentUri': env2.environmentUri, - 'environmentLabel': env2.label, - 'samlGroupName': group.name - }, - username='alice', - groups=[group.name], - ) - assert response.data.createDataPipelineEnvironment.envPipelineUri - assert response.data.createDataPipelineEnvironment.stage == 'dev' - assert response.data.createDataPipelineEnvironment.AwsAccountId == env2.AwsAccountId - - - - -def test_update_pipeline(client, tenant, group, pipeline): - response = client.query( - """ - mutation updateDataPipeline ($DataPipelineUri:String!,$input:UpdateDataPipelineInput){ - updateDataPipeline(DataPipelineUri:$DataPipelineUri,input:$input){ - DataPipelineUri - label - description - tags - owner - repo - userRoleForPipeline - } - } - """, - DataPipelineUri=pipeline.DataPipelineUri, - input={ - 'label': 'changed pipeline', - 'tags': [group.name], - }, - username='alice', - groups=[group.name], - ) - assert response.data.updateDataPipeline.label == 'changed pipeline' - - -def test_list_pipelines(client, env1, db, org1, user, group, pipeline): - response = client.query( - """ - 
query ListDataPipelines($filter:DataPipelineFilter){ - listDataPipelines(filter:$filter){ - count - nodes{ - DataPipelineUri - cloneUrlHttp - environment { - environmentUri - } - organization { - organizationUri - } - } - } - } - """, - filter=None, - username=user.userName, - groups=[group.name], - ) - assert len(response.data.listDataPipelines['nodes']) == 1 - - -def test_nopermissions_pipelines(client, env1, db, org1, user, group, pipeline): - response = client.query( - """ - query listDataPipelines($filter:DataPipelineFilter){ - listDataPipelines(filter:$filter){ - count - nodes{ - DataPipelineUri - } - } - } - """, - filter=None, - username='bob', - ) - assert len(response.data.listDataPipelines['nodes']) == 0 - - -def test_get_pipeline(client, env1, db, org1, user, group, pipeline, module_mocker): - module_mocker.patch( - 'dataall.aws.handlers.service_handlers.Worker.process', - return_value=[{'response': 'return value'}], - ) - module_mocker.patch( - 'dataall.api.Objects.DataPipeline.resolvers._get_creds_from_aws', - return_value=True, - ) - response = client.query( - """ - query getDataPipeline($DataPipelineUri:String!){ - getDataPipeline(DataPipelineUri:$DataPipelineUri){ - DataPipelineUri - } - } - """, - DataPipelineUri=pipeline.DataPipelineUri, - username=user.userName, - groups=[group.name], - ) - assert response.data.getDataPipeline.DataPipelineUri == pipeline.DataPipelineUri - response = client.query( - """ - query getDataPipelineCredsLinux($DataPipelineUri:String!){ - getDataPipelineCredsLinux(DataPipelineUri:$DataPipelineUri) - } - """, - DataPipelineUri=pipeline.DataPipelineUri, - username=user.userName, - groups=[group.name], - ) - assert response.data.getDataPipelineCredsLinux - response = client.query( - """ - query browseDataPipelineRepository($input:DataPipelineBrowseInput!){ - browseDataPipelineRepository(input:$input) - } - """, - input=dict(branch='master', DataPipelineUri=pipeline.DataPipelineUri), - username=user.userName, - groups=[group.name], - ) - assert response.data.browseDataPipelineRepository - - -def test_delete_pipelines(client, env1, db, org1, user, group, module_mocker, pipeline): - module_mocker.patch( - 'dataall.aws.handlers.service_handlers.Worker.queue', return_value=True - ) - response = client.query( - """ - mutation deleteDataPipeline($DataPipelineUri:String!,$deleteFromAWS:Boolean){ - deleteDataPipeline(DataPipelineUri:$DataPipelineUri,deleteFromAWS:$deleteFromAWS) - } - """, - DataPipelineUri=pipeline.DataPipelineUri, - deleteFromAWS=True, - username=user.userName, - groups=[group.name], - ) - assert response.data.deleteDataPipeline - response = client.query( - """ - query ListDataPipelines($filter:DataPipelineFilter){ - listDataPipelines(filter:$filter){ - count - nodes{ - DataPipelineUri - } - } - } - """, - filter=None, - username=user.userName, - groups=[group.name], - ) - assert len(response.data.listDataPipelines['nodes']) == 0 diff --git a/tests/api/test_dataset_location.py b/tests/api/test_dataset_location.py deleted file mode 100644 index 32f876aa2..000000000 --- a/tests/api/test_dataset_location.py +++ /dev/null @@ -1,171 +0,0 @@ -import typing - -import pytest - -import dataall - - -@pytest.fixture(scope='module', autouse=True) -def org1(org, user, group, tenant): - org1 = org('testorg', user.userName, group.name) - yield org1 - - -@pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant): - env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') - yield env1 - - 
-@pytest.fixture(scope='module') -def dataset1(env1, org1, dataset, group) -> dataall.db.models.Dataset: - yield dataset( - org=org1, env=env1, name='dataset1', owner=env1.owner, group=group.name - ) - - -@pytest.fixture(scope='module') -def org2(org: typing.Callable, user2, group2, tenant) -> dataall.db.models.Organization: - yield org('org2', user2.userName, group2.name) - - -@pytest.fixture(scope='module') -def env2( - env: typing.Callable, org2: dataall.db.models.Organization, user2, group2, tenant -) -> dataall.db.models.Environment: - yield env(org2, 'dev', user2.userName, group2.name, '2' * 12, 'eu-west-2') - - -def test_init(db): - assert True - - -def test_get_dataset(client, dataset1, env1, user, group): - response = client.query( - """ - query GetDataset($datasetUri:String!){ - getDataset(datasetUri:$datasetUri){ - label - AwsAccountId - description - region - imported - importedS3Bucket - } - } - """, - datasetUri=dataset1.datasetUri, - username=user.userName, - groups=[group.name], - ) - assert response.data.getDataset.AwsAccountId == env1.AwsAccountId - assert response.data.getDataset.region == env1.region - assert response.data.getDataset.label == 'dataset1' - assert response.data.getDataset.imported is False - assert response.data.getDataset.importedS3Bucket is False - - -def test_create_location(client, dataset1, env1, user, group, patch_es, module_mocker): - module_mocker.patch( - 'dataall.aws.handlers.s3.S3.create_bucket_prefix', return_value=True - ) - response = client.query( - """ - mutation createDatasetStorageLocation($datasetUri:String!, $input:NewDatasetStorageLocationInput!){ - createDatasetStorageLocation(datasetUri:$datasetUri, input:$input){ - locationUri - S3Prefix - label - tags - } - } - """, - datasetUri=dataset1.datasetUri, - username=user.userName, - groups=[group.name], - input={ - 'label': 'testing', - 'prefix': 'mylocation', - 'tags': ['test'], - 'terms': ['term'], - }, - ) - assert response.data.createDatasetStorageLocation.label == 'testing' - assert response.data.createDatasetStorageLocation.S3Prefix == 'mylocation' - assert 'test' in response.data.createDatasetStorageLocation.tags - - -def test_manage_dataset_location(client, dataset1, env1, user, group): - response = client.query( - """ - query GetDataset($datasetUri:String!){ - getDataset(datasetUri:$datasetUri){ - label - AwsAccountId - description - region - imported - importedS3Bucket - locations{ - nodes{ - locationUri - } - } - } - } - """, - datasetUri=dataset1.datasetUri, - username=user.userName, - groups=[group.name], - ) - assert response.data.getDataset.locations.nodes[0].locationUri - - response = client.query( - """ - query getDatasetStorageLocation($locationUri:String!){ - getDatasetStorageLocation(locationUri:$locationUri){ - locationUri - S3Prefix - label - tags - } - } - """, - locationUri=response.data.getDataset.locations.nodes[0].locationUri, - username=user.userName, - groups=[group.name], - ) - assert response.data.getDatasetStorageLocation.label == 'testing' - assert response.data.getDatasetStorageLocation.S3Prefix == 'mylocation' - - response = client.query( - """ - mutation updateDatasetStorageLocation($locationUri:String!, $input:ModifyDatasetStorageLocationInput!){ - updateDatasetStorageLocation(locationUri:$locationUri, input:$input){ - locationUri - S3Prefix - label - tags - } - } - """, - locationUri=response.data.getDatasetStorageLocation.locationUri, - username=user.userName, - input={'label': 'testing2', 'terms': ['ert']}, - groups=[group.name], - ) - assert 
response.data.updateDatasetStorageLocation.label == 'testing2' - assert response.data.updateDatasetStorageLocation.S3Prefix == 'mylocation' - assert 'test' in response.data.updateDatasetStorageLocation.tags - - response = client.query( - """ - mutation deleteDatasetStorageLocation($locationUri: String!){ - deleteDatasetStorageLocation(locationUri:$locationUri) - } - """, - locationUri=response.data.updateDatasetStorageLocation.locationUri, - username=user.userName, - groups=[group.name], - ) - assert response.data.deleteDatasetStorageLocation diff --git a/tests/api/test_dataset_profiling.py b/tests/api/test_dataset_profiling.py deleted file mode 100644 index bcab1deb5..000000000 --- a/tests/api/test_dataset_profiling.py +++ /dev/null @@ -1,213 +0,0 @@ -import typing -import pytest - -import dataall - - -@pytest.fixture(scope='module', autouse=True) -def org1(org, user, group, tenant): - org1 = org('testorg', user.userName, group.name) - yield org1 - - -@pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant): - env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') - yield env1 - -@pytest.fixture(scope='module', autouse=True) -def org2(org, user2, group2, tenant): - org2 = org('testorg2', user2.userName, group2.name) - yield org2 - - -@pytest.fixture(scope='module', autouse=True) -def env2(env, org2, user2, group2, tenant): - env2 = env(org2, 'dev2', user2.userName, group2.name, '2222222222', 'eu-west-1') - yield env2 - - -@pytest.fixture(scope='module') -def dataset1(env1, org1, dataset, group, user) -> dataall.db.models.Dataset: - dataset1 = dataset( - org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name, - confidentiality=dataall.api.constants.ConfidentialityClassification.Secret.value - ) - yield dataset1 - -@pytest.fixture(scope='module') -def table1(dataset1, table_with_permission, group, user): - yield table_with_permission(dataset=dataset1, name="table1", owner=user.userName, group=group.name) - - -def test_start_profiling_run_authorized(org1, env1, dataset1, table1, client, module_mocker, db, user, group): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.aws.handlers.service_handlers.Worker.process', return_value=True - ) - dataset1.GlueProfilingJobName = ('profile-job',) - dataset1.GlueProfilingTriggerSchedule = ('cron(* 2 * * ? *)',) - dataset1.GlueProfilingTriggerName = ('profile-job',) - response = client.query( - """ - mutation startDatasetProfilingRun($input:StartDatasetProfilingRunInput){ - startDatasetProfilingRun(input:$input) - { - profilingRunUri - } - } - """, - username=user.userName, - input={'datasetUri': dataset1.datasetUri, 'GlueTableName': table1.name}, - groups=[group.name], - ) - profiling = response.data.startDatasetProfilingRun - assert profiling.profilingRunUri - with db.scoped_session() as session: - profiling = session.query(dataall.db.models.DatasetProfilingRun).get( - profiling.profilingRunUri - ) - profiling.GlueJobRunId = 'jr_111111111111' - session.commit() - - -def test_start_profiling_run_unauthorized(org2, env2, dataset1, table1, client, module_mocker, db, user2, group2): - module_mocker.patch('requests.post', return_value=True) - module_mocker.patch( - 'dataall.aws.handlers.service_handlers.Worker.process', return_value=True - ) - dataset1.GlueProfilingJobName = ('profile-job',) - dataset1.GlueProfilingTriggerSchedule = ('cron(* 2 * * ? 
*)',) - dataset1.GlueProfilingTriggerName = ('profile-job',) - response = client.query( - """ - mutation startDatasetProfilingRun($input:StartDatasetProfilingRunInput){ - startDatasetProfilingRun(input:$input) - { - profilingRunUri - } - } - """, - username=user2.userName, - input={'datasetUri': dataset1.datasetUri, 'GlueTableName': table1.name}, - groups=[group2.name], - ) - assert 'UnauthorizedOperation' in response.errors[0].message - - -def test_get_table_profiling_run_authorized( - client, dataset1, table1, module_mocker, db, user, group -): - module_mocker.patch( - 'dataall.api.Objects.DatasetProfiling.resolvers._get_profiling_results_from_s3', - return_value='{"results": "yes"}', - ) - - response = client.query( - """ - query getDatasetTableProfilingRun($tableUri:String!){ - getDatasetTableProfilingRun(tableUri:$tableUri){ - profilingRunUri - status - GlueTableName - } - } - """, - tableUri=table1.tableUri, - groups=[group.name], - username=user.userName, - ) - assert response.data.getDatasetTableProfilingRun['profilingRunUri'] - assert response.data.getDatasetTableProfilingRun['status'] == 'RUNNING' - assert response.data.getDatasetTableProfilingRun['GlueTableName'] == 'table1' - -def test_get_table_profiling_run_unauthorized( - client, dataset1, module_mocker, table1, db, user2, group2 -): - module_mocker.patch( - 'dataall.api.Objects.DatasetProfiling.resolvers._get_profiling_results_from_s3', - return_value='{"results": "yes"}', - ) - - response = client.query( - """ - query getDatasetTableProfilingRun($tableUri:String!){ - getDatasetTableProfilingRun(tableUri:$tableUri){ - profilingRunUri - status - GlueTableName - } - } - """, - tableUri=table1.tableUri, - groups=[group2.name], - username=user2.userName, - ) - assert 'UnauthorizedOperation' in response.errors[0].message - - -def test_list_table_profiling_runs_authorized( - client, dataset1, module_mocker, table1, db, user, group -): - module_mocker.patch( - 'dataall.api.Objects.DatasetProfiling.resolvers._get_profiling_results_from_s3', - return_value='{"results": "yes"}', - ) - module_mocker.patch('requests.post', return_value=True) - - response = client.query( - """ - query listDatasetTableProfilingRuns($tableUri:String!){ - listDatasetTableProfilingRuns(tableUri:$tableUri){ - count - nodes{ - profilingRunUri - status - GlueTableName - } - - } - } - """, - tableUri=table1.tableUri, - groups=[group.name], - username=user.userName, - ) - assert response.data.listDatasetTableProfilingRuns['count'] == 1 - assert response.data.listDatasetTableProfilingRuns['nodes'][0]['profilingRunUri'] - assert ( - response.data.listDatasetTableProfilingRuns['nodes'][0]['status'] == 'RUNNING' - ) - assert ( - response.data.listDatasetTableProfilingRuns['nodes'][0]['GlueTableName'] - == 'table1' - ) - -def test_list_table_profiling_runs_unauthorized( - client, dataset1, module_mocker, table1, db, user2, group2 -): - module_mocker.patch( - 'dataall.api.Objects.DatasetProfiling.resolvers._get_profiling_results_from_s3', - return_value='{"results": "yes"}', - ) - module_mocker.patch('requests.post', return_value=True) - - response = client.query( - """ - query listDatasetTableProfilingRuns($tableUri:String!){ - listDatasetTableProfilingRuns(tableUri:$tableUri){ - count - nodes{ - profilingRunUri - status - GlueTableName - } - - } - } - """, - tableUri=table1.tableUri, - groups=[group2.name], - username=user2.userName, - ) - assert 'UnauthorizedOperation' in response.errors[0].message diff --git a/tests/api/test_dataset_table.py 
b/tests/api/test_dataset_table.py deleted file mode 100644 index 66986a41a..000000000 --- a/tests/api/test_dataset_table.py +++ /dev/null @@ -1,347 +0,0 @@ -import typing - -import pytest - -import dataall - - -@pytest.fixture(scope='module', autouse=True) -def org1(org, user, group, tenant): - org1 = org('testorg', user.userName, group.name) - yield org1 - - -@pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group): - env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') - yield env1 - - -@pytest.fixture(scope='module') -def dataset1(env1, org1, dataset, group) -> dataall.db.models.Dataset: - yield dataset( - org=org1, env=env1, name='dataset1', owner=env1.owner, group=group.name - ) - - -@pytest.fixture(scope='module') -def org2(org: typing.Callable, user2, group2, tenant) -> dataall.db.models.Organization: - yield org('org2', user2.userName, group2.name) - - -@pytest.fixture(scope='module') -def env2( - env: typing.Callable, org2: dataall.db.models.Organization, user2, group2, tenant -) -> dataall.db.models.Environment: - yield env(org2, 'dev', user2.userName, group2.name, '2' * 12, 'eu-west-2') - - -def test_init(db): - assert True - - -def test_get_dataset(client, dataset1, env1, user): - response = client.query( - """ - query GetDataset($datasetUri:String!){ - getDataset(datasetUri:$datasetUri){ - label - AwsAccountId - description - region - imported - importedS3Bucket - } - } - """, - datasetUri=dataset1.datasetUri, - username=user.userName, - groups=[dataset1.SamlAdminGroupName], - ) - assert response.data.getDataset.AwsAccountId == env1.AwsAccountId - assert response.data.getDataset.region == env1.region - assert response.data.getDataset.label == 'dataset1' - assert response.data.getDataset.imported is False - assert response.data.getDataset.importedS3Bucket is False - - -def test_add_tables(table, dataset1, db): - for i in range(0, 10): - table(dataset=dataset1, name=f'table{i+1}', username=dataset1.owner) - - with db.scoped_session() as session: - nb = session.query(dataall.db.models.DatasetTable).count() - assert nb == 10 - - -def test_update_table(client, env1, table, dataset1, db, user, group): - table_to_update = table( - dataset=dataset1, name=f'table_to_update', username=dataset1.owner - ) - response = client.query( - """ - mutation UpdateDatasetTable($tableUri:String!,$input:ModifyDatasetTableInput!){ - updateDatasetTable(tableUri:$tableUri,input:$input){ - tableUri - description - tags - } - } - """, - username=user.userName, - groups=[group.name], - tableUri=table_to_update.tableUri, - input={ - 'description': 'test update', - 'tags': ['t1', 't2'], - }, - ) - assert response.data.updateDatasetTable.description == 'test update' - assert 't1' in response.data.updateDatasetTable.tags - - -def test_add_columns(table, dataset1, db): - with db.scoped_session() as session: - table = ( - session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.name == 'table1') - .first() - ) - table_col = dataall.db.models.DatasetTableColumn( - name='col1', - description='None', - label='col1', - owner=table.owner, - datasetUri=table.datasetUri, - tableUri=table.tableUri, - AWSAccountId=table.AWSAccountId, - GlueDatabaseName=table.GlueDatabaseName, - GlueTableName=table.GlueTableName, - region=table.region, - typeName='String', - ) - session.add(table_col) - - -def test_list_dataset_tables(client, dataset1): - q = """ - query GetDataset($datasetUri:String!,$tableFilter:DatasetTableFilter){ - 
getDataset(datasetUri:$datasetUri){ - datasetUri - tables(filter:$tableFilter){ - count - nodes{ - tableUri - name - label - GlueDatabaseName - GlueTableName - S3Prefix - } - } - } - } - """ - response = client.query( - q, - username=dataset1.owner, - datasetUri=dataset1.datasetUri, - tableFilter={'pageSize': 100}, - groups=[dataset1.SamlAdminGroupName], - ) - assert response.data.getDataset.tables.count >= 10 - assert len(response.data.getDataset.tables.nodes) >= 10 - - response = client.query( - q, - username=dataset1.owner, - datasetUri=dataset1.datasetUri, - tableFilter={'pageSize': 3}, - groups=[dataset1.SamlAdminGroupName], - ) - assert response.data.getDataset.tables.count >= 10 - assert len(response.data.getDataset.tables.nodes) == 3 - - response = client.query( - q, - username=dataset1.owner, - datasetUri=dataset1.datasetUri, - tableFilter={'pageSize': 100, 'term': 'table1'}, - groups=[dataset1.SamlAdminGroupName], - ) - assert response.data.getDataset.tables.count == 2 - assert len(response.data.getDataset.tables.nodes) == 2 - - -def test_update_dataset_table_column(client, table, dataset1, db): - with db.scoped_session() as session: - table = ( - session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.name == 'table1') - .first() - ) - column = ( - session.query(dataall.db.models.DatasetTableColumn) - .filter(dataall.db.models.DatasetTableColumn.tableUri == table.tableUri) - .first() - ) - response = client.query( - """ - mutation updateDatasetTableColumn($columnUri:String!,$input:DatasetTableColumnInput){ - updateDatasetTableColumn(columnUri:$columnUri,input:$input){ - description - } - } - """, - username=dataset1.owner, - columnUri=column.columnUri, - input={'description': 'My new description'}, - groups=[dataset1.SamlAdminGroupName], - ) - print('response', response) - assert ( - response.data.updateDatasetTableColumn.description == 'My new description' - ) - - column = session.query(dataall.db.models.DatasetTableColumn).get( - column.columnUri - ) - assert column.description == 'My new description' - response = client.query( - """ - mutation updateDatasetTableColumn($columnUri:String!,$input:DatasetTableColumnInput){ - updateDatasetTableColumn(columnUri:$columnUri,input:$input){ - description - } - } - """, - username='unauthorized', - columnUri=column.columnUri, - input={'description': 'My new description'}, - ) - assert 'Unauthorized' in response.errors[0].message - - -def test_sync_tables_and_columns(client, table, dataset1, db): - with db.scoped_session() as session: - table = ( - session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.name == 'table1') - .first() - ) - column = ( - session.query(dataall.db.models.DatasetTableColumn) - .filter(dataall.db.models.DatasetTableColumn.tableUri == table.tableUri) - .first() - ) - glue_tables = [ - { - 'Name': 'new_table', - 'DatabaseName': dataset1.GlueDatabaseName, - 'StorageDescriptor': { - 'Columns': [ - { - 'Name': 'col1', - 'Type': 'string', - 'Comment': 'comment_col', - 'Parameters': {'colp1': 'p1'}, - }, - ], - 'Location': f's3://{dataset1.S3BucketName}/table1', - 'Parameters': {'p1': 'p1'}, - }, - 'PartitionKeys': [ - { - 'Name': 'partition1', - 'Type': 'string', - 'Comment': 'comment_partition', - 'Parameters': {'partition_1': 'p1'}, - }, - ], - }, - { - 'Name': 'table1', - 'DatabaseName': dataset1.GlueDatabaseName, - 'StorageDescriptor': { - 'Columns': [ - { - 'Name': 'col1', - 'Type': 'string', - 'Comment': 'comment_col', - 'Parameters': {'colp1': 'p1'}, 
- }, - ], - 'Location': f's3://{dataset1.S3BucketName}/table1', - 'Parameters': {'p1': 'p1'}, - }, - 'PartitionKeys': [ - { - 'Name': 'partition1', - 'Type': 'string', - 'Comment': 'comment_partition', - 'Parameters': {'partition_1': 'p1'}, - }, - ], - }, - ] - - assert dataall.db.api.DatasetTable.sync( - session, dataset1.datasetUri, glue_tables - ) - new_table: dataall.db.models.DatasetTable = ( - session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.name == 'new_table') - .first() - ) - assert new_table - assert new_table.GlueTableName == 'new_table' - columns: [dataall.db.models.DatasetTableColumn] = ( - session.query(dataall.db.models.DatasetTableColumn) - .filter(dataall.db.models.DatasetTableColumn.tableUri == new_table.tableUri) - .order_by(dataall.db.models.DatasetTableColumn.columnType.asc()) - .all() - ) - assert len(columns) == 2 - assert columns[0].columnType == 'column' - assert columns[1].columnType == 'partition_0' - - existing_table: dataall.db.models.DatasetTable = ( - session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.name == 'table1') - .first() - ) - assert existing_table - assert existing_table.GlueTableName == 'table1' - columns: [dataall.db.models.DatasetTableColumn] = ( - session.query(dataall.db.models.DatasetTableColumn) - .filter(dataall.db.models.DatasetTableColumn.tableUri == new_table.tableUri) - .order_by(dataall.db.models.DatasetTableColumn.columnType.asc()) - .all() - ) - assert len(columns) == 2 - assert columns[0].columnType == 'column' - assert columns[1].columnType == 'partition_0' - - deleted_table: dataall.db.models.DatasetTable = ( - session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.name == 'table2') - .first() - ) - assert deleted_table.LastGlueTableStatus == 'Deleted' - - -def test_delete_table(client, table, dataset1, db, group): - table_to_delete = table( - dataset=dataset1, name=f'table_to_update', username=dataset1.owner - ) - response = client.query( - """ - mutation deleteDatasetTable($tableUri:String!){ - deleteDatasetTable(tableUri:$tableUri) - } - """, - username='alice', - groups=[group.name], - tableUri=table_to_delete.tableUri, - ) - assert response.data.deleteDatasetTable diff --git a/tests/api/test_environment.py b/tests/api/test_environment.py deleted file mode 100644 index aca5a6bf1..000000000 --- a/tests/api/test_environment.py +++ /dev/null @@ -1,737 +0,0 @@ -import pytest - -import dataall -from dataall.db import permissions - - -@pytest.fixture(scope='module', autouse=True) -def org1(org, user, group, tenant): - org1 = org('testorg', user.userName, group.name) - yield org1 - - -@pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant): - env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') - yield env1 - - -def test_get_environment(client, org1, env1, group): - response = client.query( - """ - query GetEnv($environmentUri:String!){ - getEnvironment(environmentUri:$environmentUri){ - organization{ - organizationUri - } - environmentUri - label - AwsAccountId - region - SamlGroupName - owner - dashboardsEnabled - notebooksEnabled - mlStudiosEnabled - pipelinesEnabled - warehousesEnabled - stack{ - EcsTaskArn - EcsTaskId - } - } - } - """, - username='alice', - environmentUri=env1.environmentUri, - groups=[group.name], - ) - assert ( - response.data.getEnvironment.organization.organizationUri - == org1.organizationUri - ) - assert response.data.getEnvironment.owner == 
'alice' - assert response.data.getEnvironment.AwsAccountId == env1.AwsAccountId - assert response.data.getEnvironment.dashboardsEnabled - assert response.data.getEnvironment.notebooksEnabled - assert response.data.getEnvironment.mlStudiosEnabled - assert response.data.getEnvironment.pipelinesEnabled - assert response.data.getEnvironment.warehousesEnabled - - -def test_get_environment_object_not_found(client, org1, env1, group): - response = client.query( - """ - query GetEnv($environmentUri:String!){ - getEnvironment(environmentUri:$environmentUri){ - organization{ - organizationUri - } - environmentUri - label - AwsAccountId - region - SamlGroupName - owner - } - } - """, - username='alice', - environmentUri='doesnotexist', - groups=[group.name], - ) - assert 'UnauthorizedOperation' in response.errors[0].message - - -def test_update_env(client, org1, env1, group): - response = client.query( - """ - mutation UpdateEnv($environmentUri:String!,$input:ModifyEnvironmentInput){ - updateEnvironment(environmentUri:$environmentUri,input:$input){ - organization{ - organizationUri - } - label - AwsAccountId - region - SamlGroupName - owner - tags - resourcePrefix - dashboardsEnabled - notebooksEnabled - mlStudiosEnabled - pipelinesEnabled - warehousesEnabled - - } - } - """, - username='alice', - environmentUri=env1.environmentUri, - input={ - 'label': 'DEV', - 'tags': ['test', 'env'], - 'dashboardsEnabled': False, - 'notebooksEnabled': False, - 'mlStudiosEnabled': False, - 'pipelinesEnabled': False, - 'warehousesEnabled': False, - 'resourcePrefix': 'customer-prefix_AZ390 ', - }, - groups=[group.name], - ) - assert 'InvalidInput' in response.errors[0].message - - response = client.query( - """ - mutation UpdateEnv($environmentUri:String!,$input:ModifyEnvironmentInput){ - updateEnvironment(environmentUri:$environmentUri,input:$input){ - organization{ - organizationUri - } - label - AwsAccountId - region - SamlGroupName - owner - tags - resourcePrefix - dashboardsEnabled - notebooksEnabled - mlStudiosEnabled - pipelinesEnabled - warehousesEnabled - - } - } - """, - username='alice', - environmentUri=env1.environmentUri, - input={ - 'label': 'DEV', - 'tags': ['test', 'env'], - 'dashboardsEnabled': False, - 'notebooksEnabled': False, - 'mlStudiosEnabled': False, - 'pipelinesEnabled': False, - 'warehousesEnabled': False, - 'resourcePrefix': 'customer-prefix', - }, - groups=[group.name], - ) - print(response) - assert ( - response.data.updateEnvironment.organization.organizationUri - == org1.organizationUri - ) - assert response.data.updateEnvironment.owner == 'alice' - assert response.data.updateEnvironment.AwsAccountId == env1.AwsAccountId - assert response.data.updateEnvironment.label == 'DEV' - assert str(response.data.updateEnvironment.tags) == str(['test', 'env']) - assert not response.data.updateEnvironment.dashboardsEnabled - assert not response.data.updateEnvironment.notebooksEnabled - assert not response.data.updateEnvironment.mlStudiosEnabled - assert not response.data.updateEnvironment.pipelinesEnabled - assert not response.data.updateEnvironment.warehousesEnabled - assert response.data.updateEnvironment.resourcePrefix == 'customer-prefix' - - -def test_unauthorized_update(client, org1, env1): - response = client.query( - """ - mutation UpdateEnv($environmentUri:String!,$input:ModifyEnvironmentInput){ - updateEnvironment(environmentUri:$environmentUri,input:$input){ - organization{ - organizationUri - } - label - AwsAccountId - region - SamlGroupName - owner - tags - } - } - """, - 
username='bob', - environmentUri=env1.environmentUri, - input={'label': 'DEV', 'tags': ['test', 'env']}, - ) - assert 'UnauthorizedOperation' in response.errors[0].message - - -def test_list_environments_no_filter(org1, env1, client, group): - response = client.query( - """ - query ListEnvironments($filter:EnvironmentFilter){ - listEnvironments(filter:$filter){ - count - nodes{ - environmentUri - owner - name - userRoleInEnvironment - label - AwsAccountId - region - } - } - } - """, - username='alice', - groups=[group.name], - ) - print(response) - - assert response.data.listEnvironments.count == 1 - - response = client.query( - """ - query ListEnvironmentNetworks($environmentUri: String!,$filter:VpcFilter){ - listEnvironmentNetworks(environmentUri:$environmentUri,filter:$filter){ - count - nodes{ - VpcId - SamlGroupName - } - } - } - """, - environmentUri=env1.environmentUri, - username='alice', - groups=[group.name], - ) - print(response) - - assert response.data.listEnvironmentNetworks.count == 1 - - -def test_list_environment_role_filter_as_creator(org1, env1, client, group): - response = client.query( - """ - query ListEnvironments($filter:EnvironmentFilter){ - listEnvironments(filter:$filter){ - count - nodes{ - environmentUri - name - owner - label - AwsAccountId - region - } - } - } - """, - username='alice', - groups=[group.name], - ) - print('--->', response) - - assert response.data.listEnvironments.count == 1 - - -def test_list_environment_role_filter_as_admin(db, client, org1, env1, user, group): - response = client.query( - """ - query ListEnvironments($filter:EnvironmentFilter){ - listEnvironments(filter:$filter){ - count - nodes{ - environmentUri - name - owner - label - AwsAccountId - region - } - } - } - """, - username=user.userName, - groups=[group.name], - filter={'roles': [dataall.api.constants.EnvironmentPermission.Invited.name]}, - ) - - assert response.data.listEnvironments.count == 1 - - -def test_paging(db, client, org1, env1, user, group): - for i in range(1, 30): - with db.scoped_session() as session: - env = dataall.db.models.Environment( - organizationUri=org1.organizationUri, - AwsAccountId=f'12345678901+{i}', - region='eu-west-1', - label='org', - owner=user.userName, - tags=[], - description='desc', - SamlGroupName=group.name, - EnvironmentDefaultIAMRoleName='EnvRole', - EnvironmentDefaultIAMRoleArn='arn:aws::123456789012:role/EnvRole/GlueJobSessionRunner', - CDKRoleArn='arn:aws::123456789012:role/EnvRole', - userRoleInEnvironment='999', - ) - session.add(env) - session.commit() - - hasNext = True - nb_iter = 0 - page = 1 - max_iter = 10 - first_id = None - while hasNext and nb_iter < max_iter: - response = client.query( - """ - query LE($filter:EnvironmentFilter){ - listEnvironments(filter:$filter){ - count - page - pageSize - hasNext - hasPrevious - nodes{ - environmentUri - } - } - } - """, - username=user.userName, - filter={'page': page, 'pageSize': 5}, - groups=[group.name], - ) - assert len(response.data.listEnvironments.nodes) == 5 - hasNext = response.data.listEnvironments.hasNext - nb_iter = nb_iter + 1 - page += 1 - if page > 1: - assert first_id != response.data.listEnvironments.nodes[0].environmentUri - first_id = response.data.listEnvironments.nodes[0].environmentUri - - -def test_group_invitation(db, client, env1, org1, group2, user, group3, group, dataset): - response = client.query( - """ - query listResourcePermissions($filter:ResourcePermissionFilter){ - listResourcePermissions(filter:$filter){ - count - nodes{ - permissionUri - name - 
type - } - } - } - """, - username=user.userName, - groups=[group.name, group2.name], - filter={}, - ) - - assert response.data.listResourcePermissions.count > 1 - - response = client.query( - """ - query listEnvironmentGroupInvitationPermissions($environmentUri:String){ - listEnvironmentGroupInvitationPermissions(environmentUri:$environmentUri){ - permissionUri - name - type - } - } - """, - username=user.userName, - groups=[group.name, group2.name], - filter={}, - ) - - env_permissions = [ - p.name for p in response.data.listEnvironmentGroupInvitationPermissions - ] - assert permissions.CREATE_DATASET in env_permissions - - response = client.query( - """ - mutation inviteGroupOnEnvironment($input:InviteGroupOnEnvironmentInput){ - inviteGroupOnEnvironment(input:$input){ - environmentUri - } - } - """, - username='alice', - input=dict( - environmentUri=env1.environmentUri, - groupUri=group2.name, - permissions=env_permissions, - environmentIAMRoleName='myteamrole', - ), - groups=[group.name, group2.name], - ) - print(response) - assert response.data.inviteGroupOnEnvironment - - response = client.query( - """ - query getGroup($groupUri:String!, $environmentUri:String){ - getGroup(groupUri:$groupUri){ - environmentPermissions(environmentUri:$environmentUri){ - name - } - } - } - """, - username=user.userName, - groups=[group2.name], - groupUri=group2.name, - environmentUri=env1.environmentUri, - ) - env_permissions = [p.name for p in response.data.getGroup.environmentPermissions] - assert permissions.CREATE_DATASET in env_permissions - - response = client.query( - """ - mutation updateGroupEnvironmentPermissions($input:InviteGroupOnEnvironmentInput!){ - updateGroupEnvironmentPermissions(input:$input){ - environmentUri - } - } - """, - username='alice', - input=dict( - environmentUri=env1.environmentUri, - groupUri=group2.name, - permissions=env_permissions, - ), - groups=[group.name, group2.name], - ) - print(response) - assert response.data.updateGroupEnvironmentPermissions - response = client.query( - """ - query listEnvironmentInvitedGroups($environmentUri: String!, $filter:GroupFilter){ - listEnvironmentInvitedGroups(environmentUri:$environmentUri, filter:$filter){ - count - nodes{ - groupUri - name - } - } - } - """, - username=user.userName, - groups=[group.name, group2.name], - environmentUri=env1.environmentUri, - filter={}, - ) - - assert response.data.listEnvironmentInvitedGroups.count == 1 - - response = client.query( - """ - query listEnvironmentGroups($environmentUri: String!, $filter:GroupFilter){ - listEnvironmentGroups(environmentUri:$environmentUri, filter:$filter){ - count - nodes{ - groupUri - name - environmentIAMRoleName - } - } - } - """, - username=user.userName, - groups=[group.name, group2.name], - environmentUri=env1.environmentUri, - filter={}, - ) - - assert response.data.listEnvironmentGroups.count == 2 - assert 'myteamrole' in [ - g.environmentIAMRoleName for g in response.data.listEnvironmentGroups.nodes - ] - - response = client.query( - """ - query listEnvironmentGroups($environmentUri: String!, $filter:GroupFilter){ - listEnvironmentGroups(environmentUri:$environmentUri, filter:$filter){ - count - nodes{ - groupUri - name - } - } - } - """, - username=user.userName, - groups=[group.name], - environmentUri=env1.environmentUri, - filter={}, - ) - - assert response.data.listEnvironmentGroups.count == 1 - - response = client.query( - """ - query listAllEnvironmentGroups($environmentUri: String!, $filter:GroupFilter){ - 
listAllEnvironmentGroups(environmentUri:$environmentUri, filter:$filter){ - count - nodes{ - groupUri - name - } - } - } - """, - username=user.userName, - groups=[group.name], - environmentUri=env1.environmentUri, - filter={}, - ) - - assert response.data.listAllEnvironmentGroups.count == 2 - - dataset = dataset( - org=org1, env=env1, name='dataset1', owner='bob', group=group2.name - ) - assert dataset.datasetUri - - response = client.query( - """ - mutation removeGroupFromEnvironment($environmentUri: String!, $groupUri: String!){ - removeGroupFromEnvironment(environmentUri: $environmentUri, groupUri: $groupUri){ - environmentUri - } - } - """, - username='alice', - environmentUri=env1.environmentUri, - groupUri=group2.name, - groups=[group.name, group2.name], - ) - print(response) - - assert 'EnvironmentResourcesFound' in response.errors[0].message - with db.scoped_session() as session: - dataset = session.query(dataall.db.models.Dataset).get(dataset.datasetUri) - session.delete(dataset) - session.commit() - - response = client.query( - """ - mutation removeGroupFromEnvironment($environmentUri: String!, $groupUri: String!){ - removeGroupFromEnvironment(environmentUri: $environmentUri, groupUri: $groupUri){ - environmentUri - } - } - """, - username='alice', - environmentUri=env1.environmentUri, - groupUri=group2.name, - groups=[group.name, group2.name], - ) - print(response) - assert response.data.removeGroupFromEnvironment - - response = client.query( - """ - query listEnvironmentInvitedGroups($environmentUri: String!, $filter:GroupFilter){ - listEnvironmentInvitedGroups(environmentUri:$environmentUri, filter:$filter){ - count - nodes{ - groupUri - name - } - } - } - """, - username=user.userName, - groups=[group.name, group2.name], - environmentUri=env1.environmentUri, - filter={}, - ) - - assert response.data.listEnvironmentInvitedGroups.count == 0 - - response = client.query( - """ - query listEnvironmentGroups($environmentUri: String!, $filter:GroupFilter){ - listEnvironmentGroups(environmentUri:$environmentUri, filter:$filter){ - count - nodes{ - groupUri - name - } - } - } - """, - username=user.userName, - groups=[group.name, group2.name], - environmentUri=env1.environmentUri, - filter={}, - ) - - assert response.data.listEnvironmentGroups.count == 1 - - response = client.query( - """ - mutation inviteGroupOnEnvironment($input:InviteGroupOnEnvironmentInput){ - inviteGroupOnEnvironment(input:$input){ - environmentUri - } - } - """, - username='alice', - input=dict( - environmentUri=env1.environmentUri, - groupUri=group3.name, - permissions=env_permissions, - ), - groups=[group.name, group3.name], - ) - print(response) - assert response.data.inviteGroupOnEnvironment - - response = client.query( - """ - query listEnvironmentGroups($environmentUri: String!, $filter:GroupFilter){ - listEnvironmentGroups(environmentUri:$environmentUri, filter:$filter){ - count - nodes{ - groupUri - name - environmentIAMRoleName - } - } - } - """, - username=user.userName, - groups=[group.name, group2.name, group3.name], - environmentUri=env1.environmentUri, - filter={}, - ) - assert 'myteamrole' not in [ - g.environmentIAMRoleName for g in response.data.listEnvironmentGroups.nodes - ] - - -def test_archive_env(client, org1, env1, group, group2): - response = client.query( - """ - mutation deleteEnvironment($environmentUri:String!, $deleteFromAWS:Boolean!){ - deleteEnvironment(environmentUri:$environmentUri, deleteFromAWS:$deleteFromAWS) - } - """, - username='alice', - groups=[group.name, group2.name], 
- environmentUri=env1.environmentUri, - deleteFromAWS=True, - ) - print(response) - assert response.data.deleteEnvironment - - -def test_create_environment(db, client, org1, env1, user, group): - response = client.query( - """mutation CreateEnv($input:NewEnvironmentInput){ - createEnvironment(input:$input){ - organization{ - organizationUri - } - environmentUri - label - AwsAccountId - SamlGroupName - region - name - owner - EnvironmentDefaultIAMRoleName - EnvironmentDefaultIAMRoleImported - dashboardsEnabled - resourcePrefix - networks{ - VpcId - region - privateSubnetIds - publicSubnetIds - default - } - } - }""", - username=user.userName, - groups=[group.name], - input={ - 'label': f'dev', - 'description': f'test', - 'EnvironmentDefaultIAMRoleName': 'myOwnIamRole', - 'organizationUri': org1.organizationUri, - 'AwsAccountId': env1.AwsAccountId, - 'tags': ['a', 'b', 'c'], - 'region': f'{env1.region}', - 'SamlGroupName': group.name, - 'vpcId': 'vpc-1234567', - 'privateSubnetIds': 'subnet-1', - 'publicSubnetIds': 'subnet-21', - 'dashboardsEnabled': True, - 'resourcePrefix': 'customer-prefix', - }, - ) - assert response.data.createEnvironment.dashboardsEnabled - assert response.data.createEnvironment.networks - assert ( - response.data.createEnvironment.EnvironmentDefaultIAMRoleName == 'myOwnIamRole' - ) - assert response.data.createEnvironment.EnvironmentDefaultIAMRoleImported - assert response.data.createEnvironment.resourcePrefix == 'customer-prefix' - for vpc in response.data.createEnvironment.networks: - assert vpc.privateSubnetIds - assert vpc.publicSubnetIds - assert vpc.default - - with db.scoped_session() as session: - env = dataall.db.api.Environment.get_environment_by_uri( - session, response.data.createEnvironment.environmentUri - ) - session.delete(env) - session.commit() diff --git a/tests/api/test_feed.py b/tests/api/test_feed.py deleted file mode 100644 index 11f7c4891..000000000 --- a/tests/api/test_feed.py +++ /dev/null @@ -1,106 +0,0 @@ -import pytest - -from dataall.db import models - - -@pytest.fixture(scope='module', autouse=True) -def worksheet(db): - with db.scoped_session() as session: - w = models.Worksheet( - owner='me', - label='xxx', - SamlAdminGroupName='g', - ) - session.add(w) - return w - - -def test_post_message(client, worksheet): - response = client.query( - """ - mutation PostFeedMessage( - $targetUri : String!, - $targetType: String!, - $input:FeedMessageInput - ){ - postFeedMessage(targetUri:$targetUri, targetType:$targetType,input:$input){ - feedMessageUri - content - created - creator - } - } - """, - username='me', - targetUri=worksheet.worksheetUri, - targetType='Worksheet', - input={'content': 'hello'}, - ) - - assert response.data.postFeedMessage.content == 'hello' - assert response.data.postFeedMessage.creator == 'me' - - -def test_list_messages(client, worksheet): - response = client.query( - """ - query GetFeed( - $targetUri:String!, - $targetType:String!, - $filter:FeedMessageFilter! 
- ){ - getFeed( - targetUri:$targetUri, - targetType:$targetType, - - ){ - messages( filter:$filter){ - count - page - pages - hasNext - hasPrevious - nodes{ - content - created - } - } - } - } - """, - username='me', - targetUri=worksheet.worksheetUri, - targetType='Worksheet', - filter={}, - ) - - assert response.data.getFeed.messages.count == 1 - assert response.data.getFeed.messages.nodes[0].content == 'hello' - - -def test_get_target(client, worksheet): - response = client.query( - """ - query GetFeed( - $targetUri:String!, - $targetType:String!, - ){ - getFeed( - targetUri:$targetUri, - targetType:$targetType, - - ){ - - target{ - ... on Worksheet{ - worksheetUri - } - } - } - } - """, - targetUri=worksheet.worksheetUri, - targetType='Worksheet', - username='me', - ) - print(response) diff --git a/tests/api/test_glossary.py b/tests/api/test_glossary.py deleted file mode 100644 index d295fe068..000000000 --- a/tests/api/test_glossary.py +++ /dev/null @@ -1,685 +0,0 @@ -from typing import List -from dataall.db import models -import pytest - - -@pytest.fixture(scope='module') -def _org(db, org, tenant, user, group) -> models.Organization: - org = org('testorg', user.userName, group.name) - yield org - - -@pytest.fixture(scope='module') -def _env( - db, _org: models.Organization, user, group, env -) -> models.Environment: - env1 = env(_org, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') - yield env1 - - -@pytest.fixture(scope='module', autouse=True) -def _dataset(db, _env, _org, group, user, dataset) -> models.Dataset: - with db.scoped_session() as session: - yield dataset( - org=_org, env=_env, name='dataset1', owner=user.userName, group=group.name - ) - - -@pytest.fixture(scope='module', autouse=True) -def _table(db, _dataset) -> models.DatasetTable: - with db.scoped_session() as session: - t = models.DatasetTable( - datasetUri=_dataset.datasetUri, - label='table', - AWSAccountId=_dataset.AwsAccountId, - region=_dataset.region, - S3BucketName=_dataset.S3BucketName, - S3Prefix='/raw', - GlueTableName='table', - owner='alice', - GlueDatabaseName=_dataset.GlueDatabaseName, - ) - session.add(t) - yield t - - -@pytest.fixture(scope='module', autouse=True) -def _columns(db, _dataset, _table) -> List[models.DatasetTableColumn]: - with db.scoped_session() as session: - cols = [] - for i in range(0, 10): - c = models.DatasetTableColumn( - datasetUri=_dataset.datasetUri, - tableUri=_table.tableUri, - label=f'c{i+1}', - AWSAccountId=_dataset.AwsAccountId, - region=_dataset.region, - GlueTableName='table', - typeName='String', - owner='user', - GlueDatabaseName=_dataset.GlueDatabaseName, - ) - session.add(c) - cols.append(c) - yield cols - - -@pytest.fixture(scope='module', autouse=True) -def g1(client, group): - r = client.query( - """ - mutation CreateGlossary($input:CreateGlossaryInput){ - createGlossary(input:$input){ - nodeUri - label - readme - } - } - """, - input={ - 'label': 'Customer Glossary', - 'readme': 'Glossary of customer related data', - }, - username='alice', - groups=[group.name], - ) - yield r.data.createGlossary - - -@pytest.fixture(scope='module', autouse=True) -def c1(client, g1, group): - r = client.query( - """ - mutation CreateCategory( - $parentUri:String!, - $input:CreateCategoryInput){ - createCategory(parentUri:$parentUri,input:$input){ - nodeUri - label - readme - } - } - """, - parentUri=g1.nodeUri, - input={'label': 'Identifiers', 'readme': 'Customer identifiers category'}, - username='alice', - groups=[group.name], - ) - yield r.data.createCategory 
- - -@pytest.fixture(scope='module', autouse=True) -def subcategory(client, c1, group): - r = client.query( - """ - mutation CreateCategory( - $parentUri:String! - $input:CreateCategoryInput! - ){ - createCategory(parentUri:$parentUri,input:$input){ - nodeUri - label - readme - created - } - } - """, - input={ - 'label': 'OptionalIdentifiers', - 'readme': 'Additional, non required customer identifiers', - }, - parentUri=c1.nodeUri, - username='alice', - groups=[group.name], - ) - subcategory = r.data.createCategory - yield subcategory - - -@pytest.fixture(scope='module', autouse=True) -def t1(client, c1, group): - r = client.query( - """ - mutation CreateTerm( - $parentUri:String!, - $input:CreateTermInput){ - createTerm(parentUri:$parentUri,input:$input){ - nodeUri - label - readme - } - } - """, - parentUri=c1.nodeUri, - input={'label': 'Customer ID', 'readme': 'Global Customer Identifier'}, - username='alice', - groups=[group.name], - ) - yield r.data.createTerm - - -def test_list_glossaries(client): - response = client.query( - """ - query ListGlossaries{ - listGlossaries{ - count - nodes{ - nodeUri - children{ - count - nodes{ - __typename - ... on Category{ - label - nodeUri - path - } - ... on Term{ - label - nodeUri - path - } - } - } - stats{ - categories - terms - associations - } - } - } - } - """ - ) - print(response) - assert response.data.listGlossaries.count == 1 - assert response.data.listGlossaries.nodes[0].stats.categories == 2 - - -def test_hierarchical_search(client): - response = client.query( - """ - query SearchGlossary($filter:GlossaryNodeSearchFilter){ - searchGlossary(filter:$filter){ - count - page - pages - hasNext - hasPrevious - nodes{ - __typename - ...on Glossary{ - nodeUri - label - readme - created - owner - path - } - ...on Category{ - nodeUri - label - parentUri - readme - created - owner - path - } - ...on Term{ - nodeUri - parentUri - label - readme - created - owner - path - } - - } - } - } - """ - ) - print(response) - assert response.data.searchGlossary.count == 4 - - -def test_get_glossary(client, g1): - r = client.query( - """ - query GetGlossary($nodeUri:String!){ - getGlossary(nodeUri:$nodeUri){ - nodeUri - label - readme - } - } - """, - nodeUri=g1.nodeUri, - ) - print(r) - assert r.data.getGlossary.nodeUri == g1.nodeUri - assert r.data.getGlossary.label == g1.label - assert r.data.getGlossary.readme == g1.readme - - -def test_get_category(client, c1): - r = client.query( - """ - query GetCategory($nodeUri:String!){ - getCategory(nodeUri:$nodeUri){ - nodeUri - label - readme - } - } - """, - nodeUri=c1.nodeUri, - ) - print(r) - assert r.data.getCategory.nodeUri == c1.nodeUri - assert r.data.getCategory.label == c1.label - assert r.data.getCategory.readme == c1.readme - - -def test_get_term(client, t1): - r = client.query( - """ - query GetTerm($nodeUri:String!){ - getTerm(nodeUri:$nodeUri){ - nodeUri - label - readme - } - } - """, - nodeUri=t1.nodeUri, - ) - print(r) - assert r.data.getTerm.nodeUri == t1.nodeUri - assert r.data.getTerm.label == t1.label - assert r.data.getTerm.readme == t1.readme - - -def test_dataset_term_link_approval(db, client, t1, _dataset, user, group): - response = client.query( - """ - mutation UpdateDataset($datasetUri:String!,$input:ModifyDatasetInput){ - updateDataset(datasetUri:$datasetUri,input:$input){ - datasetUri - label - tags - } - } - """, - username='alice', - groups=[group.name], - datasetUri=_dataset.datasetUri, - input={ - 'terms': [t1.nodeUri], - 'KmsAlias': '' - }, - ) - with db.scoped_session() as 
session: - link: models.TermLink = ( - session.query(models.TermLink) - .filter(models.TermLink.nodeUri == t1.nodeUri) - .first() - ) - r = client.query( - """ - mutation ApproveTermAssociation($linkUri:String!){ - approveTermAssociation(linkUri:$linkUri) - } - """, - linkUri=link.linkUri, - username='alice', - groups=[group.name], - ) - assert r - link: models.TermLink = session.query(models.TermLink).get(link.linkUri) - assert link.approvedBySteward - - r = client.query( - """ - mutation DismissTermAssociation($linkUri:String!){ - dismissTermAssociation(linkUri:$linkUri) - } - """, - linkUri=link.linkUri, - username='alice', - groups=[group.name], - ) - assert r - link: models.TermLink = session.query(models.TermLink).get(link.linkUri) - assert not link.approvedBySteward - - -def test_glossary_categories(client, g1, c1): - r = client.query( - """ - query GetGlossary($nodeUri:String!){ - getGlossary(nodeUri:$nodeUri){ - nodeUri - label - readme - categories{ - count - page - pages - hasNext - hasPrevious - nodes{ - nodeUri - label - readme - } - } - } - } - """, - nodeUri=g1.nodeUri, - ) - assert r.data.getGlossary.categories.count == 1 - assert r.data.getGlossary.categories.nodes[0].nodeUri == c1.nodeUri - - -def test_list_subcategory(client, c1): - r = client.query( - """ - query GetCategory($nodeUri:String!){ - getCategory(nodeUri:$nodeUri){ - nodeUri - label - readme - categories{ - count - nodes{ - nodeUri - label - readme - } - } - } - } - """, - nodeUri=c1.nodeUri, - ) - - assert r.data.getCategory.categories.count == 1 - - -def test_list_category_terms(client, c1): - r = client.query( - """ - query GetCategory($nodeUri:String!){ - getCategory(nodeUri:$nodeUri){ - nodeUri - label - readme - terms{ - count - nodes{ - nodeUri - label - readme - } - } - } - } - """, - nodeUri=c1.nodeUri, - ) - assert r.data.getCategory.terms.count == 1 - - -def test_update_glossary(client, g1, group): - r = client.query( - """ - mutation UpdateGlossary( - $nodeUri:String!, - $input:UpdateGlossaryInput! - ){ - updateGlossary( - nodeUri:$nodeUri, - input:$input - ){ - nodeUri - label - readme - } - } - """, - nodeUri=g1.nodeUri, - input={'readme': g1.readme + '(updated description)'}, - username='alice', - groups=[group.name], - ) - assert r.data.updateGlossary.readme == g1.readme + '(updated description)' - - -def test_update_category(client, c1, group): - r = client.query( - """ - mutation UpdateCategory( - $nodeUri:String!, - $input:UpdateCategoryInput! 
- ){ - updateCategory( - nodeUri:$nodeUri, - input:$input - ){ - nodeUri - label - readme - } - } - """, - nodeUri=c1.nodeUri, - input={'readme': c1.readme + '(updated description)'}, - username='alice', - groups=[group.name], - ) - assert r.data.updateCategory.readme == c1.readme + '(updated description)' - - -def test_delete_subcategory(client, subcategory, group): - r = client.query( - """ - mutation DeleteCategory( - $nodeUri:String!, - ){ - deleteCategory( - nodeUri:$nodeUri, - ) - } - """, - nodeUri=subcategory.nodeUri, - username='alice', - groups=[group.name], - ) - print(r) - - -def test_link_term(client, t1, _columns, group): - col = _columns[0] - r = client.query( - """ - mutation LinkTerm( - $nodeUri:String!, - $targetUri:String!, - $targetType:String!, - ){ - linkTerm( - nodeUri:$nodeUri, - targetUri:$targetUri, - targetType:$targetType - ){ - linkUri - } - } - """, - nodeUri=t1.nodeUri, - targetUri=col.columnUri, - targetType='Column', - username='alice', - groups=[group.name], - ) - linkUri = r.data.linkTerm.linkUri - - r = client.query( - """ - query GetGlossaryTermLink($linkUri:String!){ - getGlossaryTermLink(linkUri:$linkUri){ - linkUri - created - target{ - __typename - ... on DatasetTableColumn{ - label - columnUri - } - } - } - } - """, - linkUri=linkUri, - username='alice', - ) - print(r) - - -def test_get_term_associations(t1, client): - r = client.query( - """ - query GetTerm($nodeUri:String!){ - getTerm(nodeUri:$nodeUri){ - nodeUri - label - readme - associations{ - count - nodes{ - linkUri - target{ - ... on DatasetTableColumn{ - label - columnUri - } - } - } - } - } - - } - """, - nodeUri=t1.nodeUri, - username='alice', - ) - print(r) - - -def test_delete_category(client, c1, group): - r = client.query( - """ - mutation DeleteCategory( - $nodeUri:String!, - ){ - deleteCategory( - nodeUri:$nodeUri, - ) - } - """, - nodeUri=c1.nodeUri, - username='alice', - groups=[group.name], - ) - print(r) - - -def test_list_glossaries_after_delete(client): - response = client.query( - """ - query ListGlossaries{ - listGlossaries{ - count - nodes{ - nodeUri - children{ - count - nodes{ - __typename - ... on Category{ - label - nodeUri - path - } - ... 
on Term{ - label - nodeUri - path - } - } - } - stats{ - categories - terms - associations - } - } - } - } - """ - ) - print(response) - assert response.data.listGlossaries.count == 1 - assert response.data.listGlossaries.nodes[0].stats.categories == 0 - - -def test_hierarchical_search_after_delete(client): - response = client.query( - """ - query SearchGlossary($filter:GlossaryNodeSearchFilter){ - searchGlossary(filter:$filter){ - count - page - pages - hasNext - hasPrevious - nodes{ - __typename - ...on Glossary{ - nodeUri - label - readme - created - owner - path - } - ...on Category{ - nodeUri - label - parentUri - readme - created - owner - path - } - ...on Term{ - nodeUri - parentUri - label - readme - created - owner - path - } - - } - } - } - """ - ) - print(response) - assert response.data.searchGlossary.count == 1 diff --git a/tests/api/test_group.py b/tests/api/test_group.py deleted file mode 100644 index c02e7de29..000000000 --- a/tests/api/test_group.py +++ /dev/null @@ -1,41 +0,0 @@ -import pytest - -import dataall -from dataall.db import permissions - - -@pytest.fixture(scope='module', autouse=True) -def org1(org, user, group, tenant): - org1 = org('testorg', user.userName, group.name) - yield org1 - - -@pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant): - env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') - yield env1 - - -def test_list_cognito_groups_env(client, env1, group, module_mocker): - module_mocker.patch( - 'dataall.aws.handlers.cognito.Cognito.list_cognito_groups', - return_value=[{"GroupName": 'cognitos'}, {"GroupName": 'testadmins'}], - ) - response = client.query( - """ - query listCognitoGroups ( - $filter: CognitoGroupFilter - ) { - listCognitoGroups ( - filter: $filter - ){ - groupName - } - } - """, - username='alice', - filter={'type': 'environment', 'uri': env1.environmentUri}, - ) - assert response.data.listCognitoGroups[0].groupName == 'cognitos' - - diff --git a/tests/api/test_keyvaluetag.py b/tests/api/test_keyvaluetag.py deleted file mode 100644 index 16e2827a7..000000000 --- a/tests/api/test_keyvaluetag.py +++ /dev/null @@ -1,114 +0,0 @@ -from typing import List - -import dataall -from dataall.db import models -import pytest - -from dataall.db import exceptions - - -@pytest.fixture(scope='module') -def org1(db, org, tenant, user, group) -> models.Organization: - org = org('testorg', user.userName, group.name) - yield org - - -@pytest.fixture(scope='module') -def env1( - db, org1: models.Organization, user, group, module_mocker, env -) -> models.Environment: - env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') - yield env1 - - -@pytest.fixture(scope='module', autouse=True) -def dataset1(db, env1, org1, group, user, dataset, module_mocker) -> models.Dataset: - with db.scoped_session() as session: - yield dataset( - org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name - ) - - -def list_tags_query(client, dataset1, target_type=None): - query = client.query( - """ - query listKeyValueTags($targetUri:String!, $targetType:String!){ - listKeyValueTags(targetUri:$targetUri, targetType:$targetType){ - tagUri - targetUri - targetType - key - value - cascade - } - } - """, - targetUri=dataset1.datasetUri, - targetType=target_type or 'dataset', - username='alice', - groups=[dataset1.SamlAdminGroupName], - ) - return query - - -def test_empty_key_value_tags(client, dataset1): - response = list_tags_query(client, dataset1) - print(response) - assert 
len(response.data.listKeyValueTags) == 0 - - -def test_unsupported_target_type(db, dataset1): - with pytest.raises(exceptions.InvalidInput): - assert dataall.db.api.TargetType.is_supported_target_type('unknown') - - -def test_update_key_value_tags(client, dataset1): - response = client.query( - """ - mutation updateKeyValueTags($input:UpdateKeyValueTagsInput!){ - updateKeyValueTags(input:$input){ - tagUri - targetUri - targetType - key - value - cascade - } - } - """, - input=dict( - targetUri=dataset1.datasetUri, - targetType='dataset', - tags=[{'key': 'tag1', 'value': 'value1', 'cascade': False}], - ), - username='alice', - groups=[dataset1.SamlAdminGroupName], - ) - assert len(response.data.updateKeyValueTags) == 1 - - response = list_tags_query(client, dataset1) - assert response.data.listKeyValueTags[0].key == 'tag1' - assert response.data.listKeyValueTags[0].value == 'value1' - assert response.data.listKeyValueTags[0].cascade == False - - response = client.query( - """ - mutation updateKeyValueTags($input:UpdateKeyValueTagsInput!){ - updateKeyValueTags(input:$input){ - tagUri - targetUri - targetType - key - value - cascade - } - } - """, - input=dict(targetUri=dataset1.datasetUri, targetType='dataset', tags=[]), - username='alice', - groups=[dataset1.SamlAdminGroupName], - ) - assert len(response.data.updateKeyValueTags) == 0 - - response = list_tags_query(client, dataset1) - assert len(response.data.listKeyValueTags) == 0 diff --git a/tests/api/test_redshift_cluster.py b/tests/api/test_redshift_cluster.py deleted file mode 100644 index 3b26fb5e6..000000000 --- a/tests/api/test_redshift_cluster.py +++ /dev/null @@ -1,486 +0,0 @@ -import typing - - -import pytest -import dataall -from dataall.api.constants import RedshiftClusterRole - - -@pytest.fixture(scope='module', autouse=True) -def org1(org, user, group, tenant): - org1 = org('testorg', user.userName, group.name) - yield org1 - - -@pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant): - env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') - yield env1 - - -@pytest.fixture(scope='module') -def dataset1(db, user, env1, org1, dataset, group, group3) -> dataall.db.models.Dataset: - with db.scoped_session() as session: - data = dict( - label='label', - owner=user.userName, - SamlAdminGroupName=group.name, - businessOwnerDelegationEmails=['foo@amazon.com'], - businessOwnerEmail=['bar@amazon.com'], - name='name', - S3BucketName='S3BucketName', - GlueDatabaseName='GlueDatabaseName', - KmsAlias='kmsalias', - AwsAccountId='123456789012', - region='eu-west-1', - IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', - IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', - stewards=group3.name, - ) - dataset = dataall.db.api.Dataset.create_dataset( - session=session, - username=user.userName, - groups=[group.name], - uri=env1.environmentUri, - data=data, - check_perm=True, - ) - yield dataset - - -@pytest.fixture(scope='module', autouse=True) -def table1(table, dataset1): - yield table(dataset1, name='table1', username=dataset1.owner) - - -@pytest.fixture(scope='module') -def org2(org: typing.Callable, user2, group2, tenant) -> dataall.db.models.Organization: - yield org('org2', user2.userName, group2.name) - - -@pytest.fixture(scope='module') -def env2( - env: typing.Callable, org2: dataall.db.models.Organization, user2, group2, tenant -) -> dataall.db.models.Environment: - yield env(org2, 'dev', user2.userName, group2.name, '2' * 12, 'eu-west-1') - - 
-@pytest.fixture(scope='module') -def dataset2(env2, org2, dataset, group2, user2) -> dataall.db.models.Dataset: - yield dataset( - org=org2, - env=env2, - name=user2.userName, - owner=env2.owner, - group=group2.name, - ) - - -@pytest.fixture(scope='module', autouse=True) -def table2(table, dataset2): - yield table(dataset2, name='table2', username=dataset2.owner) - - -@pytest.fixture(scope='module') -def cluster(env1, org1, client, group): - ouri = org1.organizationUri - euri = env1.environmentUri - group_name = group.name - res = client.query( - """ - mutation createRedshiftCluster { - createRedshiftCluster( - environmentUri:"%(euri)s", - clusterInput:{ - label : "mycluster", - description:"a test cluster", - vpc: "vpc-12345", - databaseName: "mydb", - masterDatabaseName: "masterDatabaseName", - masterUsername:"masterUsername", - nodeType: "multi-node", - numberOfNodes: 2, - subnetIds: ["subnet-1","subnet-2"], - securityGroupIds: ["sg-1","sg-2"], - tags:["test"], - SamlGroupName: "%(group_name)s" - } - ){ - clusterUri - label - description - tags - databaseName - masterDatabaseName - masterUsername - nodeType - numberOfNodes - subnetIds - securityGroupIds - userRoleForCluster - userRoleInEnvironment - owner - - } - } - """ - % vars(), - 'alice', - groups=[group_name], - ) - print(res) - yield res.data.createRedshiftCluster - - -def test_create(cluster): - assert cluster.clusterUri is not None - assert cluster.label == 'mycluster' - assert cluster.description == 'a test cluster' - assert cluster.tags[0] == 'test' - assert cluster.databaseName == 'mydb' - assert cluster.masterDatabaseName == 'masterDatabaseName' - assert cluster.masterUsername == 'masterUsername' - assert cluster.nodeType == 'multi-node' - assert cluster.numberOfNodes == 2 - assert cluster.subnetIds[0] == 'subnet-1' - assert cluster.securityGroupIds[0] == 'sg-1' - assert cluster.userRoleForCluster == RedshiftClusterRole.Creator.name - - -def test_get_cluster_as_owner(cluster, client, group): - duri = cluster.clusterUri - res = client.query( - """ - query getRedshiftCluster{ - getRedshiftCluster(clusterUri:"%(duri)s"){ - clusterUri - owner - label - description - tags - masterDatabaseName - masterUsername - nodeType - numberOfNodes - subnetIds - securityGroupIds - userRoleForCluster - userRoleInEnvironment - } - } - """ - % vars(), - username='alice', - groups=[group.name], - ) - print(res) - assert res.data.getRedshiftCluster.clusterUri == duri - - -def test_get_cluster_anonymous(cluster, client): - print(' [¨] ' * 10) - duri = cluster.clusterUri - res = client.query( - """ - query getRedshiftCluster{ - getRedshiftCluster(clusterUri:"%(duri)s"){ - clusterUri - label - description - tags - masterDatabaseName - masterUsername - nodeType - numberOfNodes - subnetIds - securityGroupIds - userRoleForCluster - userRoleInEnvironment - } - } - """ - % vars(), - username='bob', - ) - print(res) - assert not res.data.getRedshiftCluster - - -def test_list_env_clusters_no_filter(env1, cluster, client, group): - euri = env1.environmentUri - res = client.query( - """ - query listEnvironmentClusters{ - listEnvironmentClusters(environmentUri:"%(euri)s"){ - count - nodes{ - clusterUri - label - userRoleForCluster - } - } - } - """ - % vars(), - username='alice', - groups=[group.name], - ) - print(res) - assert res.data.listEnvironmentClusters.count == 1 - - -def test_list_env_clusters_filter_term(env1, cluster, client, group): - euri = env1.environmentUri - res = client.query( - """ - query listEnvironmentClusters{ - 
listEnvironmentClusters(environmentUri:"%(euri)s", - filter:{ - term : "mycluster" - } - ){ - count - nodes{ - clusterUri - label - userRoleForCluster - } - } - } - """ - % vars(), - username='alice', - groups=[group.name], - ) - assert res.data.listEnvironmentClusters.count == 1 - - -# def test_list_cluster_available_datasets(env1, cluster, dataset1, client, group): -# res = client.query( -# """ -# query ListRedshiftClusterAvailableDatasets($clusterUri:String!,$filter:RedshiftClusterDatasetFilter){ -# listRedshiftClusterAvailableDatasets(clusterUri:$clusterUri,filter:$filter){ -# count -# page -# pages -# hasNext -# hasPrevious -# nodes{ -# datasetUri -# name -# label -# region -# tags -# userRoleForDataset -# redshiftClusterPermission(clusterUri:$clusterUri) -# description -# organization{ -# name -# organizationUri -# label -# } -# statistics{ -# tables -# locations -# } -# environment{ -# environmentUri -# name -# AwsAccountId -# SamlGroupName -# region -# } -# -# } -# } -# }""", -# clusterUri=cluster.clusterUri, -# username='alice', -# groups=[group.name], -# ) -# print(res) -# assert res.data.listRedshiftClusterAvailableDatasets.count == 2 -# - -# def test_add_dataset_to_cluster(env1, cluster, dataset1, client, db, group): -# with db.scoped_session() as session: -# cluster = session.query(dataall.db.models.RedshiftCluster).get( -# cluster.clusterUri -# ) -# cluster.status = 'available' -# session.commit() -# res = client.query( -# """ -# mutation addDatasetToRedshiftCluster( -# $clusterUri:String, -# $datasetUri:String, -# ){ -# addDatasetToRedshiftCluster( -# clusterUri:$clusterUri, -# datasetUri:$datasetUri -# ) -# } -# """, -# clusterUri=cluster.clusterUri, -# datasetUri=dataset1.datasetUri, -# username='alice', -# groups=[group.name], -# ) -# print(res) -# -# -# def test_cluster_tables_copy(env1, cluster, dataset1, env2, client, db, group): -# res = client.query( -# """ -# query listRedshiftClusterAvailableDatasetTables($clusterUri:String!,$filter:DatasetTableFilter){ -# listRedshiftClusterAvailableDatasetTables(clusterUri:$clusterUri,filter:$filter){ -# count -# page -# pages -# hasNext -# hasPrevious -# count -# nodes{ -# tableUri -# name -# label -# GlueDatabaseName -# GlueTableName -# S3Prefix -# } -# } -# }""", -# clusterUri=cluster.clusterUri, -# username='alice', -# groups=[group.name], -# ) -# print(res) -# assert res.data.listRedshiftClusterAvailableDatasetTables.count == 2 -# -# table = res.data.listRedshiftClusterAvailableDatasetTables.nodes[0] -# -# res = client.query( -# """ -# mutation enableRedshiftClusterDatasetTableCopy( -# $clusterUri:String!, -# $datasetUri:String!, -# $tableUri:String!, -# $schema: String!, -# $dataLocation: String! 
-# ){ -# enableRedshiftClusterDatasetTableCopy( -# clusterUri:$clusterUri, -# datasetUri:$datasetUri, -# tableUri:$tableUri, -# schema:$schema, -# dataLocation:$dataLocation -# ) -# } -# """, -# clusterUri=cluster.clusterUri, -# datasetUri=dataset1.datasetUri, -# tableUri=table.tableUri, -# schema='myschema', -# username='alice', -# groups=[group.name], -# dataLocation='yes', -# ) -# print(res) -# assert res.data.enableRedshiftClusterDatasetTableCopy -# -# res = client.query( -# """ -# query listRedshiftClusterCopyEnabledTables($clusterUri:String!,$filter:DatasetTableFilter){ -# listRedshiftClusterCopyEnabledTables(clusterUri:$clusterUri,filter:$filter){ -# count -# page -# pages -# hasNext -# hasPrevious -# count -# nodes{ -# tableUri -# name -# label -# GlueDatabaseName -# GlueTableName -# S3Prefix -# RedshiftSchema(clusterUri:$clusterUri) -# RedshiftCopyDataLocation(clusterUri:$clusterUri) -# } -# } -# }""", -# clusterUri=cluster.clusterUri, -# username='alice', -# groups=[group.name], -# ) -# print(res) -# assert res.data.listRedshiftClusterCopyEnabledTables.count == 1 -# -# res = client.query( -# """ -# mutation disableRedshiftClusterDatasetTableCopy( -# $clusterUri:String!, -# $datasetUri:String!, -# $tableUri:String! -# ){ -# disableRedshiftClusterDatasetTableCopy( -# clusterUri:$clusterUri, -# datasetUri:$datasetUri, -# tableUri:$tableUri -# ) -# } -# """, -# clusterUri=cluster.clusterUri, -# datasetUri=dataset1.datasetUri, -# tableUri=table.tableUri, -# username='alice', -# groups=[group.name], -# ) -# print(res) -# assert res.data.disableRedshiftClusterDatasetTableCopy -# -# res = client.query( -# """ -# query listRedshiftClusterCopyEnabledTables($clusterUri:String!,$filter:DatasetTableFilter){ -# listRedshiftClusterCopyEnabledTables(clusterUri:$clusterUri,filter:$filter){ -# count -# page -# pages -# hasNext -# hasPrevious -# count -# nodes{ -# tableUri -# name -# label -# GlueDatabaseName -# GlueTableName -# S3Prefix -# } -# } -# }""", -# clusterUri=cluster.clusterUri, -# username='alice', -# groups=[group.name], -# ) -# print(res) -# assert res.data.listRedshiftClusterCopyEnabledTables.count == 0 -# - -def test_delete_cluster(client, cluster, env1, org1, db, module_mocker, group, user): - module_mocker.patch( - 'dataall.aws.handlers.service_handlers.Worker.queue', return_value=True - ) - response = client.query( - """ - mutation deleteRedshiftCluster($clusterUri:String!,$deleteFromAWS:Boolean){ - deleteRedshiftCluster(clusterUri:$clusterUri, deleteFromAWS:$deleteFromAWS) - } - """, - clusterUri=cluster.clusterUri, - deleteFromAWS=True, - username=user.userName, - groups=[group.name], - ) - assert response.data.deleteRedshiftCluster diff --git a/tests/api/test_sagemaker_notebook.py b/tests/api/test_sagemaker_notebook.py deleted file mode 100644 index a48f51c43..000000000 --- a/tests/api/test_sagemaker_notebook.py +++ /dev/null @@ -1,209 +0,0 @@ -import pytest - -import dataall - - -@pytest.fixture(scope='module') -def org1(org, user, group, tenant): - org1 = org('testorg', user.userName, group.name) - yield org1 - - -@pytest.fixture(scope='module') -def env1(env, org1, user, group, tenant, module_mocker): - env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') - yield env1 - - -@pytest.fixture(scope='module', autouse=True) -def sgm_notebook(client, tenant, group, env1) -> dataall.db.models.SagemakerNotebook: - response = client.query( - """ - mutation createSagemakerNotebook($input:NewSagemakerNotebookInput){ - createSagemakerNotebook(input:$input){ - 
notebookUri - label - description - tags - owner - userRoleForNotebook - SamlAdminGroupName - VpcId - SubnetId - VolumeSizeInGB - InstanceType - } - } - """, - input={ - 'label': 'my pipeline', - 'SamlAdminGroupName': group.name, - 'tags': [group.name], - 'environmentUri': env1.environmentUri, - 'VpcId': 'vpc-123567', - 'SubnetId': 'subnet-123567', - 'VolumeSizeInGB': 32, - 'InstanceType': 'ml.m5.xlarge', - }, - username='alice', - groups=[group.name], - ) - assert response.data.createSagemakerNotebook.notebookUri - assert response.data.createSagemakerNotebook.SamlAdminGroupName == group.name - assert response.data.createSagemakerNotebook.VpcId == 'vpc-123567' - assert response.data.createSagemakerNotebook.SubnetId == 'subnet-123567' - assert response.data.createSagemakerNotebook.InstanceType == 'ml.m5.xlarge' - assert response.data.createSagemakerNotebook.VolumeSizeInGB == 32 - return response.data.createSagemakerNotebook - - -@pytest.fixture(scope='module', autouse=True) -def patch_aws(module_mocker): - module_mocker.patch( - 'dataall.aws.handlers.sagemaker.Sagemaker.start_instance', - return_value='Starting', - ) - module_mocker.patch( - 'dataall.aws.handlers.sagemaker.Sagemaker.stop_instance', return_value=True - ) - module_mocker.patch( - 'dataall.aws.handlers.sagemaker.Sagemaker.get_notebook_instance_status', - return_value='INSERVICE', - ) - - -def test_list_notebooks(client, env1, db, org1, user, group, sgm_notebook, patch_aws): - response = client.query( - """ - query ListSagemakerNotebooks($filter:SagemakerNotebookFilter){ - listSagemakerNotebooks(filter:$filter){ - count - nodes{ - NotebookInstanceStatus - notebookUri - environment { - environmentUri - } - organization { - organizationUri - } - } - } - } - """, - filter=None, - username=user.userName, - groups=[group.name], - ) - assert len(response.data.listSagemakerNotebooks['nodes']) == 1 - - -def test_nopermissions_list_notebooks( - client, env1, db, org1, user2, group2, sgm_notebook, patch_aws -): - response = client.query( - """ - query ListSagemakerNotebooks($filter:SagemakerNotebookFilter){ - listSagemakerNotebooks(filter:$filter){ - count - nodes{ - NotebookInstanceStatus - notebookUri - environment { - environmentUri - } - organization { - organizationUri - } - } - } - } - """, - filter=None, - username=user2.userName, - groups=[group2.name], - ) - assert len(response.data.listSagemakerNotebooks['nodes']) == 0 - - -def test_get_notebook(client, env1, db, org1, user, group, sgm_notebook, patch_aws): - - response = client.query( - """ - query getSagemakerNotebook($notebookUri:String!){ - getSagemakerNotebook(notebookUri:$notebookUri){ - notebookUri - NotebookInstanceStatus - } - } - """, - notebookUri=sgm_notebook.notebookUri, - username=user.userName, - groups=[group.name], - ) - assert response.data.getSagemakerNotebook.notebookUri == sgm_notebook.notebookUri - - -def test_action_notebook(client, env1, db, org1, user, group, sgm_notebook, patch_aws): - response = client.query( - """ - mutation stopSagemakerNotebook($notebookUri:String!){ - stopSagemakerNotebook(notebookUri:$notebookUri) - } - """, - notebookUri=sgm_notebook.notebookUri, - username=user.userName, - groups=[group.name], - ) - assert response.data.stopSagemakerNotebook == 'Stopping' - - response = client.query( - """ - mutation startSagemakerNotebook($notebookUri:String!){ - startSagemakerNotebook(notebookUri:$notebookUri) - } - """, - notebookUri=sgm_notebook.notebookUri, - username=user.userName, - groups=[group.name], - ) - assert 
response.data.startSagemakerNotebook == 'Starting' - - -def test_delete_notebook(client, env1, db, org1, user, group, patch_aws, sgm_notebook): - - response = client.query( - """ - mutation deleteSagemakerNotebook($notebookUri:String!,$deleteFromAWS:Boolean){ - deleteSagemakerNotebook(notebookUri:$notebookUri,deleteFromAWS:$deleteFromAWS) - } - """, - notebookUri=sgm_notebook.notebookUri, - deleteFromAWS=True, - username=user.userName, - groups=[group.name], - ) - assert response.data.deleteSagemakerNotebook - response = client.query( - """ - query ListSagemakerNotebooks($filter:SagemakerNotebookFilter){ - listSagemakerNotebooks(filter:$filter){ - count - nodes{ - NotebookInstanceStatus - notebookUri - environment { - environmentUri - } - organization { - organizationUri - } - } - } - } - """, - filter=None, - username=user.userName, - groups=[group.name], - ) - assert len(response.data.listSagemakerNotebooks['nodes']) == 0 diff --git a/tests/api/test_sagemaker_studio.py b/tests/api/test_sagemaker_studio.py deleted file mode 100644 index 70f903c73..000000000 --- a/tests/api/test_sagemaker_studio.py +++ /dev/null @@ -1,124 +0,0 @@ -import typing -import pytest - -import dataall - - -@pytest.fixture(scope='module', autouse=True) -def org1(org, user, group, tenant): - org1 = org('testorg', user.userName, group.name) - yield org1 - - -@pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant, module_mocker): - env1 = env(org1, 'dev', 'alice', 'testadmins', '111111111111', 'eu-west-1') - yield env1 - - -def test_add_sm_user_profile(client, db, env1, org1, group, module_mocker): - module_mocker.patch( - 'dataall.aws.handlers.sagemaker_studio.SagemakerStudio.get_sagemaker_studio_domain', - return_value={'DomainId': 'test'}, - ) - for i in range(0, 10): - response = client.query( - """ - mutation createSagemakerStudioUserProfile($input:NewSagemakerStudioUserProfileInput){ - createSagemakerStudioUserProfile(input:$input){ - sagemakerStudioUserProfileUri - name - label - created - description - SamlAdminGroupName - environmentUri - tags - } - } - """, - input={ - 'label': f'test{i}', - 'SamlAdminGroupName': group.name, - 'environmentUri': env1.environmentUri, - }, - username='alice', - groups=[group.name], - ) - assert response.data.createSagemakerStudioUserProfile.label == f'test{i}' - assert ( - response.data.createSagemakerStudioUserProfile.SamlAdminGroupName - == group.name - ) - assert ( - response.data.createSagemakerStudioUserProfile.environmentUri - == env1.environmentUri - ) - - -def test_list_sagemaker_studio_user_profiles(client, env1, db, org1, group): - response = client.query( - """ - query listSagemakerStudioUserProfiles($filter:SagemakerStudioUserProfileFilter!){ - listSagemakerStudioUserProfiles(filter:$filter){ - count - nodes{ - sagemakerStudioUserProfileUri - } - } - } - """, - filter={}, - username='alice', - ) - print(response.data) - assert len(response.data.listSagemakerStudioUserProfiles['nodes']) == 10 - - -def test_nopermissions_list_sagemaker_studio_user_profiles( - client, env1, db, org1, group -): - response = client.query( - """ - query listSagemakerStudioUserProfiles($filter:SagemakerStudioUserProfileFilter!){ - listSagemakerStudioUserProfiles(filter:$filter){ - count - nodes{ - sagemakerStudioUserProfileUri - } - } - } - """, - filter={}, - username='bob', - ) - assert len(response.data.listSagemakerStudioUserProfiles['nodes']) == 0 - - -def test_delete_sagemaker_studio_user_profile( - client, env1, db, org1, module_mocker, group -): - 
with db.scoped_session() as session: - sm_user_profile = session.query( - dataall.db.models.SagemakerStudioUserProfile - ).first() - module_mocker.patch( - 'dataall.aws.handlers.service_handlers.Worker.queue', return_value=True - ) - response = client.query( - """ - mutation deleteSagemakerStudioUserProfile($sagemakerStudioUserProfileUri:String!, $deleteFromAWS:Boolean){ - deleteSagemakerStudioUserProfile(sagemakerStudioUserProfileUri:$sagemakerStudioUserProfileUri, deleteFromAWS:$deleteFromAWS) - } - """, - sagemakerStudioUserProfileUri=sm_user_profile.sagemakerStudioUserProfileUri, - deleteFromAWS=True, - username='alice', - groups=[group.name], - ) - assert response.data - with db.scoped_session() as session: - n = session.query(dataall.db.models.SagemakerStudioUserProfile).get( - sm_user_profile.sagemakerStudioUserProfileUri - ) - assert not n diff --git a/tests/api/test_stack.py b/tests/api/test_stack.py deleted file mode 100644 index fd834f7e8..000000000 --- a/tests/api/test_stack.py +++ /dev/null @@ -1,59 +0,0 @@ -def test_update_stack( - client, - tenant, - group, - pipeline, - env_fixture, - dataset_fixture, - sgm_notebook, - sgm_studio, - cluster, -): - response = update_stack_query( - client, env_fixture.environmentUri, 'environment', group.name - ) - assert response.data.updateStack.targetUri == env_fixture.environmentUri - - response = update_stack_query( - client, dataset_fixture.datasetUri, 'dataset', group.name - ) - assert response.data.updateStack.targetUri == dataset_fixture.datasetUri - - response = update_stack_query( - client, sgm_studio.sagemakerStudioUserProfileUri, 'mlstudio', group.name - ) - assert ( - response.data.updateStack.targetUri == sgm_studio.sagemakerStudioUserProfileUri - ) - - response = update_stack_query( - client, sgm_notebook.notebookUri, 'notebook', group.name - ) - assert response.data.updateStack.targetUri == sgm_notebook.notebookUri - - response = update_stack_query(client, cluster.clusterUri, 'redshift', group.name) - assert response.data.updateStack.targetUri == cluster.clusterUri - - response = update_stack_query( - client, pipeline.DataPipelineUri, 'pipeline', group.name - ) - assert response.data.updateStack.targetUri == pipeline.DataPipelineUri - - -def update_stack_query(client, target_uri, target_type, group): - response = client.query( - """ - mutation updateStack($targetUri:String!, $targetType:String!){ - updateStack(targetUri:$targetUri, targetType:$targetType){ - stackUri - targetUri - name - } - } - """, - targetUri=target_uri, - targetType=target_type, - username='alice', - groups=[group], - ) - return response diff --git a/tests/api/test_vote.py b/tests/api/test_vote.py deleted file mode 100644 index 1956802a9..000000000 --- a/tests/api/test_vote.py +++ /dev/null @@ -1,184 +0,0 @@ -import pytest - -from dataall.db import models - - -@pytest.fixture(scope='module') -def org1(db, org, tenant, user, group) -> models.Organization: - org = org('testorg', user.userName, group.name) - yield org - - -@pytest.fixture(scope='module') -def env1( - db, org1: models.Organization, user, group, env -) -> models.Environment: - env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') - yield env1 - - -@pytest.fixture(scope='module', autouse=True) -def dataset1(db, env1, org1, group, user, dataset) -> models.Dataset: - with db.scoped_session() as session: - yield dataset( - org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name - ) - - -@pytest.fixture(scope='module') -def dashboard(client, env1, org1, 
group, module_mocker, patch_es): - module_mocker.patch( - 'dataall.aws.handlers.quicksight.Quicksight.can_import_dashboard', - return_value=True, - ) - response = client.query( - """ - mutation importDashboard( - $input:ImportDashboardInput, - ){ - importDashboard(input:$input){ - dashboardUri - name - label - DashboardId - created - owner - SamlGroupName - } - } - """, - input={ - 'dashboardId': f'1234', - 'label': f'1234', - 'environmentUri': env1.environmentUri, - 'SamlGroupName': group.name, - 'terms': ['term'], - }, - username='alice', - groups=[group.name], - ) - assert response.data.importDashboard.owner == 'alice' - assert response.data.importDashboard.SamlGroupName == group.name - yield response.data.importDashboard - - -def test_count_votes(client, dataset1, dashboard): - response = count_votes_query( - client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName - ) - assert response.data.countUpVotes == 0 - response = count_votes_query( - client, dashboard.dashboardUri, 'dashboard', dataset1.SamlAdminGroupName - ) - assert response.data.countUpVotes == 0 - - -def count_votes_query(client, target_uri, target_type, group): - response = client.query( - """ - query countUpVotes($targetUri:String!, $targetType:String!){ - countUpVotes(targetUri:$targetUri, targetType:$targetType) - } - """, - targetUri=target_uri, - targetType=target_type, - username='alice', - groups=[group], - ) - return response - - -def get_vote_query(client, target_uri, target_type, group): - response = client.query( - """ - query getVote($targetUri:String!, $targetType:String!){ - getVote(targetUri:$targetUri, targetType:$targetType){ - upvote - } - } - """, - targetUri=target_uri, - targetType=target_type, - username='alice', - groups=[group], - ) - return response - - -def test_upvote(patch_es, client, dataset1, module_mocker, dashboard): - module_mocker.patch('dataall.api.Objects.Vote.resolvers.reindex', return_value={}) - response = upvote_mutation( - client, dataset1.datasetUri, 'dataset', True, dataset1.SamlAdminGroupName - ) - assert response.data.upVote.upvote - response = count_votes_query( - client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName - ) - assert response.data.countUpVotes == 1 - response = get_vote_query( - client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName - ) - assert response.data.getVote.upvote - - response = upvote_mutation( - client, dashboard.dashboardUri, 'dashboard', True, dataset1.SamlAdminGroupName - ) - assert response.data.upVote.upvote - response = count_votes_query( - client, dashboard.dashboardUri, 'dashboard', dataset1.SamlAdminGroupName - ) - assert response.data.countUpVotes == 1 - response = get_vote_query( - client, dashboard.dashboardUri, 'dashboard', dataset1.SamlAdminGroupName - ) - assert response.data.getVote.upvote - - response = upvote_mutation( - client, dataset1.datasetUri, 'dataset', False, dataset1.SamlAdminGroupName - ) - assert not response.data.upVote.upvote - response = upvote_mutation( - client, dashboard.dashboardUri, 'dashboard', False, dataset1.SamlAdminGroupName - ) - - assert not response.data.upVote.upvote - response = get_vote_query( - client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName - ) - assert not response.data.getVote.upvote - response = get_vote_query( - client, dashboard.dashboardUri, 'dashboard', dataset1.SamlAdminGroupName - ) - assert not response.data.getVote.upvote - - response = count_votes_query( - client, dataset1.datasetUri, 'dataset', dataset1.SamlAdminGroupName - ) - 
assert response.data.countUpVotes == 0 - response = count_votes_query( - client, dashboard.dashboardUri, 'dashboard', dataset1.SamlAdminGroupName - ) - assert response.data.countUpVotes == 0 - - -def upvote_mutation(client, target_uri, target_type, upvote, group): - response = client.query( - """ - mutation upVote($input:VoteInput!){ - upVote(input:$input){ - voteUri - targetUri - targetType - upvote - } - } - """, - input=dict( - targetUri=target_uri, - targetType=target_type, - upvote=upvote, - ), - username='alice', - groups=[group], - ) - return response diff --git a/tests/api/test_vpc.py b/tests/api/test_vpc.py deleted file mode 100644 index a223b7e4e..000000000 --- a/tests/api/test_vpc.py +++ /dev/null @@ -1,153 +0,0 @@ -import pytest - -import dataall - - -@pytest.fixture(scope='module') -def org1(org, user, group, tenant): - org1 = org('testorg', user.userName, group.name) - yield org1 - - -@pytest.fixture(scope='module') -def env1(env, org1, user, group, tenant): - env1 = env(org1, 'dev', user.userName, group.name, '111111111111', 'eu-west-1') - yield env1 - - -@pytest.fixture(scope='module', autouse=True) -def vpc(env1, group, client) -> dataall.db.models.Vpc: - response = client.query( - """ - mutation createNetwork($input:NewVpcInput){ - createNetwork(input:$input){ - vpcUri - label - description - tags - owner - SamlGroupName - privateSubnetIds - privateSubnetIds - } - } - """, - input={ - 'label': 'myvpc', - 'SamlGroupName': group.name, - 'tags': [group.name], - 'vpcId': 'vpc-12345678', - 'privateSubnetIds': ['sub1', 'sub2'], - 'publicSubnetIds': ['sub1', 'sub2'], - 'environmentUri': env1.environmentUri, - }, - username='alice', - groups=[group.name], - ) - assert response.data.createNetwork.SamlGroupName - assert response.data.createNetwork.label - yield response.data.createNetwork - - -def test_list_networks(client, env1, db, org1, user, group, vpc): - response = client.query( - """ - query ListEnvironmentNetworks($environmentUri: String!,$filter:VpcFilter){ - listEnvironmentNetworks(environmentUri:$environmentUri,filter:$filter){ - count - nodes{ - VpcId - SamlGroupName - publicSubnetIds - privateSubnetIds - default - } - } - } - """, - environmentUri=env1.environmentUri, - filter=None, - username='alice', - groups=[group.name], - ) - print(response) - - assert response.data.listEnvironmentNetworks.count == 2 - - -def test_list_networks_nopermissions(client, env1, db, org1, user, group2, vpc): - response = client.query( - """ - query ListEnvironmentNetworks($environmentUri: String!,$filter:VpcFilter){ - listEnvironmentNetworks(environmentUri:$environmentUri,filter:$filter){ - count - nodes{ - VpcId - SamlGroupName - publicSubnetIds - privateSubnetIds - default - } - } - } - """, - environmentUri=env1.environmentUri, - filter=None, - username='bob', - groups=[group2.name], - ) - assert 'UnauthorizedOperation' in response.errors[0].message - - -def test_get_network(client, env1, db, org1, user, group, vpc, module_mocker): - response = client.query( - """ - query getNetwork($vpcUri:String!){ - getNetwork(vpcUri:$vpcUri){ - vpcUri - } - } - """, - vpcUri=vpc.vpcUri, - username=user.userName, - groups=[group.name], - ) - assert response.data.getNetwork.vpcUri == vpc.vpcUri - - -def test_delete_network(client, env1, db, org1, user, group, module_mocker, vpc): - module_mocker.patch( - 'dataall.aws.handlers.service_handlers.Worker.queue', return_value=True - ) - response = client.query( - """ - mutation deleteNetwork($vpcUri:String!){ - deleteNetwork(vpcUri:$vpcUri) - } - """, - 
vpcUri=vpc.vpcUri, - username=user.userName, - groups=[group.name], - ) - assert response.data.deleteNetwork - response = client.query( - """ - query ListEnvironmentNetworks($environmentUri: String!,$filter:VpcFilter){ - listEnvironmentNetworks(environmentUri:$environmentUri,filter:$filter){ - count - nodes{ - VpcId - SamlGroupName - publicSubnetIds - privateSubnetIds - default - } - } - } - """, - environmentUri=env1.environmentUri, - filter=None, - username='alice', - groups=[group.name], - ) - assert len(response.data.listEnvironmentNetworks['nodes']) == 1 diff --git a/tests/api/test_worksheet.py b/tests/api/test_worksheet.py deleted file mode 100644 index 0cbae9da7..000000000 --- a/tests/api/test_worksheet.py +++ /dev/null @@ -1,277 +0,0 @@ -import pytest -from dataall.api.constants import WorksheetRole - - -@pytest.fixture(scope='module', autouse=True) -def worksheet(client, tenant, group): - response = client.query( - """ - mutation CreateWorksheet ($input:NewWorksheetInput){ - createWorksheet(input:$input){ - worksheetUri - label - description - tags - owner - userRoleForWorksheet - } - } - """, - input={ - 'label': 'my worksheet', - 'SamlAdminGroupName': group.name, - 'tags': [group.name], - }, - username='alice', - groups=[group.name], - tags=[group.name], - ) - return response.data.createWorksheet - - -def test_create_worksheet(client, worksheet): - assert worksheet.label == 'my worksheet' - assert worksheet.owner == 'alice' - assert worksheet.userRoleForWorksheet == WorksheetRole.Creator.name - - -def test_list_worksheets_as_creator(client, group): - response = client.query( - """ - query ListWorksheets ($filter:WorksheetFilter){ - listWorksheets (filter:$filter){ - count - page - pages - nodes{ - worksheetUri - label - description - tags - owner - userRoleForWorksheet - } - } - } - """, - filter={'page': 1}, - username='alice', - groups=[group.name], - ) - - assert response.data.listWorksheets.count == 1 - - -def test_list_worksheets_as_anonymous(client, group): - response = client.query( - """ - query ListWorksheets ($filter:WorksheetFilter){ - listWorksheets (filter:$filter){ - count - page - pages - nodes{ - worksheetUri - label - description - tags - owner - userRoleForWorksheet - } - } - } - """, - filter={'page': 1}, - username='anonymous', - ) - - print(response) - assert response.data.listWorksheets.count == 0 - - -def test_get_worksheet(client, worksheet, group): - response = client.query( - """ - query GetWorksheet($worksheetUri:String!){ - getWorksheet(worksheetUri:$worksheetUri){ - label - description - userRoleForWorksheet - } - } - """, - worksheetUri=worksheet.worksheetUri, - username='alice', - groups=[group.name], - ) - - assert response.data.getWorksheet.userRoleForWorksheet == WorksheetRole.Creator.name - - response = client.query( - """ - query GetWorksheet($worksheetUri:String!){ - getWorksheet(worksheetUri:$worksheetUri){ - label - description - userRoleForWorksheet - } - } - """, - worksheetUri=worksheet.worksheetUri, - username='anonymous', - ) - - assert 'Unauthorized' in response.errors[0].message - - -def test_update_worksheet(client, worksheet, group): - response = client.query( - """ - mutation UpdateWorksheet($worksheetUri:String!, $input:UpdateWorksheetInput){ - updateWorksheet( - worksheetUri:$worksheetUri, - input:$input - ){ - worksheetUri - label - } - } - """, - worksheetUri=worksheet.worksheetUri, - input={'label': 'change label'}, - username='alice', - groups=[group.name], - ) - - assert response.data.updateWorksheet.label == 'change label' 
- - -def test_share_with_individual(client, worksheet, group2, group): - response = client.query( - """ - mutation ShareWorksheet( - $worksheetUri:String!, - $input: WorksheetShareInput! - ){ - shareWorksheet(worksheetUri:$worksheetUri,input:$input){ - worksheetShareUri - canEdit - } - } - """, - worksheetUri=worksheet.worksheetUri, - input={'principalId': group2.name, 'principalType': 'Group', 'canEdit': False}, - username='alice', - groups=[group.name], - ) - share_uri = response.data.shareWorksheet.worksheetShareUri - assert share_uri - assert not response.data.shareWorksheet.canEdit - - response = client.query( - """ - mutation UpdateShareWorksheet( - $worksheetShareUri:String!, - $canEdit: Boolean! - ){ - updateShareWorksheet(worksheetShareUri:$worksheetShareUri,canEdit:$canEdit){ - worksheetShareUri - canEdit - } - } - """, - worksheetShareUri=share_uri, - canEdit=True, - username='alice', - groups=[group.name], - ) - share_uri = response.data.updateShareWorksheet.worksheetShareUri - assert share_uri - assert response.data.updateShareWorksheet.canEdit - - response = client.query( - """ - query GetWorksheet($worksheetUri:String!){ - getWorksheet(worksheetUri:$worksheetUri){ - label - description - userRoleForWorksheet - } - } - """, - worksheetUri=worksheet.worksheetUri, - username='bob', - groups=[group2.name], - ) - - assert response.data.getWorksheet.label == 'change label' - - response = client.query( - """ - query GetWorksheet($worksheetUri:String!){ - getWorksheet(worksheetUri:$worksheetUri){ - label - description - userRoleForWorksheet - shares{ - count - } - lastSavedQueryResult - { - AthenaQueryId - } - - } - } - """, - worksheetUri=worksheet.worksheetUri, - username='bob', - groups=[group2.name], - ) - - assert response.data.getWorksheet.label == 'change label' - - response = client.query( - """ - mutation deleteShareWorksheet( - $worksheetShareUri:String! - ){ - deleteShareWorksheet(worksheetShareUri:$worksheetShareUri) - } - """, - worksheetShareUri=share_uri, - username='alice', - groups=[group.name], - ) - assert response.data.deleteShareWorksheet - - response = client.query( - """ - query GetWorksheet($worksheetUri:String!){ - getWorksheet(worksheetUri:$worksheetUri){ - label - description - userRoleForWorksheet - } - } - """, - worksheetUri=worksheet.worksheetUri, - username='bob', - groups=[group2.name], - ) - - assert 'UnauthorizedOperation' in response.errors[0].message - - response = client.query( - """ - mutation deleteWorksheet( - $worksheetUri:String! 
- ){ - deleteWorksheet(worksheetUri:$worksheetUri) - } - """, - worksheetUri=worksheet.worksheetUri, - username='alice', - groups=[group.name], - ) - assert response.data.deleteWorksheet diff --git a/tests/base/__init__.py b/tests/base/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/base/api/__init__.py b/tests/base/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/base/api/gql/__init__.py b/tests/base/api/gql/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/base/api/gql/conftest.py b/tests/base/api/gql/conftest.py new file mode 100644 index 000000000..769e6a4f4 --- /dev/null +++ b/tests/base/api/gql/conftest.py @@ -0,0 +1,15 @@ +import pytest + +from dataall.base.api import gql + + +@pytest.fixture(scope='function', autouse=True) +def reset(): + tmp = (gql.ObjectType.class_instances, gql.QueryField.class_instances, gql.MutationField.class_instances) + gql.ObjectType.class_instances = {} + gql.QueryField.class_instances = {} + gql.MutationField.class_instances = {} + + yield + + gql.ObjectType.class_instances, gql.QueryField.class_instances, gql.MutationField.class_instances = tmp diff --git a/tests/utils/gql/test_argument.py b/tests/base/api/gql/test_argument.py similarity index 84% rename from tests/utils/gql/test_argument.py rename to tests/base/api/gql/test_argument.py index 32cc3b255..e5c9be0bc 100644 --- a/tests/utils/gql/test_argument.py +++ b/tests/base/api/gql/test_argument.py @@ -1,12 +1,12 @@ -import dataall.api.gql as gql +import dataall.base.api.gql as gql def test_base(): arg = gql.Argument(name='foo', type=gql.String) assert arg.gql() == 'foo : String' - arg = gql.Argument(name='foo', type=gql.Number) - assert arg.gql() == 'foo : Number' + arg = gql.Argument(name='foo', type=gql.Integer) + assert arg.gql() == 'foo : Int' arg = gql.Argument(name='foo', type=gql.NonNullableType(gql.String)) assert arg.gql() == 'foo : String!' 
@@ -60,8 +60,8 @@ def test_arg_from_input_type(): point_input = gql.InputType( name='PointInput', arguments=[ - gql.Argument(name='x', type=gql.Number), - gql.Argument(name='y', type=gql.Number), + gql.Argument(name='x', type=gql.Integer), + gql.Argument(name='y', type=gql.Integer), ], ) point_arg = gql.Argument(name='point', type=point_input) @@ -78,19 +78,19 @@ def test_input_type_with_arg(): input_type = gql.InputType( name='NewPointInputType', arguments=[ - gql.Argument(name='x', type=gql.Number), - gql.Argument(name='y', type=gql.Number), + gql.Argument(name='x', type=gql.Integer), + gql.Argument(name='y', type=gql.Integer), ], ) - assert input_type.gql() == 'input NewPointInputType{ x : Number, y : Number }' + assert input_type.gql() == 'input NewPointInputType{ x : Int, y : Int }' def test_nested_input(): point_input_type = gql.InputType( name='NewPointInputType', arguments=[ - gql.Argument(name='x', type=gql.Number), - gql.Argument(name='y', type=gql.Number), + gql.Argument(name='x', type=gql.Integer), + gql.Argument(name='y', type=gql.Integer), ], ) diff --git a/tests/utils/gql/test_cached.py b/tests/base/api/gql/test_cached.py similarity index 81% rename from tests/utils/gql/test_cached.py rename to tests/base/api/gql/test_cached.py index 71aa7b786..2c297d1eb 100644 --- a/tests/utils/gql/test_cached.py +++ b/tests/base/api/gql/test_cached.py @@ -1,12 +1,4 @@ -import pytest -from dataall.api import gql - - -@pytest.fixture(scope='module', autouse=True) -def reset(): - gql.ObjectType.class_instances = {} - gql.QueryField.class_instances = {} - gql.MutationField.class_instances = {} +from dataall.base.api import gql def test_cached_types(): diff --git a/tests/utils/gql/test_enum.py b/tests/base/api/gql/test_enum.py similarity index 97% rename from tests/utils/gql/test_enum.py rename to tests/base/api/gql/test_enum.py index 026646583..188436403 100644 --- a/tests/utils/gql/test_enum.py +++ b/tests/base/api/gql/test_enum.py @@ -1,7 +1,7 @@ from enum import Enum import pytest -import dataall.api.gql as gql +import dataall.base.api.gql as gql @pytest.fixture(scope='module') diff --git a/tests/utils/gql/test_field.py b/tests/base/api/gql/test_field.py similarity index 98% rename from tests/utils/gql/test_field.py rename to tests/base/api/gql/test_field.py index 0031795e9..2aaecfd4e 100644 --- a/tests/utils/gql/test_field.py +++ b/tests/base/api/gql/test_field.py @@ -1,4 +1,4 @@ -import dataall.api.gql as gql +import dataall.base.api.gql as gql def test_base_field(): diff --git a/tests/utils/gql/test_get_named_type.py b/tests/base/api/gql/test_get_named_type.py similarity index 97% rename from tests/utils/gql/test_get_named_type.py rename to tests/base/api/gql/test_get_named_type.py index 12744cd4a..cb384113c 100644 --- a/tests/utils/gql/test_get_named_type.py +++ b/tests/base/api/gql/test_get_named_type.py @@ -1,4 +1,4 @@ -import dataall.api.gql as gql +import dataall.base.api.gql as gql def test_scalar(): diff --git a/tests/utils/gql/test_object_type.py b/tests/base/api/gql/test_object_type.py similarity index 96% rename from tests/utils/gql/test_object_type.py rename to tests/base/api/gql/test_object_type.py index a7e1527a8..1069105ae 100644 --- a/tests/utils/gql/test_object_type.py +++ b/tests/base/api/gql/test_object_type.py @@ -1,4 +1,4 @@ -import dataall.api.gql as gql +import dataall.base.api.gql as gql def test_base_object(): diff --git a/tests/utils/gql/test_scalar.py b/tests/base/api/gql/test_scalar.py similarity index 82% rename from tests/utils/gql/test_scalar.py rename to 
tests/base/api/gql/test_scalar.py index f89f2609c..b914de68e 100644 --- a/tests/utils/gql/test_scalar.py +++ b/tests/base/api/gql/test_scalar.py @@ -1,4 +1,4 @@ -from dataall.api.gql import String, Number, Boolean, Date +from dataall.base.api.gql import String, Number, Boolean, Date def test_basic(): diff --git a/tests/utils/gql/test_type_modifiers.py b/tests/base/api/gql/test_type_modifiers.py similarity index 97% rename from tests/utils/gql/test_type_modifiers.py rename to tests/base/api/gql/test_type_modifiers.py index 9d73eca6a..f0c2f5bd3 100644 --- a/tests/utils/gql/test_type_modifiers.py +++ b/tests/base/api/gql/test_type_modifiers.py @@ -1,4 +1,4 @@ -import dataall.api.gql as gql +import dataall.base.api.gql as gql def test_non_nullable_modifier_scalar(): diff --git a/tests/base/db/__init__.py b/tests/base/db/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/base/db/test_connect.py b/tests/base/db/test_connect.py new file mode 100644 index 000000000..9d0883fc7 --- /dev/null +++ b/tests/base/db/test_connect.py @@ -0,0 +1,19 @@ +import os +import dataall + + +def test(db: dataall.base.db.Engine): + if os.getenv('local') or os.getenv('pytest'): + config: dataall.base.db.DbConfig = db.dbconfig + print(config) + assert config.host == 'localhost' + assert config.schema == 'pytest' + with db.scoped_session() as session: + models = [] + models = models + dataall.base.db.Base.__subclasses__() + models = models + dataall.base.db.Resource.__subclasses__() + for model in models: + nb = session.query(model).count() + assert nb == 0 + else: + assert True diff --git a/tests/db/test_dbconfig.py b/tests/base/db/test_dbconfig.py similarity index 97% rename from tests/db/test_dbconfig.py rename to tests/base/db/test_dbconfig.py index 67476d514..45d7ff5d4 100644 --- a/tests/db/test_dbconfig.py +++ b/tests/base/db/test_dbconfig.py @@ -1,6 +1,6 @@ import pytest -from dataall.db import DbConfig +from dataall.base.db import DbConfig def test_incorrect_database(): diff --git a/tests/base/test_config.py b/tests/base/test_config.py new file mode 100644 index 000000000..61eb69ff9 --- /dev/null +++ b/tests/base/test_config.py @@ -0,0 +1,19 @@ +from dataall.base.config import config + + +def test_config(): + config.set_property("k1", "v1") + assert config.get_property("k1") == "v1" + + assert config.get_property("not_exist", "default1") == "default1" + + config.set_property("a.b.c", "d") + assert config.get_property("a.b.c") == "d" + assert "c" in config.get_property("a.b") + assert "k" not in config.get_property("a.b") + assert config.get_property("a.b.k", "default2") == "default2" + assert "b" in config.get_property("a") + + config.set_property("a.b.e", "f") + assert config.get_property("a.b.c") == "d" + assert config.get_property("a.b.e") == "f" diff --git a/tests/base/utils/__init__.py b/tests/base/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/utils/test_naming_convention.py b/tests/base/utils/test_naming_convention.py similarity index 97% rename from tests/utils/test_naming_convention.py rename to tests/base/utils/test_naming_convention.py index cec941171..83c905c04 100644 --- a/tests/utils/test_naming_convention.py +++ b/tests/base/utils/test_naming_convention.py @@ -1,4 +1,4 @@ -from dataall.utils.naming_convention import ( +from dataall.base.utils.naming_convention import ( NamingConventionService, NamingConventionPattern, ) diff --git a/tests/cdkproxy/conftest.py b/tests/cdkproxy/conftest.py deleted file mode 100644 index 
d2160dde5..000000000 --- a/tests/cdkproxy/conftest.py +++ /dev/null @@ -1,269 +0,0 @@ -import pytest - -from dataall.db import models, api - - -@pytest.fixture(scope='module', autouse=True) -def permissions(db): - with db.scoped_session() as session: - yield api.Permission.init_permissions(session) - - -@pytest.fixture(scope='module', autouse=True) -def org(db) -> models.Organization: - with db.scoped_session() as session: - org = models.Organization( - name='org', owner='me', label='org', description='test' - ) - session.add(org) - yield org - - -@pytest.fixture(scope='module', autouse=True) -def env(db, org: models.Organization) -> models.Environment: - with db.scoped_session() as session: - env = models.Environment( - name='env', - owner='me', - organizationUri=org.organizationUri, - label='env', - AwsAccountId='1' * 12, - region='eu-west-1', - EnvironmentDefaultIAMRoleArn=f"arn:aws:iam::{'1'*12}:role/default_role", - EnvironmentDefaultIAMRoleName='default_role', - EnvironmentDefaultBucketName='envbucketbcuketenvbucketbcuketenvbucketbcuketenvbucketbcuket', - EnvironmentDefaultAthenaWorkGroup='DefaultWorkGroup', - CDKRoleArn='xxx', - SamlGroupName='admins', - subscriptionsEnabled=True, - subscriptionsConsumersTopicName='topicname', - ) - session.add(env) - session.commit() - env_group = models.EnvironmentGroup( - environmentUri=env.environmentUri, - groupUri=env.SamlGroupName, - environmentIAMRoleArn=env.EnvironmentDefaultIAMRoleArn, - environmentIAMRoleName=env.EnvironmentDefaultIAMRoleName, - environmentAthenaWorkGroup='workgroup', - ) - session.add(env_group) - tags = models.KeyValueTag( - targetType='environment', - targetUri=env.environmentUri, - key='CREATOR', - value='customtagowner', - ) - session.add(tags) - yield env - - -@pytest.fixture(scope='module', autouse=True) -def another_group(db, env): - with db.scoped_session() as session: - env_group: models.EnvironmentGroup = models.EnvironmentGroup( - environmentUri=env.environmentUri, - groupUri='anothergroup', - environmentIAMRoleArn='aontherGroupArn', - environmentIAMRoleName='anotherGroupRole', - environmentAthenaWorkGroup='workgroup', - ) - session.add(env_group) - dataset = models.Dataset( - label='thisdataset', - environmentUri=env.environmentUri, - organizationUri=env.organizationUri, - name='anotherdataset', - description='test', - AwsAccountId=env.AwsAccountId, - region=env.region, - S3BucketName='bucket', - GlueDatabaseName='db', - IAMDatasetAdminRoleArn='role', - IAMDatasetAdminUserArn='xxx', - KmsAlias='xxx', - owner='me', - confidentiality='C1', - businessOwnerEmail='jeff', - businessOwnerDelegationEmails=['andy'], - SamlAdminGroupName=env_group.groupUri, - GlueCrawlerName='dhCrawler', - ) - session.add(dataset) - yield env_group - - -@pytest.fixture(scope='module', autouse=True) -def dataset(db, env: models.Environment) -> models.Dataset: - with db.scoped_session() as session: - dataset = models.Dataset( - label='thisdataset', - environmentUri=env.environmentUri, - organizationUri=env.organizationUri, - name='thisdataset', - description='test', - AwsAccountId=env.AwsAccountId, - region=env.region, - S3BucketName='bucket', - GlueDatabaseName='db', - IAMDatasetAdminRoleArn='role', - IAMDatasetAdminUserArn='xxx', - KmsAlias='xxx', - owner='me', - confidentiality='C1', - businessOwnerEmail='jeff', - businessOwnerDelegationEmails=['andy'], - SamlAdminGroupName='admins', - GlueCrawlerName='dhCrawler', - ) - session.add(dataset) - yield dataset - - -@pytest.fixture(scope='module', autouse=True) -def table(db, dataset: 
models.Dataset) -> models.DatasetTable: - with db.scoped_session() as session: - table = models.DatasetTable( - label='thistable', - owner='me', - datasetUri=dataset.datasetUri, - AWSAccountId=dataset.AwsAccountId, - region=dataset.region, - GlueDatabaseName=dataset.GlueDatabaseName, - S3BucketName=dataset.S3BucketName, - GlueTableName='asimpletesttable', - S3Prefix='/raw/asimpletesttable/', - ) - - session.add(table) - yield table - - -@pytest.fixture(scope='module', autouse=True) -def sgm_studio(db, env: models.Environment) -> models.SagemakerStudioUserProfile: - with db.scoped_session() as session: - notebook = models.SagemakerStudioUserProfile( - label='thistable', - owner='me', - AWSAccountId=env.AwsAccountId, - region=env.region, - sagemakerStudioUserProfileStatus='UP', - sagemakerStudioUserProfileName='Profile', - sagemakerStudioUserProfileNameSlugify='Profile', - sagemakerStudioDomainID='domain', - environmentUri=env.environmentUri, - RoleArn=env.EnvironmentDefaultIAMRoleArn, - SamlAdminGroupName='admins', - ) - session.add(notebook) - yield notebook - - -@pytest.fixture(scope='module', autouse=True) -def notebook(db, env: models.Environment) -> models.SagemakerNotebook: - with db.scoped_session() as session: - notebook = models.SagemakerNotebook( - label='thistable', - NotebookInstanceStatus='RUNNING', - owner='me', - AWSAccountId=env.AwsAccountId, - region=env.region, - environmentUri=env.environmentUri, - RoleArn=env.EnvironmentDefaultIAMRoleArn, - SamlAdminGroupName='admins', - VolumeSizeInGB=32, - InstanceType='ml.t3.medium', - ) - session.add(notebook) - yield notebook - - -@pytest.fixture(scope='module', autouse=True) -def pipeline1(db, env: models.Environment) -> models.DataPipeline: - with db.scoped_session() as session: - pipeline = models.DataPipeline( - label='thistable', - owner='me', - AwsAccountId=env.AwsAccountId, - region=env.region, - environmentUri=env.environmentUri, - repo='pipeline', - SamlGroupName='admins', - devStrategy='cdk-trunk' - ) - session.add(pipeline) - yield pipeline - - -@pytest.fixture(scope='module', autouse=True) -def pipeline2(db, env: models.Environment) -> models.DataPipeline: - with db.scoped_session() as session: - pipeline = models.DataPipeline( - label='thistable', - owner='me', - AwsAccountId=env.AwsAccountId, - region=env.region, - environmentUri=env.environmentUri, - repo='pipeline', - SamlGroupName='admins', - devStrategy='trunk' - ) - session.add(pipeline) - yield pipeline - - -@pytest.fixture(scope='module', autouse=True) -def pip_envs(db, env: models.Environment, pipeline1: models.DataPipeline) -> models.DataPipelineEnvironment: - with db.scoped_session() as session: - pipeline_env1 = models.DataPipelineEnvironment( - owner='me', - label=f"{pipeline1.label}-{env.label}", - environmentUri=env.environmentUri, - environmentLabel=env.label, - pipelineUri=pipeline1.DataPipelineUri, - pipelineLabel=pipeline1.label, - envPipelineUri=f"{pipeline1.DataPipelineUri}{env.environmentUri}", - AwsAccountId=env.AwsAccountId, - region=env.region, - stage='dev', - order=1, - samlGroupName='admins' - ) - - session.add(pipeline_env1) - - yield api.Pipeline.query_pipeline_environments(session=session, uri=pipeline1.DataPipelineUri) - -@pytest.fixture(scope='module', autouse=True) -def redshift_cluster(db, env: models.Environment) -> models.RedshiftCluster: - with db.scoped_session() as session: - cluster = models.RedshiftCluster( - environmentUri=env.environmentUri, - organizationUri=env.organizationUri, - owner='owner', - label='cluster', - 
description='desc', - masterDatabaseName='dev', - masterUsername='masteruser', - databaseName='datahubdb', - nodeType='dc1.large', - numberOfNodes=2, - port=5432, - region=env.region, - AwsAccountId=env.AwsAccountId, - status='CREATING', - vpc='vpc-12344', - IAMRoles=[env.EnvironmentDefaultIAMRoleArn], - tags=[], - SamlGroupName='admins', - imported=False, - ) - session.add(cluster) - yield cluster - - -@pytest.fixture(scope='function', autouse=True) -def patch_ssm(mocker): - mocker.patch( - 'dataall.utils.parameter.Parameter.get_parameter', return_value='param' - ) diff --git a/tests/cdkproxy/test_cdk_pipeline_stack.py b/tests/cdkproxy/test_cdk_pipeline_stack.py deleted file mode 100644 index ed447b931..000000000 --- a/tests/cdkproxy/test_cdk_pipeline_stack.py +++ /dev/null @@ -1,57 +0,0 @@ -import json - -import pytest -from aws_cdk import App - -from dataall.cdkproxy.cdkpipeline.cdk_pipeline import CDKPipelineStack - - -@pytest.fixture(scope='function', autouse=True) -def patch_methods(mocker, db, pipeline1, env, pip_envs, org): - mocker.patch( - 'dataall.cdkproxy.cdkpipeline.cdk_pipeline.CDKPipelineStack.get_engine', - return_value=db, - ) - mocker.patch( - 'dataall.aws.handlers.sts.SessionHelper.get_delegation_role_name', - return_value="dataall-pivot-role-name-pytest", - ) - mocker.patch( - 'dataall.cdkproxy.cdkpipeline.cdk_pipeline.CDKPipelineStack.get_target', - return_value=pipeline1, - ) - mocker.patch( - 'dataall.cdkproxy.cdkpipeline.cdk_pipeline.CDKPipelineStack.get_pipeline_cicd_environment', - return_value=env, - ) - mocker.patch( - 'dataall.cdkproxy.cdkpipeline.cdk_pipeline.CDKPipelineStack.get_pipeline_environments', - return_value=pip_envs, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_engine', return_value=db - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_target', - return_value=pipeline1, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_environment', - return_value=env, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_organization', - return_value=org, - ) - - -@pytest.fixture(scope='function', autouse=True) -def template1(pipeline1): - app = App() - CDKPipelineStack(app, 'CDKPipeline', target_uri=pipeline1.DataPipelineUri) - return json.dumps(app.synth().get_stack_by_name('CDKPipeline').template) - - -# def test_resources_created_cdk_trunk(template1): -# assert 'AWS::CodeCommit::Repository' in template1 - diff --git a/tests/cdkproxy/test_dataset_stack.py b/tests/cdkproxy/test_dataset_stack.py deleted file mode 100644 index 14caf7942..000000000 --- a/tests/cdkproxy/test_dataset_stack.py +++ /dev/null @@ -1,54 +0,0 @@ -import json - -import pytest -from aws_cdk import App - -from dataall.cdkproxy.stacks import Dataset - - -@pytest.fixture(scope='function', autouse=True) -def patch_methods(mocker, db, dataset, env, org): - mocker.patch('dataall.cdkproxy.stacks.dataset.Dataset.get_engine', return_value=db) - mocker.patch( - 'dataall.cdkproxy.stacks.dataset.Dataset.get_target', return_value=dataset - ) - mocker.patch( - 'dataall.aws.handlers.sts.SessionHelper.get_delegation_role_name', - return_value="dataall-pivot-role-name-pytest", - ) - mocker.patch( - 'dataall.aws.handlers.lakeformation.LakeFormation.check_existing_lf_registered_location', - return_value=False, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_target', - return_value=dataset, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_engine', - return_value=db, - ) - 
mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_environment', - return_value=env, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_organization', - return_value=org, - ) - - -@pytest.fixture(scope='function', autouse=True) -def template(dataset): - app = App() - Dataset(app, 'Dataset', target_uri=dataset.datasetUri) - return json.dumps(app.synth().get_stack_by_name('Dataset').template) - - -def test_resources_created(template): - assert 'AWS::S3::Bucket' in template - assert 'AWS::KMS::Key' in template - assert 'AWS::IAM::Role' in template - assert 'AWS::IAM::Policy' in template - assert 'AWS::S3::BucketPolicy' in template - assert 'AWS::Glue::Job' in template diff --git a/tests/cdkproxy/test_environment_stack.py b/tests/cdkproxy/test_environment_stack.py deleted file mode 100644 index f5dceccdf..000000000 --- a/tests/cdkproxy/test_environment_stack.py +++ /dev/null @@ -1,65 +0,0 @@ -import json - -import pytest -from aws_cdk import App - -from dataall.cdkproxy.stacks import EnvironmentSetup - - -@pytest.fixture(scope='function', autouse=True) -def patch_methods(mocker, db, env, another_group, permissions): - mocker.patch( - 'dataall.cdkproxy.stacks.environment.EnvironmentSetup.get_engine', - return_value=db, - ) - mocker.patch( - 'dataall.aws.handlers.sts.SessionHelper.get_delegation_role_name', - return_value='dataall-pivot-role-name-pytest', - ) - mocker.patch( - 'dataall.aws.handlers.parameter_store.ParameterStoreManager.get_parameter_value', - return_value='False', - ) - mocker.patch( - 'dataall.cdkproxy.stacks.environment.EnvironmentSetup.get_target', - return_value=env, - ) - mocker.patch( - 'dataall.cdkproxy.stacks.environment.EnvironmentSetup.get_environment_groups', - return_value=[another_group], - ) - mocker.patch( - 'dataall.cdkproxy.stacks.sagemakerstudio.SageMakerDomain.check_existing_sagemaker_studio_domain', - return_value=True, - ) - mocker.patch( - 'dataall.aws.handlers.sts.SessionHelper.get_account', - return_value='012345678901x', - ) - mocker.patch('dataall.utils.runtime_stacks_tagging.TagsUtil.get_engine', return_value=db) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_target', - return_value=env, - ) - mocker.patch( - 'dataall.cdkproxy.stacks.environment.EnvironmentSetup.get_environment_group_permissions', - return_value=[permission.name for permission in permissions], - ) - mocker.patch( - 'dataall.aws.handlers.sts.SessionHelper.get_external_id_secret', - return_value='*****', - ) - - -@pytest.fixture(scope='function', autouse=True) -def template(env): - app = App() - EnvironmentSetup(app, 'Environment', target_uri=env.environmentUri) - return json.dumps(app.synth().get_stack_by_name('Environment').template) - - -def test_resources_created(template, env): - assert 'AWS::S3::Bucket' in template - assert 'AWS::IAM::Role' in template - assert 'AWS::Lambda::Function' in template - assert 'AWS::IAM::Policy' in template diff --git a/tests/cdkproxy/test_pipeline_stack.py b/tests/cdkproxy/test_pipeline_stack.py deleted file mode 100644 index ed1d7d0c0..000000000 --- a/tests/cdkproxy/test_pipeline_stack.py +++ /dev/null @@ -1,65 +0,0 @@ -import json -import os -import pytest -from aws_cdk import App - -from dataall.cdkproxy.stacks.pipeline import PipelineStack - - -@pytest.fixture(scope='function', autouse=True) -def patch_methods(mocker, db, pipeline2, env, pip_envs, org): - mocker.patch( - 'dataall.cdkproxy.stacks.pipeline.PipelineStack.get_engine', - return_value=db, - ) - mocker.patch( - 
'dataall.aws.handlers.sts.SessionHelper.get_delegation_role_name', - return_value="dataall-pivot-role-name-pytest", - ) - mocker.patch( - 'dataall.cdkproxy.stacks.pipeline.PipelineStack.get_target', - return_value=pipeline2, - ) - mocker.patch( - 'dataall.cdkproxy.stacks.pipeline.PipelineStack.get_pipeline_cicd_environment', - return_value=env, - ) - mocker.patch( - 'dataall.cdkproxy.stacks.pipeline.PipelineStack.get_pipeline_environments', - return_value=pip_envs, - ) - mocker.patch( - 'dataall.cdkproxy.stacks.pipeline.PipelineStack._set_env_vars', - return_value=(os.environ, True) - ) - mocker.patch( - 'dataall.cdkproxy.stacks.pipeline.PipelineStack._check_repository', - return_value=False - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_engine', return_value=db - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_target', - return_value=pipeline2, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_environment', - return_value=env, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_organization', - return_value=org, - ) - -@pytest.fixture(scope='function', autouse=True) -def template2(pipeline2): - app = App() - PipelineStack(app, 'Pipeline', target_uri=pipeline2.DataPipelineUri) - return json.dumps(app.synth().get_stack_by_name('Pipeline').template) - - -def test_resources_created_cp_trunk(template2): - assert 'AWS::CodeCommit::Repository' in template2 - assert 'AWS::CodePipeline::Pipeline' in template2 - assert 'AWS::CodeBuild::Project' in template2 \ No newline at end of file diff --git a/tests/cdkproxy/test_redshift_cluster_stack.py b/tests/cdkproxy/test_redshift_cluster_stack.py deleted file mode 100644 index ab738a491..000000000 --- a/tests/cdkproxy/test_redshift_cluster_stack.py +++ /dev/null @@ -1,55 +0,0 @@ -import json - -import pytest -from aws_cdk import App - -from dataall.cdkproxy.stacks import RedshiftStack - - -@pytest.fixture(scope='function', autouse=True) -def patch_methods(mocker, db, redshift_cluster, env, org): - mocker.patch( - 'dataall.cdkproxy.stacks.redshift_cluster.RedshiftStack.get_engine', - return_value=db, - ) - mocker.patch( - 'dataall.aws.handlers.sts.SessionHelper.get_delegation_role_name', - return_value="dataall-pivot-role-name-pytest", - ) - mocker.patch( - 'dataall.cdkproxy.stacks.redshift_cluster.RedshiftStack.get_target', - return_value=(redshift_cluster, env), - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_engine', return_value=db - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_target', - return_value=redshift_cluster, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_environment', - return_value=env, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_organization', - return_value=org, - ) - - -@pytest.fixture(scope='function', autouse=True) -def template(redshift_cluster): - app = App() - RedshiftStack( - app, - 'Cluster', - env={'account': '123456789012', 'region': 'eu-west-1'}, - target_uri=redshift_cluster.clusterUri, - ) - return json.dumps(app.synth().get_stack_by_name('Cluster').template) - - -def test_resources_created(template): - assert 'AWS::Redshift::Cluster' in template - assert 'AWS::SecretsManager::Secret' in template - assert 'AWS::KMS::Key' in template diff --git a/tests/cdkproxy/test_sagemaker_notebook_stack.py b/tests/cdkproxy/test_sagemaker_notebook_stack.py deleted file mode 100644 index a17b673f2..000000000 --- 
a/tests/cdkproxy/test_sagemaker_notebook_stack.py +++ /dev/null @@ -1,48 +0,0 @@ -import json - -import pytest -from aws_cdk import App - -from dataall.cdkproxy.stacks import SagemakerNotebook - - -@pytest.fixture(scope='function', autouse=True) -def patch_methods(mocker, db, notebook, env, org): - mocker.patch( - 'dataall.cdkproxy.stacks.notebook.SagemakerNotebook.get_engine', - return_value=db, - ) - mocker.patch( - 'dataall.aws.handlers.sts.SessionHelper.get_delegation_role_name', - return_value="dataall-pivot-role-name-pytest", - ) - mocker.patch( - 'dataall.cdkproxy.stacks.notebook.SagemakerNotebook.get_target', - return_value=notebook, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_engine', return_value=db - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_target', - return_value=notebook, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_environment', - return_value=env, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_organization', - return_value=org, - ) - - -@pytest.fixture(scope='function', autouse=True) -def template(notebook): - app = App() - SagemakerNotebook(app, 'SagemakerNotebook', target_uri=notebook.notebookUri) - return json.dumps(app.synth().get_stack_by_name('SagemakerNotebook').template) - - -def test_resources_created(template): - assert 'AWS::SageMaker::NotebookInstance' in template diff --git a/tests/cdkproxy/test_sagemaker_studio_stack.py b/tests/cdkproxy/test_sagemaker_studio_stack.py deleted file mode 100644 index 6ff27e1a7..000000000 --- a/tests/cdkproxy/test_sagemaker_studio_stack.py +++ /dev/null @@ -1,50 +0,0 @@ -import json - -import pytest -from aws_cdk import App - -from dataall.cdkproxy.stacks import EnvironmentSetup, SagemakerStudioUserProfile - - -@pytest.fixture(scope='function', autouse=True) -def patch_methods(mocker, db, sgm_studio, env, org): - mocker.patch( - 'dataall.cdkproxy.stacks.sagemakerstudio.SagemakerStudioUserProfile.get_engine', - return_value=db, - ) - mocker.patch( - 'dataall.aws.handlers.sts.SessionHelper.get_delegation_role_name', - return_value="dataall-pivot-role-name-pytest", - ) - mocker.patch( - 'dataall.cdkproxy.stacks.sagemakerstudio.SagemakerStudioUserProfile.get_target', - return_value=sgm_studio, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_engine', return_value=db - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_target', - return_value=sgm_studio, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_environment', - return_value=env, - ) - mocker.patch( - 'dataall.utils.runtime_stacks_tagging.TagsUtil.get_organization', - return_value=org, - ) - - -@pytest.fixture(scope='function', autouse=True) -def template(sgm_studio): - app = App() - SagemakerStudioUserProfile( - app, 'Studio', target_uri=sgm_studio.sagemakerStudioUserProfileUri - ) - return json.dumps(app.synth().get_stack_by_name('Studio').template) - - -def test_resources_created(template): - assert 'AWS::SageMaker::UserProfile' in template diff --git a/tests/client.py b/tests/client.py new file mode 100644 index 000000000..01b3ec83e --- /dev/null +++ b/tests/client.py @@ -0,0 +1,81 @@ +import typing +import json +from ariadne import graphql_sync +from ariadne.constants import PLAYGROUND_HTML +from flask import Flask, request, jsonify, Response +from munch import DefaultMunch +from dataall.base.api import get_executable_schema +from dataall.base.context import set_context, dispose_context, 
RequestContext +from dataall.base.config import config + +config.set_property("cdk_proxy_url", "mock_url") + + +class ClientWrapper: + def __init__(self, cli): + self.client = cli + + def query( + self, + query: str, + username: str = 'test', + groups: typing.List[str] = ['-'], + **variables, + ): + response: Response = self.client.post( + '/graphql', + json={'query': f""" {query} """, 'variables': variables}, + headers={'groups': json.dumps(groups), 'username': username}, + ) + return DefaultMunch.fromDict(response.get_json()) + + +def create_app(db): + app = Flask('tests') + schema = get_executable_schema() + + @app.route('/', methods=['OPTIONS']) + def opt(): + # On GET request serve GraphQL Playground + # You don't need to provide Playground if you don't want to + # but keep on mind this will not prohibit clients from + # exploring your API using desktop GraphQL Playground app. + return '
<html><body><h1>Hello</h1></body></html>
', 200 + + @app.route('/graphql', methods=['GET']) + def graphql_playground(): + # On GET request serve GraphQL Playground + # You don't need to provide Playground if you don't want to + # but keep on mind this will not prohibit clients from + # exploring your API using desktop GraphQL Playground app. + return PLAYGROUND_HTML, 200 + + @app.route('/graphql', methods=['POST']) + def graphql_server(): + # GraphQL queries are always sent as POST + # Note: Passing the request to the context is optional. + # In Flask, the current request is always accessible as flask.request + data = request.get_json() + + username = request.headers.get('Username', 'anonym') + groups = json.loads(request.headers.get('Groups', '[]')) + + set_context(RequestContext(db, username, groups)) + + success, result = graphql_sync( + schema, + data, + context_value={ + 'schema': None, + 'engine': db, + 'username': username, + 'groups': groups, + }, + debug=app.debug, + ) + + dispose_context() + status_code = 200 if success else 400 + return jsonify(result), status_code + + return app diff --git a/tests/conftest.py b/tests/conftest.py index b247f0659..5429e0302 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,14 +1,59 @@ import os +from dataclasses import dataclass + import pytest -import dataall +from dataall.base.db import get_engine, create_schema_and_tables, Engine +from dataall.base.loader import load_modules, ImportMode, list_loaded_modules +from glob import glob + +from dataall.core.cognito_groups.db.cognito_group_models import Group +from dataall.core.permissions.db import Tenant, Permission +from dataall.core.permissions.db.tenant_policy_repositories import TenantPolicy +from dataall.core.permissions.permissions import TENANT_ALL +from tests.client import create_app, ClientWrapper +load_modules(modes=ImportMode.all()) ENVNAME = os.environ.get('envname', 'pytest') +collect_ignore_glob = [] + + +def ignore_module_tests_if_not_active(): + """ + Ignores tests of the modules that are turned off. + It uses the collect_ignore_glob hook + """ + modules = list_loaded_modules() + + all_module_files = set(glob(os.path.join("tests", "modules", "[!_]*"), recursive=True)) + active_module_tests = set() + for module in modules: + active_module_tests.update(glob(os.path.join("tests", "modules", module), recursive=True)) + + exclude_tests = all_module_files - active_module_tests + + # here is a small hack to satisfy both glob and pytest. glob is using os.getcwd() which is root of the project + # while using "make test". pytest is using test directory. 
Here is why we add "tests" prefix for glob and + # remove it for pytest + prefix_to_remove = f"tests{os.sep}" + + # migrate to remove prefix when runtime > 3.8 + exclude_tests = [excluded[len(prefix_to_remove):] for excluded in exclude_tests] + collect_ignore_glob.extend(exclude_tests) + + +ignore_module_tests_if_not_active() + + +@dataclass +class User: + username: str + @pytest.fixture(scope='module') -def db() -> dataall.db.Engine: - engine = dataall.db.get_engine(envname=ENVNAME) - dataall.db.create_schema_and_tables(engine, envname=ENVNAME) +def db() -> Engine: + engine = get_engine(envname=ENVNAME) + create_schema_and_tables(engine, envname=ENVNAME) yield engine engine.session().close() engine.engine.dispose() @@ -17,3 +62,117 @@ def db() -> dataall.db.Engine: @pytest.fixture(scope='module') def es(): yield True + + +@pytest.fixture(scope='module', autouse=True) +def app(db): + yield create_app(db) + + +@pytest.fixture(scope='module') +def client(app) -> ClientWrapper: + with app.test_client() as client: + yield ClientWrapper(client) + + +@pytest.fixture(scope='module', autouse=True) +def user(): + yield User('alice') + + +@pytest.fixture(scope='module', autouse=True) +def user2(): + yield User('bob') + + +@pytest.fixture(scope='module', autouse=True) +def user3(): + yield User('david') + + +def _create_group(db, tenant, name, user): + with db.scoped_session() as session: + group = Group(name=name, label=name, owner=user.username) + session.add(group) + session.commit() + + TenantPolicy.attach_group_tenant_policy( + session=session, + group=name, + permissions=TENANT_ALL, + tenant_name=tenant.name, + ) + return group + + +@pytest.fixture(scope='module') +def group(db, tenant, user): + yield _create_group(db, tenant, "testadmins", user) + + +@pytest.fixture(scope='module') +def group2(db, tenant, user2): + yield _create_group(db, tenant, "dataengineers", user2) + + +@pytest.fixture(scope='module') +def group3(db, tenant, user3): + yield _create_group(db, tenant, "datascientists", user3) + + +@pytest.fixture(scope='module') +def group4(db, tenant, user3): + yield _create_group(db, tenant, "externals", user3) + + +@pytest.fixture(scope='module', autouse=True) +def tenant(db, permissions): + with db.scoped_session() as session: + tenant = Tenant.save_tenant(session, name='dataall', description='Tenant dataall') + yield tenant + + +@pytest.fixture(scope='module', autouse=True) +def patch_request(module_mocker): + """we will mock requests.post so no call to cdk proxy will be made""" + module_mocker.patch('requests.post', return_value=True) + + +@pytest.fixture(scope='module', autouse=True) +def permissions(db): + with db.scoped_session() as session: + yield Permission.init_permissions(session) + + +@pytest.fixture(scope='function', autouse=True) +def patch_ssm(mocker): + mocker.patch( + 'dataall.utils.parameter.Parameter.get_parameter', return_value='param' + ) + + +@pytest.fixture(scope='module', autouse=True) +def patch_stack_tasks(module_mocker): + module_mocker.patch( + 'dataall.core.stacks.aws.ecs.Ecs.is_task_running', + return_value=False, + ) + module_mocker.patch( + 'dataall.core.stacks.aws.ecs.Ecs.run_cdkproxy_task', + return_value='arn:aws:eu-west-1:xxxxxxxx:ecs:task/1222222222', + ) + module_mocker.patch( + 'dataall.core.stacks.aws.cloudformation.CloudFormation.describe_stack_resources', + return_value=True, + ) + + +@pytest.fixture(scope='module', autouse=True) +def patch_check_env(module_mocker): + module_mocker.patch( + 
'dataall.core.environment.api.resolvers.check_environment', + return_value='CDKROLENAME', + ) + module_mocker.patch( + 'dataall.core.environment.api.resolvers.get_pivot_role_as_part_of_environment', return_value=False + ) diff --git a/tests/core/__init__.py b/tests/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/cognito_groups/__init__.py b/tests/core/cognito_groups/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/cognito_groups/test_group.py b/tests/core/cognito_groups/test_group.py new file mode 100644 index 000000000..b569e5a5e --- /dev/null +++ b/tests/core/cognito_groups/test_group.py @@ -0,0 +1,25 @@ + + +def test_list_cognito_groups_env(client, env_fixture, group, module_mocker): + module_mocker.patch( + 'dataall.core.cognito_groups.aws.cognito.Cognito.list_cognito_groups', + return_value=[{"GroupName": 'cognitos'}, {"GroupName": 'testadmins'}], + ) + response = client.query( + """ + query listCognitoGroups ( + $filter: CognitoGroupFilter + ) { + listCognitoGroups ( + filter: $filter + ){ + groupName + } + } + """, + username='alice', + filter={'type': 'environment', 'uri': env_fixture.environmentUri}, + ) + assert response.data.listCognitoGroups[0].groupName == 'cognitos' + + diff --git a/tests/core/conftest.py b/tests/core/conftest.py new file mode 100644 index 000000000..7331a406f --- /dev/null +++ b/tests/core/conftest.py @@ -0,0 +1,131 @@ +import pytest + +from dataall.core.environment.db.environment_models import Environment +from dataall.core.organizations.db.organization_models import Organization + + +@pytest.fixture(scope='module', autouse=True) +def env(client): + cache = {} + + def factory(org, envname, owner, group, account, region, desc='test', parameters=None): + if not parameters: + parameters = {"dashboardsEnabled": "true"} + + key = f"{org.organizationUri}{envname}{owner}{''.join(group or '-')}{account}{region}" + if cache.get(key): + return cache[key] + response = client.query( + """mutation CreateEnv($input:NewEnvironmentInput){ + createEnvironment(input:$input){ + organization{ + organizationUri + } + environmentUri + label + AwsAccountId + SamlGroupName + region + name + owner + parameters { + key + value + } + } + }""", + username=f'{owner}', + groups=[group], + input={ + 'label': f'{envname}', + 'description': f'{desc}', + 'organizationUri': org.organizationUri, + 'AwsAccountId': account, + 'tags': ['a', 'b', 'c'], + 'region': f'{region}', + 'SamlGroupName': f'{group}', + 'vpcId': 'vpc-123456', + 'parameters': [{'key': k, 'value': v} for k, v in parameters.items()] + }, + ) + cache[key] = response.data.createEnvironment + return cache[key] + + yield factory + + +@pytest.fixture(scope="module") +def environment(db): + def factory( + organization: Organization, + awsAccountId: str, + label: str, + owner: str, + samlGroupName: str, + environmentDefaultIAMRoleName: str, + ) -> Environment: + with db.scoped_session() as session: + env = Environment( + organizationUri=organization.organizationUri, + AwsAccountId=awsAccountId, + region="eu-central-1", + label=label, + owner=owner, + tags=[], + description="desc", + SamlGroupName=samlGroupName, + EnvironmentDefaultIAMRoleName=environmentDefaultIAMRoleName, + EnvironmentDefaultIAMRoleArn=f"arn:aws:iam::{awsAccountId}:role/{environmentDefaultIAMRoleName}", + CDKRoleArn=f"arn:aws::{awsAccountId}:role/EnvRole", + ) + session.add(env) + session.commit() + return env + + yield factory + + +@pytest.fixture(scope='module', autouse=True) +def 
org(client): + cache = {} + + def factory(orgname, owner, group): + key = orgname + owner + group + if cache.get(key): + print(f'returning item from cached key {key}') + return cache.get(key) + response = client.query( + """mutation CreateOrganization($input:NewOrganizationInput){ + createOrganization(input:$input){ + organizationUri + label + name + owner + SamlGroupName + } + }""", + username=f'{owner}', + groups=[group], + input={ + 'label': f'{orgname}', + 'description': f'test', + 'tags': ['a', 'b', 'c'], + 'SamlGroupName': f'{group}', + }, + ) + cache[key] = response.data.createOrganization + return cache[key] + + yield factory + + +@pytest.fixture(scope='module') +def org_fixture(org, user, group): + org1 = org('testorg', user.username, group.name) + yield org1 + + +@pytest.fixture(scope='module') +def env_fixture(env, org_fixture, user, group, tenant, module_mocker): + env1 = env(org_fixture, 'dev', 'alice', 'testadmins', '111111111111', 'eu-west-1') + yield env1 diff --git a/tests/core/environments/__init__.py b/tests/core/environments/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/environments/test_env_stacks_updater.py b/tests/core/environments/test_env_stacks_updater.py new file mode 100644 index 000000000..c3ba18012 --- /dev/null +++ b/tests/core/environments/test_env_stacks_updater.py @@ -0,0 +1,11 @@ +from dataall.core.environment.tasks.env_stacks_updater import update_stacks + + +def test_stacks_update(db, org_fixture, env_fixture, mocker): + mocker.patch( + 'dataall.core.environment.tasks.env_stacks_updater.update_stack', + return_value=True, + ) + envs, others = update_stacks(engine=db, envname='local') + assert envs == 1 + assert others == 0 diff --git a/tests/core/environments/test_environment.py b/tests/core/environments/test_environment.py new file mode 100644 index 000000000..b28628996 --- /dev/null +++ b/tests/core/environments/test_environment.py @@ -0,0 +1,682 @@ +from dataall.core.environment.api.enums import EnvironmentPermission +from dataall.core.environment.db.environment_models import Environment +from dataall.core.environment.services.environment_service import EnvironmentService + + +def get_env(client, env_fixture, group): + return client.query( + """ + query GetEnv($environmentUri:String!){ + getEnvironment(environmentUri:$environmentUri){ + organization{ + organizationUri + } + environmentUri + label + AwsAccountId + region + SamlGroupName + owner + stack{ + EcsTaskArn + EcsTaskId + } + parameters { + key + value + } + } + } + """, + username='alice', + environmentUri=env_fixture.environmentUri, + groups=[group.name], + ) + + +def test_get_environment(client, org_fixture, env_fixture, group): + response = get_env(client, env_fixture, group) + assert ( + response.data.getEnvironment.organization.organizationUri + == org_fixture.organizationUri + ) + body = response.data.getEnvironment + assert body.owner == 'alice' + assert body.AwsAccountId == env_fixture.AwsAccountId + + params = {p.key: p.value for p in body.parameters} + assert params["dashboardsEnabled"] == "true" + + +def test_get_environment_object_not_found(client, org_fixture, env_fixture, group): + response = client.query( + """ + query GetEnv($environmentUri:String!){ + getEnvironment(environmentUri:$environmentUri){ + organization{ + organizationUri + } + environmentUri + label + AwsAccountId + region + SamlGroupName + owner + } + } + """, + username='alice', + environmentUri='doesnotexist', + groups=[group.name], + ) + assert 'UnauthorizedOperation' in 
response.errors[0].message + + +def test_update_env(client, org_fixture, env_fixture, group): + query = """ + mutation UpdateEnv($environmentUri:String!,$input:ModifyEnvironmentInput){ + updateEnvironment(environmentUri:$environmentUri,input:$input){ + organization{ + organizationUri + } + label + AwsAccountId + region + SamlGroupName + owner + tags + resourcePrefix + parameters { + key + value + } + } + } + """ + + response = client.query(query, + username='alice', + environmentUri=env_fixture.environmentUri, + input={ + 'label': 'DEV', + 'tags': ['test', 'env'], + 'parameters': [ + { + 'key': 'moduleEnabled', + 'value': 'True' + } + ], + 'resourcePrefix': 'customer-prefix_AZ390 ', + }, + groups=[group.name], + ) + assert 'InvalidInput' in response.errors[0].message + + response = client.query(query, + username='alice', + environmentUri=env_fixture.environmentUri, + input={ + 'label': 'DEV', + 'tags': ['test', 'env'], + 'parameters': [ + { + 'key': 'moduleEnabled', + 'value': 'True' + } + ], + 'resourcePrefix': 'customer-prefix', + }, + groups=[group.name], + ) + print(response) + assert ( + response.data.updateEnvironment.organization.organizationUri + == org_fixture.organizationUri + ) + assert response.data.updateEnvironment.owner == 'alice' + assert response.data.updateEnvironment.AwsAccountId == env_fixture.AwsAccountId + assert response.data.updateEnvironment.label == 'DEV' + assert str(response.data.updateEnvironment.tags) == str(['test', 'env']) + assert not response.data.updateEnvironment.dashboardsEnabled + assert response.data.updateEnvironment.parameters + assert response.data.updateEnvironment.parameters[0]["key"] == "moduleEnabled" + assert response.data.updateEnvironment.parameters[0]["value"] == "True" + assert response.data.updateEnvironment.resourcePrefix == 'customer-prefix' + + +def test_update_params(client, org_fixture, env_fixture, group): + def update_params(parameters): + return client.query( + query, + username='alice', + environmentUri=env_fixture.environmentUri, + input=parameters, + groups=[group.name], + ) + + query = """ + mutation UpdateEnv($environmentUri:String!,$input:ModifyEnvironmentInput){ + updateEnvironment(environmentUri:$environmentUri,input:$input){ + parameters { + key + value + } + } + } + """ + + module_enabled = {'parameters': [ {'key': 'moduleEnabled','value': 'True'}]} + environment = update_params(module_enabled).data.updateEnvironment + assert len(environment.parameters) + assert environment.parameters[0]["key"] == "moduleEnabled" + assert environment.parameters[0]["value"] == "True" + + +def test_unauthorized_update(client, org_fixture, env_fixture): + response = client.query( + """ + mutation UpdateEnv($environmentUri:String!,$input:ModifyEnvironmentInput){ + updateEnvironment(environmentUri:$environmentUri,input:$input){ + organization{ + organizationUri + } + label + AwsAccountId + region + SamlGroupName + owner + tags + } + } + """, + username='bob', + environmentUri=env_fixture.environmentUri, + input={'label': 'DEV', 'tags': ['test', 'env']}, + ) + assert 'UnauthorizedOperation' in response.errors[0].message + + +def test_list_environments_no_filter(org_fixture, env_fixture, client, group): + response = client.query( + """ + query ListEnvironments($filter:EnvironmentFilter){ + listEnvironments(filter:$filter){ + count + nodes{ + environmentUri + owner + name + userRoleInEnvironment + label + AwsAccountId + region + } + } + } + """, + username='alice', + groups=[group.name], + ) + print(response) + + assert 
response.data.listEnvironments.count == 1 + + response = client.query( + """ + query ListEnvironmentNetworks($environmentUri: String!,$filter:VpcFilter){ + listEnvironmentNetworks(environmentUri:$environmentUri,filter:$filter){ + count + nodes{ + VpcId + SamlGroupName + } + } + } + """, + environmentUri=env_fixture.environmentUri, + username='alice', + groups=[group.name], + ) + print(response) + + assert response.data.listEnvironmentNetworks.count == 1 + + +def test_list_environment_role_filter_as_creator(org_fixture, env_fixture, client, group): + response = client.query( + """ + query ListEnvironments($filter:EnvironmentFilter){ + listEnvironments(filter:$filter){ + count + nodes{ + environmentUri + name + owner + label + AwsAccountId + region + } + } + } + """, + username='alice', + groups=[group.name], + ) + print('--->', response) + + assert response.data.listEnvironments.count == 1 + + +def test_list_environment_role_filter_as_admin(db, client, org_fixture, env_fixture, user, group): + response = client.query( + """ + query ListEnvironments($filter:EnvironmentFilter){ + listEnvironments(filter:$filter){ + count + nodes{ + environmentUri + name + owner + label + AwsAccountId + region + } + } + } + """, + username=user.username, + groups=[group.name], + filter={'roles': [EnvironmentPermission.Invited.name]}, + ) + + assert response.data.listEnvironments.count == 1 + + +def test_paging(db, client, org_fixture, env_fixture, user, group): + for i in range(1, 30): + with db.scoped_session() as session: + env = Environment( + organizationUri=org_fixture.organizationUri, + AwsAccountId=f'12345678901+{i}', + region='eu-west-1', + label='org', + owner=user.username, + tags=[], + description='desc', + SamlGroupName=group.name, + EnvironmentDefaultIAMRoleName='EnvRole', + EnvironmentDefaultIAMRoleArn='arn:aws::123456789012:role/EnvRole/GlueJobSessionRunner', + CDKRoleArn='arn:aws::123456789012:role/EnvRole', + userRoleInEnvironment='999', + ) + session.add(env) + session.commit() + + hasNext = True + nb_iter = 0 + page = 1 + max_iter = 10 + first_id = None + while hasNext and nb_iter < max_iter: + response = client.query( + """ + query LE($filter:EnvironmentFilter){ + listEnvironments(filter:$filter){ + count + page + pageSize + hasNext + hasPrevious + nodes{ + environmentUri + } + } + } + """, + username=user.username, + filter={'page': page, 'pageSize': 5}, + groups=[group.name], + ) + assert len(response.data.listEnvironments.nodes) == 5 + hasNext = response.data.listEnvironments.hasNext + nb_iter = nb_iter + 1 + page += 1 + if page > 1: + assert first_id != response.data.listEnvironments.nodes[0].environmentUri + first_id = response.data.listEnvironments.nodes[0].environmentUri + + +def test_group_invitation(db, client, env_fixture, org_fixture, group2, user, group3, group): + response = client.query( + """ + query listEnvironmentGroupInvitationPermissions($environmentUri:String){ + listEnvironmentGroupInvitationPermissions(environmentUri:$environmentUri){ + permissionUri + name + type + } + } + """, + username=user.username, + groups=[group.name, group2.name], + filter={}, + ) + + env_permissions = [ + p.name for p in response.data.listEnvironmentGroupInvitationPermissions + ] + + response = client.query( + """ + mutation inviteGroupOnEnvironment($input:InviteGroupOnEnvironmentInput){ + inviteGroupOnEnvironment(input:$input){ + environmentUri + } + } + """, + username='alice', + input=dict( + environmentUri=env_fixture.environmentUri, + groupUri=group2.name, + permissions=env_permissions, 
+ environmentIAMRoleName='myteamrole', + ), + groups=[group.name, group2.name], + ) + print(response) + assert response.data.inviteGroupOnEnvironment + + response = client.query( + """ + query getGroup($groupUri:String!, $environmentUri:String){ + getGroup(groupUri:$groupUri){ + environmentPermissions(environmentUri:$environmentUri){ + name + } + } + } + """, + username=user.username, + groups=[group2.name], + groupUri=group2.name, + environmentUri=env_fixture.environmentUri, + ) + env_permissions = [p.name for p in response.data.getGroup.environmentPermissions] + + response = client.query( + """ + mutation updateGroupEnvironmentPermissions($input:InviteGroupOnEnvironmentInput!){ + updateGroupEnvironmentPermissions(input:$input){ + environmentUri + } + } + """, + username='alice', + input=dict( + environmentUri=env_fixture.environmentUri, + groupUri=group2.name, + permissions=env_permissions, + ), + groups=[group.name, group2.name], + ) + print(response) + assert response.data.updateGroupEnvironmentPermissions + response = client.query( + """ + query listEnvironmentInvitedGroups($environmentUri: String!, $filter:GroupFilter){ + listEnvironmentInvitedGroups(environmentUri:$environmentUri, filter:$filter){ + count + nodes{ + groupUri + name + } + } + } + """, + username=user.username, + groups=[group.name, group2.name], + environmentUri=env_fixture.environmentUri, + filter={}, + ) + + assert response.data.listEnvironmentInvitedGroups.count == 1 + + response = client.query( + """ + query listEnvironmentGroups($environmentUri: String!, $filter:GroupFilter){ + listEnvironmentGroups(environmentUri:$environmentUri, filter:$filter){ + count + nodes{ + groupUri + name + environmentIAMRoleName + } + } + } + """, + username=user.username, + groups=[group.name, group2.name], + environmentUri=env_fixture.environmentUri, + filter={}, + ) + + assert response.data.listEnvironmentGroups.count == 2 + assert 'myteamrole' in [ + g.environmentIAMRoleName for g in response.data.listEnvironmentGroups.nodes + ] + + response = client.query( + """ + query listEnvironmentGroups($environmentUri: String!, $filter:GroupFilter){ + listEnvironmentGroups(environmentUri:$environmentUri, filter:$filter){ + count + nodes{ + groupUri + name + } + } + } + """, + username=user.username, + groups=[group.name], + environmentUri=env_fixture.environmentUri, + filter={}, + ) + + assert response.data.listEnvironmentGroups.count == 1 + + response = client.query( + """ + query listAllEnvironmentGroups($environmentUri: String!, $filter:GroupFilter){ + listAllEnvironmentGroups(environmentUri:$environmentUri, filter:$filter){ + count + nodes{ + groupUri + name + } + } + } + """, + username=user.username, + groups=[group.name], + environmentUri=env_fixture.environmentUri, + filter={}, + ) + + assert response.data.listAllEnvironmentGroups.count == 2 + + response = client.query( + """ + mutation removeGroupFromEnvironment($environmentUri: String!, $groupUri: String!){ + removeGroupFromEnvironment(environmentUri: $environmentUri, groupUri: $groupUri){ + environmentUri + } + } + """, + username='alice', + environmentUri=env_fixture.environmentUri, + groupUri=group2.name, + groups=[group.name, group2.name], + ) + print(response) + assert response.data.removeGroupFromEnvironment + + response = client.query( + """ + query listEnvironmentInvitedGroups($environmentUri: String!, $filter:GroupFilter){ + listEnvironmentInvitedGroups(environmentUri:$environmentUri, filter:$filter){ + count + nodes{ + groupUri + name + } + } + } + """, + 
username=user.username, + groups=[group.name, group2.name], + environmentUri=env_fixture.environmentUri, + filter={}, + ) + + assert response.data.listEnvironmentInvitedGroups.count == 0 + + response = client.query( + """ + query listEnvironmentGroups($environmentUri: String!, $filter:GroupFilter){ + listEnvironmentGroups(environmentUri:$environmentUri, filter:$filter){ + count + nodes{ + groupUri + name + } + } + } + """, + username=user.username, + groups=[group.name, group2.name], + environmentUri=env_fixture.environmentUri, + filter={}, + ) + + assert response.data.listEnvironmentGroups.count == 1 + + response = client.query( + """ + mutation inviteGroupOnEnvironment($input:InviteGroupOnEnvironmentInput){ + inviteGroupOnEnvironment(input:$input){ + environmentUri + } + } + """, + username='alice', + input=dict( + environmentUri=env_fixture.environmentUri, + groupUri=group3.name, + permissions=env_permissions, + ), + groups=[group.name, group3.name], + ) + print(response) + assert response.data.inviteGroupOnEnvironment + + response = client.query( + """ + query listEnvironmentGroups($environmentUri: String!, $filter:GroupFilter){ + listEnvironmentGroups(environmentUri:$environmentUri, filter:$filter){ + count + nodes{ + groupUri + name + environmentIAMRoleName + } + } + } + """, + username=user.username, + groups=[group.name, group2.name, group3.name], + environmentUri=env_fixture.environmentUri, + filter={}, + ) + assert 'myteamrole' not in [ + g.environmentIAMRoleName for g in response.data.listEnvironmentGroups.nodes + ] + + +def test_archive_env(client, org_fixture, env_fixture, group, group2): + response = client.query( + """ + mutation deleteEnvironment($environmentUri:String!, $deleteFromAWS:Boolean!){ + deleteEnvironment(environmentUri:$environmentUri, deleteFromAWS:$deleteFromAWS) + } + """, + username='alice', + groups=[group.name, group2.name], + environmentUri=env_fixture.environmentUri, + deleteFromAWS=True, + ) + print(response) + assert response.data.deleteEnvironment + + +def test_create_environment(db, client, org_fixture, env_fixture, user, group): + response = client.query( + """mutation CreateEnv($input:NewEnvironmentInput){ + createEnvironment(input:$input){ + organization{ + organizationUri + } + environmentUri + label + AwsAccountId + SamlGroupName + region + name + owner + EnvironmentDefaultIAMRoleName + EnvironmentDefaultIAMRoleImported + resourcePrefix + networks{ + VpcId + region + privateSubnetIds + publicSubnetIds + default + } + } + }""", + username=user.username, + groups=[group.name], + input={ + 'label': f'dev', + 'description': f'test', + 'EnvironmentDefaultIAMRoleName': 'myOwnIamRole', + 'organizationUri': org_fixture.organizationUri, + 'AwsAccountId': env_fixture.AwsAccountId, + 'tags': ['a', 'b', 'c'], + 'region': f'{env_fixture.region}', + 'SamlGroupName': group.name, + 'vpcId': 'vpc-1234567', + 'privateSubnetIds': 'subnet-1', + 'publicSubnetIds': 'subnet-21', + 'resourcePrefix': 'customer-prefix', + }, + ) + + body = response.data.createEnvironment + + assert body.networks + assert body.EnvironmentDefaultIAMRoleName == 'myOwnIamRole' + assert body.EnvironmentDefaultIAMRoleImported + assert body.resourcePrefix == 'customer-prefix' + for vpc in body.networks: + assert vpc.privateSubnetIds + assert vpc.publicSubnetIds + assert vpc.default + + with db.scoped_session() as session: + env = EnvironmentService.get_environment_by_uri( + session, response.data.createEnvironment.environmentUri + ) + session.delete(env) + session.commit() diff --git 
a/tests/core/organizations/__init__.py b/tests/core/organizations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/api/test_organization.py b/tests/core/organizations/test_organization.py similarity index 92% rename from tests/api/test_organization.py rename to tests/core/organizations/test_organization.py index fd414af31..40b304ff9 100644 --- a/tests/api/test_organization.py +++ b/tests/core/organizations/test_organization.py @@ -1,34 +1,36 @@ -import dataall import pytest +from dataall.core.environment.db.environment_models import Environment, EnvironmentParameter +from dataall.core.organizations.api.enums import OrganisationUserRole + @pytest.fixture(scope='module', autouse=True) def org1(org, user, group, tenant): - org1 = org('testorg', user.userName, group.name) + org1 = org('testorg', user.username, group.name) yield org1 @pytest.fixture(scope='module', autouse=True) def org2(org, user2, group2, tenant): - org2 = org('anothertestorg', user2.userName, group2.name) + org2 = org('anothertestorg', user2.username, group2.name) yield org2 @pytest.fixture(scope='module', autouse=True) def env_dev(env, org2, user2, group2, tenant): - env2 = env(org2, 'dev', user2.userName, group2.name, '222222222222', 'eu-west-1', 'description') + env2 = env(org2, 'dev', user2.username, group2.name, '222222222222', 'eu-west-1', 'description') yield env2 @pytest.fixture(scope='module', autouse=True) def env_other(env, org2, user2, group2, tenant): - env2 = env(org2, 'other', user2.userName, group2.name, '222222222222', 'eu-west-1') + env2 = env(org2, 'other', user2.username, group2.name, '222222222222', 'eu-west-1') yield env2 @pytest.fixture(scope='module', autouse=True) def env_prod(env, org2, user2, group2, tenant): - env2 = env(org2, 'prod', user2.userName, group2.name, '111111111111', 'eu-west-1', 'description') + env2 = env(org2, 'prod', user2.username, group2.name, '111111111111', 'eu-west-1', 'description') yield env2 @@ -176,13 +178,13 @@ def test_list_organizations_anyone(client, org1): }""", 'tom', ['all'], - filter={'roles': [dataall.api.constants.OrganisationUserRole.Member.name]}, + filter={'roles': [OrganisationUserRole.Member.name]}, ) print(response) assert response.data.listOrganizations.count == 0 -def test_group_invitation(db, client, org1, group2, user, group3, group, dataset, env): +def test_group_invitation(db, client, org1, group2, user, group3, group, env): response = client.query( """ mutation inviteGroupToOrganization($input:InviteGroupToOrganizationInput){ @@ -232,7 +234,7 @@ def test_group_invitation(db, client, org1, group2, user, group3, group, dataset } } """, - username=user.userName, + username=user.username, groups=[group.name, group2.name], organizationUri=org1.organizationUri, filter={}, @@ -252,7 +254,7 @@ def test_group_invitation(db, client, org1, group2, user, group3, group, dataset } } """, - username=user.userName, + username=user.username, groups=[group.name, group2.name], organizationUri=org1.organizationUri, filter={}, @@ -260,7 +262,7 @@ def test_group_invitation(db, client, org1, group2, user, group3, group, dataset assert response.data.listOrganizationGroups.count == 2 - env2 = env(org1, 'devg2', user.userName, group2.name, '111111111112', 'eu-west-1') + env2 = env(org1, 'devg2', user.username, group2.name, '111111111112', 'eu-west-1') assert env2.environmentUri response = client.query( @@ -280,8 +282,9 @@ def test_group_invitation(db, client, org1, group2, user, group3, group, dataset assert 'OrganizationResourcesFound' in 
response.errors[0].message with db.scoped_session() as session: - dataset = session.query(dataall.db.models.Environment).get(env2.environmentUri) - session.delete(dataset) + session.query(EnvironmentParameter).filter(EnvironmentParameter.environmentUri == env2.environmentUri).delete() + env = session.query(Environment).get(env2.environmentUri) + session.delete(env) session.commit() response = client.query( @@ -312,7 +315,7 @@ def test_group_invitation(db, client, org1, group2, user, group3, group, dataset } } """, - username=user.userName, + username=user.username, groups=[group.name, group2.name], organizationUri=org1.organizationUri, filter={}, @@ -332,7 +335,7 @@ def test_group_invitation(db, client, org1, group2, user, group3, group, dataset } } """, - username=user.userName, + username=user.username, groups=[group.name, group2.name], organizationUri=org1.organizationUri, filter={}, diff --git a/tests/core/permissions/__init__.py b/tests/core/permissions/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/permissions/test_permission.py b/tests/core/permissions/test_permission.py new file mode 100644 index 000000000..9eef192a5 --- /dev/null +++ b/tests/core/permissions/test_permission.py @@ -0,0 +1,62 @@ +import pytest + +from dataall.core.permissions.db.permission_repositories import Permission +from dataall.core.permissions.db.permission_models import PermissionType +from dataall.core.permissions.db.tenant_policy_repositories import TenantPolicy +from dataall.base.db import exceptions +from dataall.core.permissions.permissions import MANAGE_GROUPS, ENVIRONMENT_ALL, ORGANIZATION_ALL, TENANT_ALL + + +def permissions(db, all_perms): + with db.scoped_session() as session: + permissions = [] + for p in all_perms: + permissions.append( + Permission.save_permission( + session, + name=p, + description=p, + permission_type=PermissionType.RESOURCE.name, + ) + ) + for p in TENANT_ALL: + permissions.append( + Permission.save_permission( + session, + name=p, + description=p, + permission_type=PermissionType.TENANT.name, + ) + ) + session.commit() + + +def test_attach_tenant_policy(db, group, tenant): + permissions(db, ORGANIZATION_ALL + ENVIRONMENT_ALL) + with db.scoped_session() as session: + TenantPolicy.attach_group_tenant_policy( + session=session, + group=group.name, + permissions=[MANAGE_GROUPS], + tenant_name='dataall', + ) + + assert TenantPolicy.check_user_tenant_permission( + session=session, + username='alice', + groups=[group.name], + permission_name=MANAGE_GROUPS, + tenant_name='dataall', + ) + + +def test_unauthorized_tenant_policy(db, group): + with pytest.raises(exceptions.TenantUnauthorized): + with db.scoped_session() as session: + assert TenantPolicy.check_user_tenant_permission( + session=session, + username='alice', + groups=[group.name], + permission_name='UNKNOW_PERMISSION', + tenant_name='dataall', + ) diff --git a/tests/api/test_tenant.py b/tests/core/permissions/test_tenant.py similarity index 91% rename from tests/api/test_tenant.py rename to tests/core/permissions/test_tenant.py index a41eab9bd..3aefac5dd 100644 --- a/tests/api/test_tenant.py +++ b/tests/core/permissions/test_tenant.py @@ -1,4 +1,4 @@ -from dataall.db import permissions +from dataall.core.permissions import permissions def test_list_tenant_permissions(client, user, group, tenant): @@ -10,7 +10,7 @@ def test_list_tenant_permissions(client, user, group, tenant): } } """, - username=user.userName, + username=user.username, groups=[group.name, 'DAAdministrators'], ) assert 
len(response.data.listTenantPermissions) >= 1 @@ -23,7 +23,7 @@ def test_list_tenant_permissions(client, user, group, tenant): } } """, - username=user.userName, + username=user.username, groups=[group.name], ) assert 'UnauthorizedOperation' in response.errors[0].message @@ -41,7 +41,7 @@ def test_list_tenant_permissions(client, user, group, tenant): } } """, - username=user.userName, + username=user.username, groups=[group.name, 'DAAdministrators'], ) @@ -60,7 +60,7 @@ def test_update_permissions(client, user, group, tenant): username='alice', input=dict( groupUri=group.name, - permissions=[permissions.MANAGE_ORGANIZATIONS, permissions.MANAGE_DATASETS], + permissions=[permissions.MANAGE_ORGANIZATIONS, permissions.MANAGE_GROUPS], ), groups=[group.name, 'DAAdministrators'], ) @@ -77,7 +77,7 @@ def test_update_permissions(client, user, group, tenant): } } """, - username=user.userName, + username=user.username, groups=[group.name, 'DAAdministrators'], groupUri=group.name, ) @@ -92,7 +92,7 @@ def test_update_permissions(client, user, group, tenant): username='alice', input=dict( groupUri=group.name, - permissions=[permissions.MANAGE_ORGANIZATIONS, permissions.MANAGE_DATASETS], + permissions=[permissions.MANAGE_ORGANIZATIONS, permissions.MANAGE_GROUPS], ), groups=[group.name, 'DAAdministrators'], ) diff --git a/tests/core/stacks/__init__.py b/tests/core/stacks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/stacks/test_keyvaluetag.py b/tests/core/stacks/test_keyvaluetag.py new file mode 100644 index 000000000..5486ea1e6 --- /dev/null +++ b/tests/core/stacks/test_keyvaluetag.py @@ -0,0 +1,53 @@ +import pytest + +from dataall.core.stacks.db.target_type_repositories import TargetType +from dataall.base.db import exceptions + + +def list_tags_query(client, target_uri, target_type, group): + query = client.query( + """ + query listKeyValueTags($targetUri:String!, $targetType:String!){ + listKeyValueTags(targetUri:$targetUri, targetType:$targetType){ + tagUri + targetUri + targetType + key + value + cascade + } + } + """, + targetUri=target_uri, + targetType=target_type, + username='alice', + groups=[group], + ) + return query + + +def test_unsupported_target_type(db): + with pytest.raises(exceptions.InvalidInput): + assert TargetType.is_supported_target_type('unknown') + + +def update_key_value_tags(client, target_uri, target_type, tags, group): + return ( + client.query( + """ + mutation updateKeyValueTags($input:UpdateKeyValueTagsInput!){ + updateKeyValueTags(input:$input){ + tagUri + targetUri + targetType + key + value + cascade + } + } + """, + input=dict(targetUri=target_uri, targetType=target_type, tags=tags), + username='alice', + groups=[group], + ) + ) diff --git a/tests/core/stacks/test_stack.py b/tests/core/stacks/test_stack.py new file mode 100644 index 000000000..c9c3e7b21 --- /dev/null +++ b/tests/core/stacks/test_stack.py @@ -0,0 +1,29 @@ +def test_update_stack( + client, + tenant, + group, + env_fixture, +): + response = update_stack_query( + client, env_fixture.environmentUri, 'environment', group.name + ) + assert response.data.updateStack.targetUri == env_fixture.environmentUri + + +def update_stack_query(client, target_uri, target_type, group): + response = client.query( + """ + mutation updateStack($targetUri:String!, $targetType:String!){ + updateStack(targetUri:$targetUri, targetType:$targetType){ + stackUri + targetUri + name + } + } + """, + targetUri=target_uri, + targetType=target_type, + username='alice', + groups=[group], + ) + 
return response diff --git a/tests/core/vpc/__init__.py b/tests/core/vpc/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/vpc/test_vpc.py b/tests/core/vpc/test_vpc.py new file mode 100644 index 000000000..a55196d32 --- /dev/null +++ b/tests/core/vpc/test_vpc.py @@ -0,0 +1,122 @@ +import pytest + +from dataall.core.vpc.db.vpc_models import Vpc + + +@pytest.fixture(scope='module', autouse=True) +def vpc(env_fixture, group, client) -> Vpc: + response = client.query( + """ + mutation createNetwork($input:NewVpcInput){ + createNetwork(input:$input){ + vpcUri + label + description + tags + owner + SamlGroupName + privateSubnetIds + privateSubnetIds + } + } + """, + input={ + 'label': 'myvpc', + 'SamlGroupName': group.name, + 'tags': [group.name], + 'vpcId': 'vpc-12345678', + 'privateSubnetIds': ['sub1', 'sub2'], + 'publicSubnetIds': ['sub1', 'sub2'], + 'environmentUri': env_fixture.environmentUri, + }, + username='alice', + groups=[group.name], + ) + assert response.data.createNetwork.SamlGroupName + assert response.data.createNetwork.label + yield response.data.createNetwork + + +def test_list_networks(client, env_fixture, db, user, group, vpc): + response = client.query( + """ + query ListEnvironmentNetworks($environmentUri: String!,$filter:VpcFilter){ + listEnvironmentNetworks(environmentUri:$environmentUri,filter:$filter){ + count + nodes{ + VpcId + SamlGroupName + publicSubnetIds + privateSubnetIds + default + } + } + } + """, + environmentUri=env_fixture.environmentUri, + filter=None, + username='alice', + groups=[group.name], + ) + print(response) + + assert response.data.listEnvironmentNetworks.count == 2 + + +def test_list_networks_nopermissions(client, env_fixture, db, user, group2, vpc): + response = client.query( + """ + query ListEnvironmentNetworks($environmentUri: String!,$filter:VpcFilter){ + listEnvironmentNetworks(environmentUri:$environmentUri,filter:$filter){ + count + nodes{ + VpcId + SamlGroupName + publicSubnetIds + privateSubnetIds + default + } + } + } + """, + environmentUri=env_fixture.environmentUri, + filter=None, + username='bob', + groups=[group2.name], + ) + assert 'UnauthorizedOperation' in response.errors[0].message + + +def test_delete_network(client, env_fixture, db, user, group, module_mocker, vpc): + response = client.query( + """ + mutation deleteNetwork($vpcUri:String!){ + deleteNetwork(vpcUri:$vpcUri) + } + """, + vpcUri=vpc.vpcUri, + username=user.username, + groups=[group.name], + ) + assert response.data.deleteNetwork + response = client.query( + """ + query ListEnvironmentNetworks($environmentUri: String!,$filter:VpcFilter){ + listEnvironmentNetworks(environmentUri:$environmentUri,filter:$filter){ + count + nodes{ + VpcId + SamlGroupName + publicSubnetIds + privateSubnetIds + default + } + } + } + """, + environmentUri=env_fixture.environmentUri, + filter=None, + username='alice', + groups=[group.name], + ) + assert len(response.data.listEnvironmentNetworks['nodes']) == 1 diff --git a/tests/db/test_connect.py b/tests/db/test_connect.py deleted file mode 100644 index 32333d4cc..000000000 --- a/tests/db/test_connect.py +++ /dev/null @@ -1,19 +0,0 @@ -import os -import dataall - - -def test(db: dataall.db.Engine): - if os.getenv('local') or os.getenv('pytest'): - config: dataall.db.DbConfig = db.dbconfig - print(config) - assert config.host == 'localhost' - assert config.schema == 'pytest' - with db.scoped_session() as session: - models = [] - models = models + dataall.db.Base.__subclasses__() - models = models + 
dataall.db.Resource.__subclasses__() - for model in models: - nb = session.query(model).count() - assert nb == 0 - else: - assert True diff --git a/tests/db/test_permission.py b/tests/db/test_permission.py deleted file mode 100644 index 148cd3051..000000000 --- a/tests/db/test_permission.py +++ /dev/null @@ -1,268 +0,0 @@ -import pytest - -import dataall -from dataall.api.constants import OrganisationUserRole -from dataall.db import exceptions -from dataall.db.models.Permission import PermissionType - - -@pytest.fixture(scope='module') -def permissions(db): - with db.scoped_session() as session: - permissions = [] - for p in ( - dataall.db.permissions.DATASET_READ - + dataall.db.permissions.DATASET_WRITE - + dataall.db.permissions.DATASET_TABLE_READ - + dataall.db.permissions.ORGANIZATION_ALL - + dataall.db.permissions.ENVIRONMENT_ALL - ): - permissions.append( - dataall.db.api.Permission.save_permission( - session, - name=p, - description=p, - permission_type=PermissionType.RESOURCE.name, - ) - ) - for p in dataall.db.permissions.TENANT_ALL: - permissions.append( - dataall.db.api.Permission.save_permission( - session, - name=p, - description=p, - permission_type=PermissionType.TENANT.name, - ) - ) - session.commit() - yield permissions - - -@pytest.fixture(scope='module') -def tenant(db): - with db.scoped_session() as session: - tenant = dataall.db.api.Tenant.save_tenant( - session, name='dataall', description='Tenant dataall' - ) - yield tenant - - -@pytest.fixture(scope='module') -def user(db): - with db.scoped_session() as session: - user = dataall.db.models.User(userId='alice@test.com', userName='alice') - session.add(user) - yield user - - -@pytest.fixture(scope='module') -def group(db, user): - with db.scoped_session() as session: - group = dataall.db.models.Group( - name='testadmins', label='testadmins', owner='alice' - ) - session.add(group) - yield group - - -@pytest.fixture(scope='module') -def group_user(db, group, user): - with db.scoped_session() as session: - member = dataall.db.models.GroupMember( - userName=user.userName, - groupUri=group.groupUri, - ) - session.add(member) - yield member - - -@pytest.fixture(scope='module', autouse=True) -def org(db, group): - with db.scoped_session() as session: - org = dataall.db.models.Organization( - label='org', - owner='alice', - tags=[], - description='desc', - SamlGroupName=group.name, - userRoleInOrganization=OrganisationUserRole.Owner.value, - ) - session.add(org) - yield org - - -@pytest.fixture(scope='module', autouse=True) -def env(org, db, group): - with db.scoped_session() as session: - env = dataall.db.models.Environment( - organizationUri=org.organizationUri, - AwsAccountId='12345678901', - region='eu-west-1', - label='org', - owner='alice', - tags=[], - description='desc', - SamlGroupName=group.name, - EnvironmentDefaultIAMRoleName='EnvRole', - EnvironmentDefaultIAMRoleArn='arn:aws::123456789012:role/EnvRole/GlueJobSessionRunner', - CDKRoleArn='arn:aws::123456789012:role/EnvRole', - userRoleInEnvironment='999', - ) - session.add(env) - yield env - - -@pytest.fixture(scope='module', autouse=True) -def dataset(org, env, db, group): - with db.scoped_session() as session: - dataset = dataall.db.models.Dataset( - organizationUri=org.organizationUri, - environmentUri=env.environmentUri, - label='label', - owner='foo', - SamlAdminGroupName=group.name, - businessOwnerDelegationEmails=['foo@amazon.com'], - businessOwnerEmail=['bar@amazon.com'], - name='name', - S3BucketName='S3BucketName', - GlueDatabaseName='GlueDatabaseName', - 
KmsAlias='kmsalias', - AwsAccountId='123456789012', - region='eu-west-1', - IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', - IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', - ) - session.add(dataset) - yield dataset - - -def test_attach_resource_policy(db, user, group, group_user, dataset, permissions): - with db.scoped_session() as session: - - dataall.db.api.ResourcePolicy.attach_resource_policy( - session=session, - group=group.name, - permissions=dataall.db.permissions.DATASET_WRITE, - resource_uri=dataset.datasetUri, - resource_type=dataall.db.models.Dataset.__name__, - ) - assert dataall.db.api.ResourcePolicy.check_user_resource_permission( - session=session, - username=user.userName, - groups=[group.name], - permission_name=dataall.db.permissions.UPDATE_DATASET, - resource_uri=dataset.datasetUri, - ) - - -def test_attach_tenant_policy( - db, user, group, group_user, dataset, permissions, tenant -): - with db.scoped_session() as session: - - dataall.db.api.TenantPolicy.attach_group_tenant_policy( - session=session, - group=group.name, - permissions=[dataall.db.permissions.MANAGE_DATASETS], - tenant_name='dataall', - ) - - assert dataall.db.api.TenantPolicy.check_user_tenant_permission( - session=session, - username=user.userName, - groups=[group.name], - permission_name=dataall.db.permissions.MANAGE_DATASETS, - tenant_name='dataall', - ) - - -def test_unauthorized_resource_policy( - db, user, group_user, group, dataset, permissions -): - with pytest.raises(exceptions.ResourceUnauthorized): - with db.scoped_session() as session: - assert dataall.db.api.ResourcePolicy.check_user_resource_permission( - session=session, - username=user.userName, - groups=[group.name], - permission_name='UNKNOW_PERMISSION', - resource_uri=dataset.datasetUri, - ) - - -def test_unauthorized_tenant_policy( - db, user, group, group_user, dataset, permissions, tenant -): - with pytest.raises(exceptions.TenantUnauthorized): - with db.scoped_session() as session: - assert dataall.db.api.TenantPolicy.check_user_tenant_permission( - session=session, - username=user.userName, - groups=[group.name], - permission_name='UNKNOW_PERMISSION', - tenant_name='dataall', - ) - - -def test_create_dataset(db, env, user, group, group_user, dataset, permissions, tenant): - with db.scoped_session() as session: - dataall.db.api.TenantPolicy.attach_group_tenant_policy( - session=session, - group=group.name, - permissions=dataall.db.permissions.TENANT_ALL, - tenant_name='dataall', - ) - org_with_perm = dataall.db.api.Organization.create_organization( - session=session, - username=user.userName, - groups=[group.name], - uri=None, - data={ - 'label': 'OrgWithPerm', - 'SamlGroupName': group.name, - 'description': 'desc', - 'tags': [], - }, - check_perm=True, - ) - env_with_perm = dataall.db.api.Environment.create_environment( - session=session, - username=user.userName, - groups=[group.name], - uri=org_with_perm.organizationUri, - data={ - 'label': 'EnvWithPerm', - 'organizationUri': org_with_perm.organizationUri, - 'SamlGroupName': group.name, - 'description': 'desc', - 'AwsAccountId': '123456789012', - 'region': 'eu-west-1', - 'cdk_role_name': 'cdkrole', - }, - check_perm=True, - ) - - data = dict( - label='label', - owner='foo', - SamlAdminGroupName=group.name, - businessOwnerDelegationEmails=['foo@amazon.com'], - businessOwnerEmail=['bar@amazon.com'], - name='name', - S3BucketName='S3BucketName', - GlueDatabaseName='GlueDatabaseName', - KmsAlias='kmsalias', - AwsAccountId='123456789012', - 
region='eu-west-1', - IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', - IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', - ) - dataset = dataall.db.api.Dataset.create_dataset( - session=session, - username=user.userName, - groups=[group.name], - uri=env_with_perm.environmentUri, - data=data, - check_perm=True, - ) - assert dataset diff --git a/tests/modules/__init__.py b/tests/modules/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/modules/catalog/__init__.py b/tests/modules/catalog/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/modules/catalog/test_glossary.py b/tests/modules/catalog/test_glossary.py new file mode 100644 index 000000000..e7373a28d --- /dev/null +++ b/tests/modules/catalog/test_glossary.py @@ -0,0 +1,490 @@ +from datetime import datetime + +from dataall.modules.catalog.db.glossary_models import GlossaryNode +import pytest + + +@pytest.fixture(scope='module', autouse=True) +def g1(client, group): + r = client.query( + """ + mutation CreateGlossary($input:CreateGlossaryInput){ + createGlossary(input:$input){ + nodeUri + label + readme + } + } + """, + input={ + 'label': 'Customer Glossary', + 'readme': 'Glossary of customer related data', + }, + username='alice', + groups=[group.name], + ) + yield r.data.createGlossary + + +@pytest.fixture(scope='module', autouse=True) +def c1(client, g1, group): + r = client.query( + """ + mutation CreateCategory( + $parentUri:String!, + $input:CreateCategoryInput){ + createCategory(parentUri:$parentUri,input:$input){ + nodeUri + label + readme + } + } + """, + parentUri=g1.nodeUri, + input={'label': 'Identifiers', 'readme': 'Customer identifiers category'}, + username='alice', + groups=[group.name], + ) + yield r.data.createCategory + + +@pytest.fixture(scope='module', autouse=True) +def subcategory(client, c1, group): + r = client.query( + """ + mutation CreateCategory( + $parentUri:String! + $input:CreateCategoryInput! + ){ + createCategory(parentUri:$parentUri,input:$input){ + nodeUri + label + readme + created + } + } + """, + input={ + 'label': 'OptionalIdentifiers', + 'readme': 'Additional, non required customer identifiers', + }, + parentUri=c1.nodeUri, + username='alice', + groups=[group.name], + ) + subcategory = r.data.createCategory + yield subcategory + + +@pytest.fixture(scope='module', autouse=True) +def t1(client, c1, group): + r = client.query( + """ + mutation CreateTerm( + $parentUri:String!, + $input:CreateTermInput){ + createTerm(parentUri:$parentUri,input:$input){ + nodeUri + label + readme + } + } + """, + parentUri=c1.nodeUri, + input={'label': 'Customer ID', 'readme': 'Global Customer Identifier'}, + username='alice', + groups=[group.name], + ) + yield r.data.createTerm + + +def test_list_glossaries(client): + response = client.query( + """ + query ListGlossaries{ + listGlossaries{ + count + nodes{ + nodeUri + children{ + count + nodes{ + __typename + ... on Category{ + label + nodeUri + path + } + ... 
on Term{ + label + nodeUri + path + } + } + } + stats{ + categories + terms + associations + } + } + } + } + """ + ) + assert response.data.listGlossaries.count == 1 + assert response.data.listGlossaries.nodes[0].stats.categories == 2 + + +def test_hierarchical_search(client): + response = client.query( + """ + query SearchGlossary($filter:GlossaryNodeSearchFilter){ + searchGlossary(filter:$filter){ + count + page + pages + hasNext + hasPrevious + nodes{ + __typename + ...on Glossary{ + nodeUri + label + readme + created + owner + path + } + ...on Category{ + nodeUri + label + parentUri + readme + created + owner + path + } + ...on Term{ + nodeUri + parentUri + label + readme + created + owner + path + } + + } + } + } + """ + ) + assert response.data.searchGlossary.count == 4 + + +def test_get_glossary(client, g1): + r = client.query( + """ + query GetGlossary($nodeUri:String!){ + getGlossary(nodeUri:$nodeUri){ + nodeUri + label + readme + } + } + """, + nodeUri=g1.nodeUri, + ) + assert r.data.getGlossary.nodeUri == g1.nodeUri + assert r.data.getGlossary.label == g1.label + assert r.data.getGlossary.readme == g1.readme + + +def test_get_category(client, c1): + r = client.query( + """ + query GetCategory($nodeUri:String!){ + getCategory(nodeUri:$nodeUri){ + nodeUri + label + readme + } + } + """, + nodeUri=c1.nodeUri, + ) + print(r) + assert r.data.getCategory.nodeUri == c1.nodeUri + assert r.data.getCategory.label == c1.label + assert r.data.getCategory.readme == c1.readme + + +def test_get_term(client, t1): + r = client.query( + """ + query GetTerm($nodeUri:String!){ + getTerm(nodeUri:$nodeUri){ + nodeUri + label + readme + } + } + """, + nodeUri=t1.nodeUri, + ) + assert r.data.getTerm.nodeUri == t1.nodeUri + assert r.data.getTerm.label == t1.label + assert r.data.getTerm.readme == t1.readme + + +def test_glossary_categories(client, g1, c1): + r = client.query( + """ + query GetGlossary($nodeUri:String!){ + getGlossary(nodeUri:$nodeUri){ + nodeUri + label + readme + categories{ + count + page + pages + hasNext + hasPrevious + nodes{ + nodeUri + label + readme + } + } + } + } + """, + nodeUri=g1.nodeUri, + ) + assert r.data.getGlossary.categories.count == 1 + assert r.data.getGlossary.categories.nodes[0].nodeUri == c1.nodeUri + + +def test_list_subcategory(client, c1): + r = client.query( + """ + query GetCategory($nodeUri:String!){ + getCategory(nodeUri:$nodeUri){ + nodeUri + label + readme + categories{ + count + nodes{ + nodeUri + label + readme + } + } + } + } + """, + nodeUri=c1.nodeUri, + ) + + assert r.data.getCategory.categories.count == 1 + + +def test_list_category_terms(client, c1): + r = client.query( + """ + query GetCategory($nodeUri:String!){ + getCategory(nodeUri:$nodeUri){ + nodeUri + label + readme + terms{ + count + nodes{ + nodeUri + label + readme + } + } + } + } + """, + nodeUri=c1.nodeUri, + ) + assert r.data.getCategory.terms.count == 1 + + +def test_update_glossary(client, g1, group): + r = client.query( + """ + mutation UpdateGlossary( + $nodeUri:String!, + $input:UpdateGlossaryInput! + ){ + updateGlossary( + nodeUri:$nodeUri, + input:$input + ){ + nodeUri + label + readme + } + } + """, + nodeUri=g1.nodeUri, + input={'readme': g1.readme + '(updated description)'}, + username='alice', + groups=[group.name], + ) + assert r.data.updateGlossary.readme == g1.readme + '(updated description)' + + +def test_update_category(client, c1, group): + r = client.query( + """ + mutation UpdateCategory( + $nodeUri:String!, + $input:UpdateCategoryInput! 
+ ){ + updateCategory( + nodeUri:$nodeUri, + input:$input + ){ + nodeUri + label + readme + } + } + """, + nodeUri=c1.nodeUri, + input={'readme': c1.readme + '(updated description)'}, + username='alice', + groups=[group.name], + ) + assert r.data.updateCategory.readme == c1.readme + '(updated description)' + + +def test_delete_subcategory(client, subcategory, group): + r = client.query( + """ + mutation DeleteCategory( + $nodeUri:String!, + ){ + deleteCategory( + nodeUri:$nodeUri, + ) + } + """, + nodeUri=subcategory.nodeUri, + username='alice', + groups=[group.name], + ) + print(r) + + +def test_delete_category(client, db, c1, group): + now = datetime.now() + r = client.query( + """ + mutation DeleteCategory( + $nodeUri:String!, + ){ + deleteCategory( + nodeUri:$nodeUri, + ) + } + """, + nodeUri=c1.nodeUri, + username='alice', + groups=[group.name], + ) + with db.scoped_session() as session: + node = session.query(GlossaryNode).get(c1.nodeUri) + assert node.deleted >= now + + +def test_list_glossaries_after_delete(client): + response = client.query( + """ + query ListGlossaries{ + listGlossaries{ + count + nodes{ + nodeUri + children{ + count + nodes{ + __typename + ... on Category{ + label + nodeUri + path + } + ... on Term{ + label + nodeUri + path + } + } + } + stats{ + categories + terms + associations + } + } + } + } + """ + ) + assert response.data.listGlossaries.count == 1 + assert response.data.listGlossaries.nodes[0].stats.categories == 0 + + +def test_hierarchical_search_after_delete(client): + response = client.query( + """ + query SearchGlossary($filter:GlossaryNodeSearchFilter){ + searchGlossary(filter:$filter){ + count + page + pages + hasNext + hasPrevious + nodes{ + __typename + ...on Glossary{ + nodeUri + label + readme + created + owner + path + } + ...on Category{ + nodeUri + label + parentUri + readme + created + owner + path + } + ...on Term{ + nodeUri + parentUri + label + readme + created + owner + path + } + + } + } + } + """ + ) + assert response.data.searchGlossary.count == 1 diff --git a/tests/modules/conftest.py b/tests/modules/conftest.py new file mode 100644 index 000000000..b966cb388 --- /dev/null +++ b/tests/modules/conftest.py @@ -0,0 +1,138 @@ +from typing import Dict + +import pytest + +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup, EnvironmentParameter +from dataall.core.organizations.db.organization_models import Organization +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.permissions.permissions import ENVIRONMENT_ALL +from dataall.core.stacks.db.stack_repositories import Stack +from dataall.core.stacks.db.stack_models import KeyValueTag + + +@pytest.fixture(scope='module', autouse=True) +def patch_es(module_mocker): + module_mocker.patch('dataall.base.searchproxy.connect', return_value={}) + module_mocker.patch('dataall.base.searchproxy.search', return_value={}) + module_mocker.patch('dataall.modules.catalog.indexers.base_indexer.BaseIndexer.delete_doc', return_value={}) + module_mocker.patch('dataall.modules.catalog.indexers.base_indexer.BaseIndexer._index', return_value={}) + + +@pytest.fixture(scope="module") +def environment_group(db): + def factory( + environment: Environment, group: str + ) -> EnvironmentGroup: + with db.scoped_session() as session: + env_group = EnvironmentGroup( + environmentUri=environment.environmentUri, + groupUri=group, + environmentIAMRoleArn=environment.EnvironmentDefaultIAMRoleArn, + 
environmentIAMRoleName=environment.EnvironmentDefaultIAMRoleName, + environmentAthenaWorkGroup="workgroup", + ) + session.add(env_group) + session.commit() + ResourcePolicy.attach_resource_policy( + session=session, + resource_uri=environment.environmentUri, + group=group, + permissions=ENVIRONMENT_ALL, + resource_type=Environment.__name__, + ) + session.commit() + return env_group + + yield factory + + +def _create_env_params(session, env: Environment, params: Dict[str, str]): + if params: + for key, value in params.items(): + param = EnvironmentParameter( + env_uri=env.environmentUri, key=key, value=value, + ) + session.add(param) + session.commit() + + +def _create_env_stack(session, env): + tags = KeyValueTag( + targetType='environment', + targetUri=env.environmentUri, + key='CREATOR', + value='customtagowner', + ) + session.add(tags) + + Stack.create_stack( + session=session, + environment_uri=env.environmentUri, + target_type='environment', + target_uri=env.environmentUri, + target_label=env.label, + ) + + +@pytest.fixture(scope='module', autouse=True) +def env(db, environment_group): + def factory(org, envname, owner, group, account, region='eu-west-1', desc='test', role='iam_role', parameters=None): + with db.scoped_session() as session: + env = Environment( + organizationUri=org.organizationUri, + AwsAccountId=account, + region=region, + label=envname, + owner=owner, + tags=[], + description=desc, + SamlGroupName=group, + EnvironmentDefaultIAMRoleName=role, + EnvironmentDefaultIAMRoleArn=f"arn:aws:iam::{account}:role/{role}", + EnvironmentDefaultBucketName="defaultbucketname1234567789", + CDKRoleArn=f"arn:aws::{account}:role/EnvRole", + EnvironmentDefaultAthenaWorkGroup="DefaultWorkGroup" + ) + session.add(env) + session.commit() + _create_env_params(session, env, parameters) + _create_env_stack(session, env) + + return env + + yield factory + + +@pytest.fixture(scope='module', autouse=True) +def org(db): + def factory(name, group, user): + with db.scoped_session() as session: + org = Organization( + label=name, + name=name, + description=name, + owner=user.username, + SamlGroupName=group.name, + ) + session.add(org) + session.commit() + return org + yield factory + + +@pytest.fixture(scope='module') +def org_fixture(org, group, user): + return org('testorg', group, user) + + +@pytest.fixture(scope='module') +def env_params(): + # Can be overridden in the submodules + return {} + + +@pytest.fixture(scope='module') +def env_fixture(env, environment_group, org_fixture, user, group, tenant, env_params): + env1 = env(org_fixture, 'dev', 'alice', 'testadmins', '111111111111', parameters=env_params) + environment_group(env1, group.name) + yield env1 diff --git a/tests/modules/dashboards/__init__.py b/tests/modules/dashboards/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/modules/dashboards/conftest.py b/tests/modules/dashboards/conftest.py new file mode 100644 index 000000000..26c9f0815 --- /dev/null +++ b/tests/modules/dashboards/conftest.py @@ -0,0 +1,49 @@ +from unittest.mock import MagicMock + +import pytest + + +@pytest.fixture(scope='module', autouse=True) +def env_params(): + # Overrides environment parameters for env_fixture + yield {"dashboardsEnabled": "true"} + + +@pytest.fixture(scope='module') +def dashboard(client, env_fixture, group, module_mocker): + mock_client = MagicMock() + module_mocker.patch( + 'dataall.modules.dashboards.services.dashboard_service.DashboardQuicksightClient', + mock_client + ) + response = client.query( + """ + 
mutation importDashboard( + $input:ImportDashboardInput, + ){ + importDashboard(input:$input){ + dashboardUri + name + label + DashboardId + created + owner + SamlGroupName + upvotes + userRoleForDashboard + } + } + """, + input={ + 'dashboardId': f'1234', + 'label': f'1234', + 'environmentUri': env_fixture.environmentUri, + 'SamlGroupName': group.name, + 'terms': ['term'], + }, + username='alice', + groups=[group.name], + ) + assert response.data.importDashboard.owner == 'alice' + assert response.data.importDashboard.SamlGroupName == group.name + yield response.data.importDashboard diff --git a/tests/modules/dashboards/test_dashboard_votes.py b/tests/modules/dashboards/test_dashboard_votes.py new file mode 100644 index 000000000..f56bd86b8 --- /dev/null +++ b/tests/modules/dashboards/test_dashboard_votes.py @@ -0,0 +1,40 @@ +from tests.modules.vote.test_vote import upvote_mutation, count_votes_query, get_vote_query + + +def test_dashboard_count_votes(client, dashboard, env_fixture): + response = count_votes_query( + client, dashboard.dashboardUri, 'dashboard', env_fixture.SamlGroupName + ) + assert response.data.countUpVotes == 0 + + +def test_dashboard_upvote(patch_es, client, env_fixture, dashboard): + + response = upvote_mutation( + client, dashboard.dashboardUri, 'dashboard', True, env_fixture.SamlGroupName + ) + assert response.data.upVote.upvote + response = count_votes_query( + client, dashboard.dashboardUri, 'dashboard', env_fixture.SamlGroupName + ) + assert response.data.countUpVotes == 1 + response = get_vote_query( + client, dashboard.dashboardUri, 'dashboard', env_fixture.SamlGroupName + ) + assert response.data.getVote.upvote + + response = upvote_mutation( + client, dashboard.dashboardUri, 'dashboard', False, env_fixture.SamlGroupName + ) + + assert not response.data.upVote.upvote + + response = get_vote_query( + client, dashboard.dashboardUri, 'dashboard', env_fixture.SamlGroupName + ) + assert not response.data.getVote.upvote + + response = count_votes_query( + client, dashboard.dashboardUri, 'dashboard', env_fixture.SamlGroupName + ) + assert response.data.countUpVotes == 0 \ No newline at end of file diff --git a/tests/modules/dashboards/test_dashboards.py b/tests/modules/dashboards/test_dashboards.py new file mode 100644 index 000000000..ec8e2e158 --- /dev/null +++ b/tests/modules/dashboards/test_dashboards.py @@ -0,0 +1,328 @@ +def test_update_dashboard( + client, env_fixture, group, patch_es, dashboard +): + response = client.query( + """ + mutation updateDashboard( + $input:UpdateDashboardInput, + ){ + updateDashboard(input:$input){ + dashboardUri + name + label + DashboardId + created + owner + SamlGroupName + } + } + """, + input={ + 'dashboardUri': dashboard.dashboardUri, + 'label': f'1234', + 'terms': ['term2'], + }, + username='alice', + groups=[group.name], + ) + assert response.data.updateDashboard.owner == 'alice' + assert response.data.updateDashboard.SamlGroupName == group.name + + +def test_list_dashboards(client, env_fixture, db, dashboard): + response = client.query( + """ + query searchDashboards($filter:DashboardFilter!){ + searchDashboards(filter:$filter){ + count + nodes{ + dashboardUri + } + } + } + """, + filter={}, + username='alice', + ) + assert len(response.data.searchDashboards['nodes']) == 1 + + +def test_nopermissions_list_dashboards(client, env_fixture, db, dashboard): + response = client.query( + """ + query searchDashboards($filter:DashboardFilter!){ + searchDashboards(filter:$filter){ + count + nodes{ + dashboardUri + } + } + } + """, + 
filter={}, + username='bob', + ) + assert len(response.data.searchDashboards['nodes']) == 0 + + +def test_get_dashboard(client, env_fixture, db, dashboard, group): + response = client.query( + """ + query GetDashboard($dashboardUri:String!){ + getDashboard(dashboardUri:$dashboardUri){ + dashboardUri + name + owner + SamlGroupName + description + label + created + tags + environment{ + label + region + } + organization{ + organizationUri + label + name + } + } + } + """, + dashboardUri=dashboard.dashboardUri, + username='alice', + groups=[group.name], + ) + assert response.data.getDashboard.owner == 'alice' + assert response.data.getDashboard.SamlGroupName == group.name + + +def test_request_dashboard_share( + client, + env_fixture, + db, + user, + group, + module_mocker, + dashboard, + patch_es, + group2, + user2, +): + response = client.query( + """ + mutation requestDashboardShare($dashboardUri:String!, $principalId:String!){ + requestDashboardShare(dashboardUri:$dashboardUri, principalId:$principalId){ + shareUri + status + } + } + """, + dashboardUri=dashboard.dashboardUri, + principalId=group2.name, + username=user2.username, + groups=[group2.name], + ) + share = response.data.requestDashboardShare + assert share.shareUri + assert share.status == 'REQUESTED' + + response = client.query( + """ + query searchDashboards($filter:DashboardFilter!){ + searchDashboards(filter:$filter){ + count + nodes{ + dashboardUri + userRoleForDashboard + } + } + } + """, + filter={}, + username=user2.username, + groups=[group2.name], + ) + assert len(response.data.searchDashboards['nodes']) == 0 + + response = client.query( + """ + mutation approveDashboardShare($shareUri:String!){ + approveDashboardShare(shareUri:$shareUri){ + shareUri + status + } + } + """, + shareUri=share.shareUri, + username=user.username, + groups=[group.name], + ) + assert response.data.approveDashboardShare.status == 'APPROVED' + + response = client.query( + """ + query searchDashboards($filter:DashboardFilter!){ + searchDashboards(filter:$filter){ + count + nodes{ + dashboardUri + userRoleForDashboard + } + } + } + """, + filter={}, + username=user2.username, + groups=[group2.name], + ) + assert len(response.data.searchDashboards['nodes']) == 1 + + response = client.query( + """ + query listDashboardShares($dashboardUri:String!,$filter:DashboardShareFilter!){ + listDashboardShares(dashboardUri:$dashboardUri,filter:$filter){ + count + nodes{ + dashboardUri + shareUri + } + } + } + """, + filter={}, + dashboardUri=dashboard.dashboardUri, + username=user.username, + groups=[group.name], + ) + assert len(response.data.listDashboardShares['nodes']) == 1 + + response = client.query( + """ + query GetDashboard($dashboardUri:String!){ + getDashboard(dashboardUri:$dashboardUri){ + dashboardUri + name + owner + SamlGroupName + description + label + created + tags + environment{ + label + region + } + organization{ + organizationUri + label + name + } + } + } + """, + dashboardUri=dashboard.dashboardUri, + username=user2.username, + groups=[group2.name], + ) + assert response.data.getDashboard.owner == 'alice' + assert response.data.getDashboard.SamlGroupName == group.name + + response = client.query( + """ + mutation rejectDashboardShare($shareUri:String!){ + rejectDashboardShare(shareUri:$shareUri){ + shareUri + status + } + } + """, + shareUri=share.shareUri, + username=user.username, + groups=[group.name], + ) + assert response.data.rejectDashboardShare.status == 'REJECTED' + + response = client.query( + """ + query 
searchDashboards($filter:DashboardFilter!){ + searchDashboards(filter:$filter){ + count + nodes{ + dashboardUri + userRoleForDashboard + } + } + } + """, + filter={}, + username=user2.username, + groups=[group2.name], + ) + assert len(response.data.searchDashboards['nodes']) == 0 + + response = client.query( + """ + mutation shareDashboard($dashboardUri:String!, $principalId:String!){ + shareDashboard(dashboardUri:$dashboardUri, principalId:$principalId){ + shareUri + status + } + } + """, + dashboardUri=dashboard.dashboardUri, + principalId=group2.name, + username=user.username, + groups=[group.name], + ) + assert response.data.shareDashboard.shareUri + + response = client.query( + """ + query searchDashboards($filter:DashboardFilter!){ + searchDashboards(filter:$filter){ + count + nodes{ + dashboardUri + userRoleForDashboard + } + } + } + """, + filter={}, + username=user2.username, + groups=[group2.name], + ) + assert len(response.data.searchDashboards['nodes']) == 1 + + +def test_delete_dashboard( + client, env_fixture, db, user, group, module_mocker, dashboard, patch_es +): + response = client.query( + """ + mutation deleteDashboard($dashboardUri:String!){ + deleteDashboard(dashboardUri:$dashboardUri) + } + """, + dashboardUri=dashboard.dashboardUri, + username=user.username, + groups=[group.name], + ) + assert response.data.deleteDashboard + response = client.query( + """ + query searchDashboards($filter:DashboardFilter!){ + searchDashboards(filter:$filter){ + count + nodes{ + dashboardUri + } + } + } + """, + filter={}, + username='alice', + ) + assert len(response.data.searchDashboards['nodes']) == 0 diff --git a/tests/modules/datapipelines/__init__.py b/tests/modules/datapipelines/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/modules/datapipelines/conftest.py b/tests/modules/datapipelines/conftest.py new file mode 100644 index 000000000..158ba6c08 --- /dev/null +++ b/tests/modules/datapipelines/conftest.py @@ -0,0 +1,38 @@ +import pytest + + +@pytest.fixture(scope='module') +def env_params(env, org_fixture, user, group, tenant): + # Overrides the env_fixture environment parameters + yield {'pipelinesEnabled': 'True'} + + +@pytest.fixture(scope='module', autouse=True) +def pipeline(client, tenant, group, env_fixture): + response = client.query( + """ + mutation createDataPipeline ($input:NewDataPipelineInput){ + createDataPipeline(input:$input){ + DataPipelineUri + label + description + tags + owner + repo + userRoleForPipeline + } + } + """, + input={ + 'label': 'my pipeline', + 'SamlGroupName': group.name, + 'tags': [group.name], + 'environmentUri': env_fixture.environmentUri, + 'devStrategy': 'trunk', + }, + username='alice', + groups=[group.name], + ) + assert response.data.createDataPipeline.repo + assert response.data.createDataPipeline.DataPipelineUri + return response.data.createDataPipeline diff --git a/tests/modules/datapipelines/test_datapipelines.py b/tests/modules/datapipelines/test_datapipelines.py new file mode 100644 index 000000000..f38af03f0 --- /dev/null +++ b/tests/modules/datapipelines/test_datapipelines.py @@ -0,0 +1,182 @@ +import pytest + + +def test_create_pipeline_environment(client, tenant, group, env_fixture, pipeline): + response = client.query( + """ + mutation createDataPipelineEnvironment($input: NewDataPipelineEnvironmentInput) { + createDataPipelineEnvironment(input: $input) { + envPipelineUri + environmentUri + environmentLabel + pipelineUri + pipelineLabel + stage + region + AwsAccountId + samlGroupName + } + } + 
""", + input={ + 'stage': 'dev', + 'order': 1, + 'pipelineUri': pipeline.DataPipelineUri, + 'environmentUri': env_fixture.environmentUri, + 'environmentLabel': env_fixture.label, + 'samlGroupName': group.name + }, + username='alice', + groups=[group.name], + ) + assert response.data.createDataPipelineEnvironment.envPipelineUri + assert response.data.createDataPipelineEnvironment.stage == 'dev' + assert response.data.createDataPipelineEnvironment.AwsAccountId == env_fixture.AwsAccountId + + +def test_update_pipeline(client, tenant, group, pipeline): + response = client.query( + """ + mutation updateDataPipeline ($DataPipelineUri:String!,$input:UpdateDataPipelineInput){ + updateDataPipeline(DataPipelineUri:$DataPipelineUri,input:$input){ + DataPipelineUri + label + description + tags + owner + repo + userRoleForPipeline + } + } + """, + DataPipelineUri=pipeline.DataPipelineUri, + input={ + 'label': 'changed pipeline', + 'tags': [group.name], + }, + username='alice', + groups=[group.name], + ) + assert response.data.updateDataPipeline.label == 'changed pipeline' + + +def test_list_pipelines(client, env_fixture, db, user, group, pipeline): + response = client.query( + """ + query ListDataPipelines($filter:DataPipelineFilter){ + listDataPipelines(filter:$filter){ + count + nodes{ + DataPipelineUri + cloneUrlHttp + environment { + environmentUri + } + organization { + organizationUri + } + } + } + } + """, + filter=None, + username=user.username, + groups=[group.name], + ) + assert len(response.data.listDataPipelines['nodes']) == 1 + + +def test_nopermissions_pipelines(client, env_fixture, db, user, group, pipeline): + response = client.query( + """ + query listDataPipelines($filter:DataPipelineFilter){ + listDataPipelines(filter:$filter){ + count + nodes{ + DataPipelineUri + } + } + } + """, + filter=None, + username='bob', + ) + assert len(response.data.listDataPipelines['nodes']) == 0 + + +def test_get_pipeline(client, env_fixture, db, user, group, pipeline, module_mocker): + module_mocker.patch( + 'dataall.modules.datapipelines.services.datapipelines_service.DataPipelineService._get_creds_from_aws', + return_value=True, + ) + response = client.query( + """ + query getDataPipeline($DataPipelineUri:String!){ + getDataPipeline(DataPipelineUri:$DataPipelineUri){ + DataPipelineUri + } + } + """, + DataPipelineUri=pipeline.DataPipelineUri, + username=user.username, + groups=[group.name], + ) + assert response.data.getDataPipeline.DataPipelineUri == pipeline.DataPipelineUri + response = client.query( + """ + query getDataPipelineCredsLinux($DataPipelineUri:String!){ + getDataPipelineCredsLinux(DataPipelineUri:$DataPipelineUri) + } + """, + DataPipelineUri=pipeline.DataPipelineUri, + username=user.username, + groups=[group.name], + ) + assert response.data.getDataPipelineCredsLinux + + module_mocker.patch( + 'dataall.modules.datapipelines.services.datapipelines_service.DataPipelineService.ls', + return_value=[{'response': 'return value'}], + ) + response = client.query( + """ + query browseDataPipelineRepository($input:DataPipelineBrowseInput!){ + browseDataPipelineRepository(input:$input) + } + """, + input=dict(branch='master', DataPipelineUri=pipeline.DataPipelineUri), + username=user.username, + groups=[group.name], + ) + assert response.data.browseDataPipelineRepository + + +def test_delete_pipelines(client, env_fixture, db, user, group, pipeline): + response = client.query( + """ + mutation deleteDataPipeline($DataPipelineUri:String!,$deleteFromAWS:Boolean){ + 
deleteDataPipeline(DataPipelineUri:$DataPipelineUri,deleteFromAWS:$deleteFromAWS) + } + """, + DataPipelineUri=pipeline.DataPipelineUri, + deleteFromAWS=True, + username=user.username, + groups=[group.name], + ) + assert response.data.deleteDataPipeline + response = client.query( + """ + query ListDataPipelines($filter:DataPipelineFilter){ + listDataPipelines(filter:$filter){ + count + nodes{ + DataPipelineUri + } + } + } + """, + filter=None, + username=user.username, + groups=[group.name], + ) + assert len(response.data.listDataPipelines['nodes']) == 0 diff --git a/tests/modules/datapipelines/test_datapipelines_stack.py b/tests/modules/datapipelines/test_datapipelines_stack.py new file mode 100644 index 000000000..e0df11b2c --- /dev/null +++ b/tests/modules/datapipelines/test_datapipelines_stack.py @@ -0,0 +1,17 @@ +def test_datapipelines_update_stack_query(client, group, pipeline): + response = client.query( + """ + mutation updateStack($targetUri:String!, $targetType:String!){ + updateStack(targetUri:$targetUri, targetType:$targetType){ + stackUri + targetUri + name + } + } + """, + targetUri=pipeline.DataPipelineUri, + targetType='pipeline', + username='alice', + groups=[group.name], + ) + assert response.data.updateStack.targetUri == pipeline.DataPipelineUri diff --git a/tests/modules/datapipelines/test_pipeline_stack.py b/tests/modules/datapipelines/test_pipeline_stack.py new file mode 100644 index 000000000..78077f31d --- /dev/null +++ b/tests/modules/datapipelines/test_pipeline_stack.py @@ -0,0 +1,106 @@ +import os + +import pytest +from aws_cdk import App +from aws_cdk.assertions import Template + +from dataall.core.environment.db.environment_models import Environment +from dataall.modules.datapipelines.cdk.datapipelines_pipeline import PipelineStack +from dataall.modules.datapipelines.db.datapipelines_models import DataPipeline, DataPipelineEnvironment +from dataall.modules.datapipelines.db.datapipelines_repositories import DatapipelinesRepository + +@pytest.fixture(scope='module', autouse=True) +def pipeline_db(db, env_fixture: Environment, group) -> DataPipeline: + with db.scoped_session() as session: + pipeline = DataPipeline( + label='thistable', + owner='me', + AwsAccountId=env_fixture.AwsAccountId, + region=env_fixture.region, + environmentUri=env_fixture.environmentUri, + repo='pipeline', + SamlGroupName=group.name, + devStrategy='trunk' + ) + session.add(pipeline) + yield pipeline + + +@pytest.fixture(scope='module', autouse=True) +def pip_envs(db, env_fixture: Environment, pipeline_db: DataPipeline) -> DataPipelineEnvironment: + with db.scoped_session() as session: + pipeline_env2 = DataPipelineEnvironment( + owner='me', + label=f"{pipeline_db.label}-{env_fixture.label}", + environmentUri=env_fixture.environmentUri, + environmentLabel=env_fixture.label, + pipelineUri=pipeline_db.DataPipelineUri, + pipelineLabel=pipeline_db.label, + envPipelineUri=f"{pipeline_db.DataPipelineUri}{env_fixture.environmentUri}", + AwsAccountId=env_fixture.AwsAccountId, + region=env_fixture.region, + stage='dev', + order=1, + samlGroupName='admins' + ) + + session.add(pipeline_env2) + + yield DatapipelinesRepository.query_pipeline_environments(session=session, uri=pipeline_db.DataPipelineUri) + + +@pytest.fixture(scope='function', autouse=True) +def patch_methods(mocker, db, pipeline_db, env_fixture, pip_envs, org_fixture): + mocker.patch( + 'dataall.modules.datapipelines.cdk.datapipelines_pipeline.PipelineStack.get_engine', + return_value=db, + ) + mocker.patch( + 
'dataall.base.aws.sts.SessionHelper.get_delegation_role_name', + return_value="dataall-pivot-role-name-pytest", + ) + mocker.patch( + 'dataall.modules.datapipelines.cdk.datapipelines_pipeline.PipelineStack.get_target', + return_value=pipeline_db, + ) + mocker.patch( + 'dataall.modules.datapipelines.cdk.datapipelines_pipeline.PipelineStack.get_pipeline_cicd_environment', + return_value=env_fixture, + ) + mocker.patch( + 'dataall.modules.datapipelines.cdk.datapipelines_pipeline.PipelineStack.get_pipeline_environments', + return_value=pip_envs, + ) + mocker.patch( + 'dataall.modules.datapipelines.cdk.datapipelines_pipeline.PipelineStack._set_env_vars', + return_value=(os.environ, True) + ) + mocker.patch( + 'dataall.modules.datapipelines.cdk.datapipelines_pipeline.PipelineStack._check_repository', + return_value=False + ) + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_engine', return_value=db + ) + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_target', + return_value=pipeline_db, + ) + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_environment', + return_value=env_fixture, + ) + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_organization', + return_value=org_fixture, + ) + + +def test_resources_created(pipeline_db): + app = App() + stack = PipelineStack(app, 'Pipeline', target_uri=pipeline_db.DataPipelineUri) + template = Template.from_stack(stack) + # TODO: Add more assertions + template.resource_count_is("AWS::CodeCommit::Repository", 1) + template.resource_count_is("AWS::CodePipeline::Pipeline", 1) + template.resource_count_is("AWS::CodeBuild::Project", 1) diff --git a/tests/modules/datasets/__init__.py b/tests/modules/datasets/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/modules/datasets/conftest.py b/tests/modules/datasets/conftest.py new file mode 100644 index 000000000..33342b44d --- /dev/null +++ b/tests/modules/datasets/conftest.py @@ -0,0 +1,400 @@ +import random +from unittest.mock import MagicMock + +import pytest + +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup +from dataall.core.organizations.db.organization_models import Organization +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.core.stacks.db.stack_models import Stack +from dataall.modules.dataset_sharing.db.enums import ShareableType, PrincipalType +from dataall.modules.dataset_sharing.db.share_object_models import ShareObject, ShareObjectItem +from dataall.modules.dataset_sharing.services.share_permissions import SHARE_OBJECT_REQUESTER, SHARE_OBJECT_APPROVER +from dataall.modules.datasets.api.dataset.enums import ConfidentialityClassification +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ +from dataall.modules.datasets import Dataset, DatasetTable, DatasetStorageLocation + + +@pytest.fixture(scope='module', autouse=True) +def patch_dataset_methods(module_mocker): + module_mocker.patch( + 'dataall.modules.datasets.services.dataset_service.DatasetService.check_dataset_account', return_value=True + ) + module_mocker.patch( + 'dataall.modules.datasets.services.dataset_service.DatasetService._deploy_dataset_stack', + return_value=True + ) + s3_mock_client = MagicMock() + glue_mock_client = MagicMock() + module_mocker.patch( + 'dataall.modules.datasets.services.dataset_profiling_service.S3ProfilerClient', s3_mock_client + ) + 
module_mocker.patch( + 'dataall.modules.datasets.services.dataset_profiling_service.GlueDatasetProfilerClient', glue_mock_client + ) + s3_mock_client().get_profiling_results_from_s3.return_value = '{"results": "yes"}' + glue_mock_client().run_job.return_value = True + + +@pytest.fixture(scope='module', autouse=True) +def dataset(client, patch_es, patch_dataset_methods): + cache = {} + + def factory( + org: Organization, + env: Environment, + name: str, + owner: str, + group: str, + confidentiality: str = None + ) -> Dataset: + key = f'{org.organizationUri}-{env.environmentUri}-{name}-{group}' + if cache.get(key): + print('found in cache ', cache[key]) + return cache.get(key) + response = client.query( + """ + mutation CreateDataset($input:NewDatasetInput){ + createDataset( + input:$input + ){ + datasetUri + label + description + AwsAccountId + S3BucketName + GlueDatabaseName + owner + region, + businessOwnerEmail + businessOwnerDelegationEmails + SamlAdminGroupName + GlueCrawlerName + tables{ + nodes{ + tableUri + } + } + locations{ + nodes{ + locationUri + } + } + stack{ + stack + status + stackUri + targetUri + accountid + region + stackid + link + outputs + resources + + } + topics + language + confidentiality + organization{ + organizationUri + label + } + shares{ + nodes{ + shareUri + } + } + terms{ + count + nodes{ + __typename + ...on Term { + nodeUri + path + label + } + } + } + environment{ + environmentUri + label + region + subscriptionsEnabled + subscriptionsProducersTopicImported + subscriptionsConsumersTopicImported + subscriptionsConsumersTopicName + subscriptionsProducersTopicName + organization{ + organizationUri + label + } + } + statistics{ + tables + locations + upvotes + } + } + } + """, + username=owner, + groups=[group], + input={ + 'owner': owner, + 'label': f'{name}', + 'description': 'test dataset {name}', + 'businessOwnerEmail': 'jeff@amazon.com', + 'tags': random_tags(), + 'businessOwnerDelegationEmails': random_emails(), + 'environmentUri': env.environmentUri, + 'SamlAdminGroupName': group or random_group(), + 'organizationUri': org.organizationUri, + 'confidentiality': confidentiality or ConfidentialityClassification.Unclassified.value + + }, + ) + print('==>', response) + return response.data.createDataset + + yield factory + + +@pytest.fixture(scope='module', autouse=True) +def table(db): + cache = {} + + def factory(dataset: Dataset, name, username) -> DatasetTable: + key = f'{dataset.datasetUri}-{name}' + if cache.get(key): + return cache.get(key) + with db.scoped_session() as session: + table = DatasetTable( + name=name, + label=name, + owner=username, + datasetUri=dataset.datasetUri, + GlueDatabaseName=dataset.GlueDatabaseName, + GlueTableName=name, + region=dataset.region, + AWSAccountId=dataset.AwsAccountId, + S3BucketName=dataset.S3BucketName, + S3Prefix=f'{name}', + ) + session.add(table) + session.commit() + + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.SamlAdminGroupName, + permissions=DATASET_TABLE_READ, + resource_uri=table.tableUri, + resource_type=DatasetTable.__name__ + ) + return table + + yield factory + + +@pytest.fixture(scope='module') +def dataset_fixture(env_fixture, org_fixture, dataset, group) -> Dataset: + yield dataset( + org=org_fixture, + env=env_fixture, + name='dataset1', + owner=env_fixture.owner, + group=group.name, + ) + + +@pytest.fixture(scope='module') +def table_fixture(db, dataset_fixture, table, group, user): + table1 = table(dataset=dataset_fixture, name="table1", username=user.username) + 
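+    # Note: the table factory above already attaches DATASET_TABLE_READ for the
+    # dataset admin group; the grant below adds the same permission explicitly
+    # for the test group fixture.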
+ with db.scoped_session() as session: + ResourcePolicy.attach_resource_policy( + session=session, + group=group.groupUri, + permissions=DATASET_TABLE_READ, + resource_uri=table1.tableUri, + resource_type=DatasetTable.__name__, + ) + yield table1 + + +@pytest.fixture(scope='module') +def folder_fixture(db, dataset_fixture): + with db.scoped_session() as session: + location = DatasetStorageLocation( + datasetUri=dataset_fixture.datasetUri, + AWSAccountId='12345678901', + S3Prefix='S3prefix', + label='label', + owner='foo', + name='name', + S3BucketName='S3BucketName', + region='eu-west-1', + ) + session.add(location) + yield location + + +@pytest.fixture(scope="module") +def dataset_model(db): + def factory( + organization: Organization, + environment: Environment, + label: str, + ) -> Dataset: + with db.scoped_session() as session: + dataset = Dataset( + organizationUri=organization.organizationUri, + environmentUri=environment.environmentUri, + label=label, + owner=environment.owner, + stewards=environment.SamlGroupName, + SamlAdminGroupName=environment.SamlGroupName, + businessOwnerDelegationEmails=["foo@amazon.com"], + name=label, + S3BucketName=label, + GlueDatabaseName="gluedatabase", + KmsAlias="kmsalias", + AwsAccountId=environment.AwsAccountId, + region=environment.region, + IAMDatasetAdminUserArn=f"arn:aws:iam::{environment.AwsAccountId}:user/dataset", + IAMDatasetAdminRoleArn=f"arn:aws:iam::{environment.AwsAccountId}:role/dataset", + ) + session.add(dataset) + session.commit() + return dataset + + yield factory + + +@pytest.fixture(scope='module', autouse=True) +def location(db): + cache = {} + + def factory(dataset: Dataset, name, username) -> DatasetStorageLocation: + key = f'{dataset.datasetUri}-{name}' + if cache.get(key): + return cache.get(key) + with db.scoped_session() as session: + ds_location = DatasetStorageLocation( + name=name, + label=name, + owner=username, + datasetUri=dataset.datasetUri, + S3BucketName=dataset.S3BucketName, + region=dataset.region, + AWSAccountId=dataset.AwsAccountId, + S3Prefix=f'{name}', + ) + session.add(ds_location) + return ds_location + + yield factory + + +@pytest.fixture(scope="module") +def share_item(db): + def factory( + share: ShareObject, + table: DatasetTable, + status: str + ) -> ShareObjectItem: + with db.scoped_session() as session: + share_item = ShareObjectItem( + shareUri=share.shareUri, + owner="alice", + itemUri=table.tableUri, + itemType=ShareableType.Table.value, + itemName=table.name, + status=status, + ) + session.add(share_item) + session.commit() + return share_item + + yield factory + + +@pytest.fixture(scope="module") +def share(db): + def factory( + dataset: Dataset, + environment: Environment, + env_group: EnvironmentGroup, + owner: str, + status: str + ) -> ShareObject: + with db.scoped_session() as session: + share = ShareObject( + datasetUri=dataset.datasetUri, + environmentUri=environment.environmentUri, + owner=owner, + groupUri=env_group.groupUri, + principalId=env_group.groupUri, + principalType=PrincipalType.Group.value, + principalIAMRoleName=env_group.environmentIAMRoleName, + status=status, + ) + session.add(share) + session.commit() + + ResourcePolicy.attach_resource_policy( + session=session, + group=env_group.groupUri, + permissions=SHARE_OBJECT_REQUESTER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.SamlAdminGroupName, + permissions=SHARE_OBJECT_APPROVER, + resource_uri=share.shareUri, + 
resource_type=ShareObject.__name__, + ) + ResourcePolicy.attach_resource_policy( + session=session, + group=dataset.stewards, + permissions=SHARE_OBJECT_APPROVER, + resource_uri=share.shareUri, + resource_type=ShareObject.__name__, + ) + session.commit() + return share + + yield factory + + +def random_email(): + names = ['andy', 'bill', 'satya', 'sundar'] + corps = ['google.com', 'amazon.com', 'microsoft.com'] + return f'{random.choice(names)}@{random.choice(corps)}' + + +def random_emails(): + emails = [] + for i in range(1, 2 + random.choice([2, 3, 4])): + emails.append(random_email()) + return emails + + +def random_group(): + prefixes = ['big', 'small', 'pretty', 'shiny'] + names = ['team', 'people', 'group'] + lands = ['snow', 'ice', 'green', 'high'] + return f'{random.choice(prefixes).capitalize()}{random.choice(names).capitalize()}From{random.choice(lands).capitalize()}land' + + +def random_tag(): + return random.choice( + ['sales', 'finances', 'sites', 'people', 'products', 'partners', 'operations'] + ) + + +def random_tags(): + return [random_tag() for i in range(1, random.choice([2, 3, 4, 5]))] + diff --git a/tests/modules/datasets/tasks/__init__.py b/tests/modules/datasets/tasks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/modules/datasets/tasks/conftest.py b/tests/modules/datasets/tasks/conftest.py new file mode 100644 index 000000000..43f888fe6 --- /dev/null +++ b/tests/modules/datasets/tasks/conftest.py @@ -0,0 +1,152 @@ +import pytest + +from dataall.core.cognito_groups.db.cognito_group_models import Group +from dataall.core.organizations.db.organization_models import Organization +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup +from dataall.modules.dataset_sharing.db.enums import ShareableType, ShareItemStatus, ShareObjectStatus, PrincipalType +from dataall.modules.dataset_sharing.db.share_object_models import ShareObjectItem, ShareObject +from dataall.modules.datasets_base.db.dataset_models import DatasetStorageLocation, DatasetTable, Dataset + + +@pytest.fixture(scope="module") +def create_dataset(db): + def factory( + organization: Organization, + environment: Environment, + label: str, + ) -> Dataset: + with db.scoped_session() as session: + dataset = Dataset( + organizationUri=organization.organizationUri, + environmentUri=environment.environmentUri, + label=label, + owner=environment.owner, + SamlAdminGroupName=environment.SamlGroupName, + businessOwnerDelegationEmails=["foo@amazon.com"], + name=label, + S3BucketName=label, + GlueDatabaseName="gluedatabase", + KmsAlias="kmsalias", + AwsAccountId=environment.AwsAccountId, + region=environment.region, + IAMDatasetAdminUserArn=f"arn:aws:iam::{environment.AwsAccountId}:user/dataset", + IAMDatasetAdminRoleArn=f"arn:aws:iam::{environment.AwsAccountId}:role/dataset", + ) + session.add(dataset) + session.commit() + return dataset + + yield factory + + +@pytest.fixture(scope="module") +def location(db): + def factory(dataset: Dataset, label: str) -> DatasetStorageLocation: + + with db.scoped_session() as session: + ds_location = DatasetStorageLocation( + name=label, + label=label, + owner=dataset.owner, + datasetUri=dataset.datasetUri, + S3BucketName=dataset.S3BucketName, + region=dataset.region, + AWSAccountId=dataset.AwsAccountId, + S3Prefix=f"{label}", + ) + session.add(ds_location) + return ds_location + + yield factory + + +@pytest.fixture(scope='module') +def table(db): + def factory(dataset: Dataset, label: str) -> DatasetTable: + + with 
db.scoped_session() as session: + table = DatasetTable( + name=label, + label=label, + owner=dataset.owner, + datasetUri=dataset.datasetUri, + GlueDatabaseName=dataset.GlueDatabaseName, + GlueTableName=label, + region=dataset.region, + AWSAccountId=dataset.AwsAccountId, + S3BucketName=dataset.S3BucketName, + S3Prefix=f'{label}', + ) + session.add(table) + return table + + yield factory + + +@pytest.fixture(scope="module") +def share(db): + def factory( + dataset: Dataset, + environment: Environment, + env_group: EnvironmentGroup + ) -> ShareObject: + with db.scoped_session() as session: + share = ShareObject( + datasetUri=dataset.datasetUri, + environmentUri=environment.environmentUri, + owner="bob", + principalId=environment.SamlGroupName, + principalType=PrincipalType.Group.value, + principalIAMRoleName=env_group.environmentIAMRoleName, + status=ShareObjectStatus.Approved.value, + ) + session.add(share) + session.commit() + return share + + yield factory + + +@pytest.fixture(scope="module") +def share_item_folder(db): + def factory( + share: ShareObject, + location: DatasetStorageLocation, + ) -> ShareObjectItem: + with db.scoped_session() as session: + share_item = ShareObjectItem( + shareUri=share.shareUri, + owner="alice", + itemUri=location.locationUri, + itemType=ShareableType.StorageLocation.value, + itemName=location.name, + status=ShareItemStatus.Share_Approved.value, + ) + session.add(share_item) + session.commit() + return share_item + + yield factory + + +@pytest.fixture(scope="module") +def share_item_table(db): + def factory( + share: ShareObject, + table: DatasetTable, + status: str, + ) -> ShareObjectItem: + with db.scoped_session() as session: + share_item = ShareObjectItem( + shareUri=share.shareUri, + owner="alice", + itemUri=table.tableUri, + itemType=ShareableType.Table.value, + itemName=table.name, + status=status, + ) + session.add(share_item) + session.commit() + return share_item + + yield factory diff --git a/tests/modules/datasets/tasks/test_dataset_catalog_indexer.py b/tests/modules/datasets/tasks/test_dataset_catalog_indexer.py new file mode 100644 index 000000000..7ed40e9b4 --- /dev/null +++ b/tests/modules/datasets/tasks/test_dataset_catalog_indexer.py @@ -0,0 +1,61 @@ +import pytest + +from dataall.modules.catalog.tasks.catalog_indexer_task import index_objects +from dataall.modules.datasets_base.db.dataset_models import DatasetTable, Dataset + + +@pytest.fixture(scope='module', autouse=True) +def sync_dataset(org_fixture, env_fixture, db): + with db.scoped_session() as session: + dataset = Dataset( + organizationUri=org_fixture.organizationUri, + environmentUri=env_fixture.environmentUri, + label='label', + owner='foo', + SamlAdminGroupName='foo', + businessOwnerDelegationEmails=['foo@amazon.com'], + businessOwnerEmail=['bar@amazon.com'], + name='name', + S3BucketName='S3BucketName', + GlueDatabaseName='GlueDatabaseName', + KmsAlias='kmsalias', + AwsAccountId='123456789012', + region='eu-west-1', + IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', + IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', + ) + session.add(dataset) + yield dataset + + +@pytest.fixture(scope='module', autouse=True) +def table(org, env, db, sync_dataset): + with db.scoped_session() as session: + table = DatasetTable( + datasetUri=sync_dataset.datasetUri, + AWSAccountId='12345678901', + S3Prefix='S3prefix', + label='label', + owner='foo', + name='name', + GlueTableName='table1', + S3BucketName='S3BucketName', + GlueDatabaseName='GlueDatabaseName', + 
region='eu-west-1', + ) + session.add(table) + yield table + + +def test_catalog_indexer(db, org, env, sync_dataset, table, mocker): + mocker.patch( + 'dataall.modules.datasets.indexers.table_indexer.DatasetTableIndexer.upsert_all', + return_value=[table] + ) + mocker.patch( + 'dataall.modules.datasets.indexers.dataset_indexer.DatasetIndexer.upsert', return_value=sync_dataset + ) + indexed_objects_counter = index_objects( + engine=db + ) + assert indexed_objects_counter == 2 diff --git a/tests/modules/datasets/tasks/test_dataset_policies.py b/tests/modules/datasets/tasks/test_dataset_policies.py new file mode 100644 index 000000000..83f36b2c0 --- /dev/null +++ b/tests/modules/datasets/tasks/test_dataset_policies.py @@ -0,0 +1,111 @@ +from unittest.mock import MagicMock + +from dataall.modules.datasets_base.db.dataset_models import DatasetTable, Dataset +from dataall.modules.datasets.tasks.bucket_policy_updater import BucketPoliciesUpdater +import pytest + + +@pytest.fixture(scope='module', autouse=True) +def sync_dataset(org_fixture, env_fixture, db): + with db.scoped_session() as session: + dataset = Dataset( + organizationUri=org_fixture.organizationUri, + environmentUri=env_fixture.environmentUri, + label='label', + owner='foo', + SamlAdminGroupName='foo', + businessOwnerDelegationEmails=['foo@amazon.com'], + businessOwnerEmail=['bar@amazon.com'], + name='name', + S3BucketName='S3BucketName', + GlueDatabaseName='GlueDatabaseName', + KmsAlias='kmsalias', + AwsAccountId='123456789012', + region='eu-west-1', + IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', + IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', + imported=True, + ) + session.add(dataset) + yield dataset + + +@pytest.fixture(scope='module', autouse=True) +def table(org, env, db, sync_dataset): + with db.scoped_session() as session: + table = DatasetTable( + datasetUri=sync_dataset.datasetUri, + AWSAccountId='12345678901', + S3Prefix='S3prefix', + label='label', + owner='foo', + name='name', + GlueTableName='table1', + S3BucketName='S3BucketName', + GlueDatabaseName='GlueDatabaseName', + region='eu-west-1', + ) + session.add(table) + yield table + + +def test_prefix_delta(): + s = 's3://insite-data-lake-core-alpha-eu-west-1/forecast/ship_plan/insite_version=0.1/insite_region_id=2/ship_plan.delta/_symlink_format_manifest/*' + delta_path = s.split('/_symlink_format_manifest')[0].split('/')[-1] + prefix = s.split(f'/{delta_path}')[0] + assert ( + prefix + == 's3://insite-data-lake-core-alpha-eu-west-1/forecast/ship_plan/insite_version=0.1/insite_region_id=2' + ) + prefix = 'arn:aws:s3:::insite-data-lake-core-alpha-eu-west-1/forecast/ship_plan/insite_version=0.1/insite_region_id=2' + bucket = prefix.split('arn:aws:s3:::')[1].split('/')[0] + assert bucket == 'insite-data-lake-core-alpha-eu-west-1' + + +def test_group_prefixes_by_accountid(db, mocker): + statements = {} + updater = BucketPoliciesUpdater(db) + updater.group_prefixes_by_accountid('675534', 'prefix1', statements) + updater.group_prefixes_by_accountid('675534', 'prefix2', statements) + updater.group_prefixes_by_accountid('675534', 'prefix3', statements) + updater.group_prefixes_by_accountid('675534', 'prefix3', statements) + updater.group_prefixes_by_accountid('3455', 'prefix4', statements) + assert len(set(statements['675534'])) == 3 + policy = { + 'Version': '2012-10-17', + 'Statement': [ + { + 'Sid': f'OwnerAccount', + 'Effect': 'Allow', + 'Action': ['s3:*'], + 'Resource': [ + f'arn:aws:s3:::', + f'arn:aws:s3:::', + ], + 'Principal': 
{'AWS': f'arn:aws:iam::root'}, + }, + { + 'Sid': f'DH675534', + 'Effect': 'Allow', + 'Action': ['s3:*'], + 'Resource': [ + f'prefix3', + f'prefix2', + ], + 'Principal': {'AWS': '675534'}, + }, + ] + } + BucketPoliciesUpdater.update_policy(statements, policy) + assert policy + + +def test_handler(org, env, db, sync_dataset, mocker): + s3_client = MagicMock() + mocker.patch('dataall.modules.datasets.tasks.bucket_policy_updater.S3DatasetBucketPolicyClient', s3_client) + s3_client().get_bucket_policy.return_value = {'Version': '2012-10-17', 'Statement': []} + s3_client().put_bucket_policy.return_value = {'status': 'SUCCEEDED'} + + updater = BucketPoliciesUpdater(db) + assert len(updater.sync_imported_datasets_bucket_policies()) == 1 + assert updater.sync_imported_datasets_bucket_policies()[0]['status'] == 'SUCCEEDED' diff --git a/tests/modules/datasets/tasks/test_dataset_subscriptions.py b/tests/modules/datasets/tasks/test_dataset_subscriptions.py new file mode 100644 index 000000000..e0453d7e0 --- /dev/null +++ b/tests/modules/datasets/tasks/test_dataset_subscriptions.py @@ -0,0 +1,105 @@ +from unittest.mock import MagicMock + +import pytest + +from dataall.base.db import Engine +from dataall.core.environment.db.environment_models import Environment +from dataall.modules.dataset_sharing.db.enums import ShareObjectStatus, ShareItemStatus, ShareableType, PrincipalType +from dataall.modules.dataset_sharing.db.share_object_models import ShareObjectItem, ShareObject +from dataall.modules.datasets_base.db.dataset_models import DatasetTable, Dataset +from dataall.modules.datasets.tasks.dataset_subscription_task import DatasetSubscriptionService + + +@pytest.fixture(scope='module') +def otherenv(org_fixture, db): + with db.scoped_session() as session: + env = Environment( + organizationUri=org_fixture.organizationUri, + AwsAccountId='987654321', + region='eu-west-1', + label='org', + owner='bob', + tags=[], + description='desc', + SamlGroupName='admins', + EnvironmentDefaultIAMRoleName='EnvRole', + EnvironmentDefaultIAMRoleArn='arn:aws::123456789012:role/EnvRole/GlueJobSessionRunner', + CDKRoleArn='arn:aws::123456789012:role/EnvRole', + userRoleInEnvironment='999', + ) + session.add(env) + yield env + + +@pytest.fixture(scope='module') +def dataset(create_dataset, org_fixture, env_fixture): + yield create_dataset(org_fixture, env_fixture, 'dataset') + + +@pytest.fixture(scope='module') +def share( + dataset: Dataset, + db: Engine, + otherenv: Environment, +): + with db.scoped_session() as session: + + table = DatasetTable( + label='foo', + name='foo', + owner='alice', + description='test table', + tags=['a', 'b'], + datasetUri=dataset.datasetUri, + tableUri='foo', + S3Prefix='s3://dataset/testtable/csv/', + GlueDatabaseName=dataset.GlueDatabaseName, + GlueTableName='foo', + S3BucketName=dataset.S3BucketName, + AWSAccountId=dataset.AwsAccountId, + region=dataset.region, + ) + session.add(table) + share = ShareObject( + datasetUri=dataset.datasetUri, + environmentUri=otherenv.environmentUri, + owner='bob', + principalId='group2', + principalType=PrincipalType.Environment.value, + status=ShareObjectStatus.Approved.value, + ) + session.add(share) + session.commit() + share_item = ShareObjectItem( + shareUri=share.shareUri, + owner='alice', + itemUri=table.tableUri, + itemType=ShareableType.Table.value, + itemName=table.GlueTableName, + GlueDatabaseName=table.GlueDatabaseName, + GlueTableName=table.GlueTableName, + status=ShareItemStatus.Share_Approved.value, + ) + session.add(share_item) + + +def 
test_subscriptions(org, env, otherenv, db, dataset, share, mocker): + sns_client = MagicMock() + mocker.patch( + 'dataall.modules.datasets.tasks.dataset_subscription_task.SnsDatasetClient', + sns_client + ) + sns_client.publish_dataset_message.return_value = True + subscriber = DatasetSubscriptionService(db) + messages = [ + { + 'prefix': 's3://dataset/testtable/csv/', + 'accountid': '123456789012', + 'region': 'eu-west-1', + } + ] + envs = subscriber.get_environments(db) + assert envs + queues = subscriber.get_queues(envs) + assert queues + assert subscriber.notify_consumers(db, messages) diff --git a/tests/modules/datasets/tasks/test_dataset_tables_sync.py b/tests/modules/datasets/tasks/test_dataset_tables_sync.py new file mode 100644 index 000000000..cf1c181e8 --- /dev/null +++ b/tests/modules/datasets/tasks/test_dataset_tables_sync.py @@ -0,0 +1,108 @@ +from unittest.mock import MagicMock + +import pytest +from dataall.modules.datasets_base.db.dataset_models import DatasetTable +from dataall.modules.datasets.tasks.tables_syncer import sync_tables + + +@pytest.fixture(scope='module', autouse=True) +def sync_dataset(create_dataset, org_fixture, env_fixture, db): + yield create_dataset(org_fixture, env_fixture, 'dataset') + + +@pytest.fixture(scope='module', autouse=True) +def table_fixture(org, env, db, sync_dataset): + with db.scoped_session() as session: + table = DatasetTable( + datasetUri=sync_dataset.datasetUri, + AWSAccountId='12345678901', + S3Prefix='S3prefix', + label='label', + owner='foo', + name='name', + GlueTableName='table1', + S3BucketName='S3BucketName', + GlueDatabaseName='GlueDatabaseName', + region='eu-west-1', + ) + session.add(table) + yield table + + +def test_tables_sync(db, org, env, sync_dataset, table_fixture, mocker): + mock_crawler = MagicMock() + mocker.patch('dataall.modules.datasets.tasks.tables_syncer.DatasetCrawler', mock_crawler) + mocker.patch( + "dataall.base.aws.sts.SessionHelper.get_delegation_role_arn", + return_value="arn:role", + ) + + mock_crawler().list_glue_database_tables.return_value = [ + { + 'Name': 'new_table', + 'DatabaseName': sync_dataset.GlueDatabaseName, + 'StorageDescriptor': { + 'Columns': [ + { + 'Name': 'col1', + 'Type': 'string', + 'Comment': 'comment_col', + 'Parameters': {'colp1': 'p1'}, + }, + ], + 'Location': f's3://{sync_dataset.S3BucketName}/table1', + 'Parameters': {'p1': 'p1'}, + }, + 'PartitionKeys': [ + { + 'Name': 'partition1', + 'Type': 'string', + 'Comment': 'comment_partition', + 'Parameters': {'partition_1': 'p1'}, + }, + ], + }, + { + 'Name': 'table1', + 'DatabaseName': sync_dataset.GlueDatabaseName, + 'StorageDescriptor': { + 'Columns': [ + { + 'Name': 'col1', + 'Type': 'string', + 'Comment': 'comment_col', + 'Parameters': {'colp1': 'p1'}, + }, + ], + 'Location': f's3://{sync_dataset.S3BucketName}/table1', + 'Parameters': {'p1': 'p1'}, + }, + 'PartitionKeys': [ + { + 'Name': 'partition1', + 'Type': 'string', + 'Comment': 'comment_partition', + 'Parameters': {'partition_1': 'p1'}, + }, + ], + }, + ] + + mocker.patch( + 'dataall.modules.datasets.tasks.tables_syncer.is_assumable_pivot_role', return_value=True + ) + + mock_client = MagicMock() + mocker.patch("dataall.modules.datasets.tasks.tables_syncer.LakeFormationTableClient", mock_client) + mock_client.grant_principals_all_table_permissions = True + + processed_tables = sync_tables(engine=db) + assert len(processed_tables) == 2 + with db.scoped_session() as session: + saved_table: DatasetTable = ( + session.query(DatasetTable) + 
.filter(DatasetTable.GlueTableName == 'table1') + .first() + ) + assert saved_table + assert saved_table.GlueTableName == 'table1' diff --git a/tests/modules/datasets/tasks/test_lf_share_manager.py b/tests/modules/datasets/tasks/test_lf_share_manager.py new file mode 100644 index 000000000..88d7ea1a4 --- /dev/null +++ b/tests/modules/datasets/tasks/test_lf_share_manager.py @@ -0,0 +1,704 @@ +""" +Testing LF manager class methods invoked in same account and cross account LF share processors. +Remarks + +""" +from unittest.mock import MagicMock + +import boto3 +import pytest + +from typing import Callable + +from dataall.core.cognito_groups.db.cognito_group_models import Group +from dataall.core.organizations.db.organization_models import Organization +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup +from dataall.modules.dataset_sharing.api.enums import ShareItemStatus +from dataall.modules.dataset_sharing.db.share_object_models import ShareObject, ShareObjectItem +from dataall.modules.datasets_base.db.dataset_models import DatasetTable, Dataset +from dataall.modules.dataset_sharing.services.dataset_alarm_service import DatasetAlarmService + +from dataall.modules.dataset_sharing.services.share_processors.lf_process_cross_account_share import ProcessLFCrossAccountShare +from dataall.modules.dataset_sharing.services.share_processors.lf_process_same_account_share import ProcessLFSameAccountShare + + +SOURCE_ENV_ACCOUNT = "1" * 12 +SOURCE_ENV_ROLE_NAME = "dataall-ProducerEnvironment-i6v1v1c2" + + +TARGET_ACCOUNT_ENV = "2" * 12 +TARGET_ACCOUNT_ENV_ROLE_NAME = "dataall-ConsumersEnvironment-r71ucp4m" + +LF_CLIENT = "dataall.modules.dataset_sharing.aws.lakeformation_client.LakeFormationClient" + + +@pytest.fixture(scope="module") +def source_environment(env: Callable, org_fixture: Organization, group: Group) -> Environment: + yield env( + org=org_fixture, + account=SOURCE_ENV_ACCOUNT, + envname="source_environment", + owner=group.owner, + group=group.name, + role=SOURCE_ENV_ROLE_NAME, + ) + + +@pytest.fixture(scope="module") +def source_environment_group(environment_group: Callable, source_environment: Environment, + group: Group) -> EnvironmentGroup: + yield environment_group( + environment=source_environment, + group=group.name + ) + + +@pytest.fixture(scope="module") +def source_environment_group_requesters(environment_group: Callable, source_environment: Environment, + group2: Group) -> EnvironmentGroup: + yield environment_group( + environment=source_environment, + group=group2.name + ) + + +@pytest.fixture(scope="module") +def target_environment(env: Callable, org_fixture: Organization, group2: Group) -> Environment: + yield env( + org=org_fixture, + account=TARGET_ACCOUNT_ENV, + envname="target_environment", + owner=group2.owner, + group=group2.name, + role=TARGET_ACCOUNT_ENV_ROLE_NAME, + ) + + +@pytest.fixture(scope="module") +def target_environment_group(environment_group: Callable, target_environment: Environment, + group2: Group) -> EnvironmentGroup: + yield environment_group( + environment=target_environment, + group=group2.name + ) + + +@pytest.fixture(scope="module") +def dataset1(create_dataset: Callable, org_fixture: Organization, source_environment: Environment) -> Dataset: + yield create_dataset( + organization=org_fixture, + environment=source_environment, + label="dataset1" + ) + + +@pytest.fixture(scope="module") +def table1(table: Callable, dataset1: Dataset) -> DatasetTable: + yield table( + dataset=dataset1, + label="table1" + ) + + 
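+# table1 backs the Share_Approved items below and table2 backs the
+# Revoke_Approved items, so each processor fixture is built with one table to
+# share and one to revoke.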
+@pytest.fixture(scope="module")
+def table2(table: Callable, dataset1: Dataset) -> DatasetTable:
+    yield table(
+        dataset=dataset1,
+        label="table2"
+    )
+
+
+@pytest.fixture(scope="module")
+def share_same_account(
+        share: Callable, dataset1: Dataset, source_environment: Environment,
+        source_environment_group_requesters: EnvironmentGroup) -> ShareObject:
+    yield share(
+        dataset=dataset1,
+        environment=source_environment,
+        env_group=source_environment_group_requesters
+    )
+
+
+@pytest.fixture(scope="module")
+def share_cross_account(
+        share: Callable, dataset1: Dataset, target_environment: Environment,
+        target_environment_group: EnvironmentGroup) -> ShareObject:
+    yield share(
+        dataset=dataset1,
+        environment=target_environment,
+        env_group=target_environment_group
+    )
+
+
+@pytest.fixture(scope="module")
+def share_item_same_account(share_item_table: Callable, share_same_account: ShareObject,
+                            table1: DatasetTable) -> ShareObjectItem:
+    yield share_item_table(
+        share=share_same_account,
+        table=table1,
+        status=ShareItemStatus.Share_Approved.value
+    )
+
+@pytest.fixture(scope="module")
+def revoke_item_same_account(share_item_table: Callable, share_same_account: ShareObject,
+                             table2: DatasetTable) -> ShareObjectItem:
+    yield share_item_table(
+        share=share_same_account,
+        table=table2,
+        status=ShareItemStatus.Revoke_Approved.value
+    )
+
+@pytest.fixture(scope="module")
+def share_item_cross_account(share_item_table: Callable, share_cross_account: ShareObject,
+                             table1: DatasetTable) -> ShareObjectItem:
+    yield share_item_table(
+        share=share_cross_account,
+        table=table1,
+        status=ShareItemStatus.Share_Approved.value
+    )
+
+
+@pytest.fixture(scope="module")
+def revoke_item_cross_account(share_item_table: Callable, share_cross_account: ShareObject,
+                              table2: DatasetTable) -> ShareObjectItem:
+    yield share_item_table(
+        share=share_cross_account,
+        table=table2,
+        status=ShareItemStatus.Revoke_Approved.value
+    )
+
+
+@pytest.fixture(scope="module", autouse=True)
+def processor_cross_account(db, dataset1, share_cross_account, table1, table2, source_environment, target_environment,
+                            target_environment_group):
+    with db.scoped_session() as session:
+        processor = ProcessLFCrossAccountShare(
+            session,
+            dataset1,
+            share_cross_account,
+            [table1],
+            [table2],
+            source_environment,
+            target_environment,
+            target_environment_group,
+        )
+        yield processor
+
+
+@pytest.fixture(scope="module", autouse=True)
+def processor_same_account(db, dataset1, share_same_account, table1, table2, source_environment,
+                           source_environment_group_requesters):
+    with db.scoped_session() as session:
+        processor = ProcessLFSameAccountShare(
+            session,
+            dataset1,
+            share_same_account,
+            [table1],
+            [table2],
+            source_environment,
+            source_environment,
+            source_environment_group_requesters,
+        )
+        yield processor
+
+
+@pytest.fixture(scope="function")
+def mock_glue_client(mocker):
+    mock_client = MagicMock()
+    mocker.patch(
+        "dataall.modules.dataset_sharing.services.share_managers.lf_share_manager.GlueClient",
+        mock_client
+    )
+    yield mock_client
+
+
+def test_init(processor_same_account, processor_cross_account):
+    assert processor_same_account.dataset
+    assert processor_same_account.share
+
+
+def test_build_shared_db_name(
+    processor_same_account: ProcessLFSameAccountShare,
+    processor_cross_account: ProcessLFCrossAccountShare,
+    dataset1: Dataset,
+    share_same_account: ShareObject,
+    share_cross_account: ShareObject,
+):
+    # Given a dataset and its share, build db_share name
+    # Then, it should return
+
assert processor_same_account.build_shared_db_name() == (dataset1.GlueDatabaseName + '_shared_' + share_same_account.shareUri)[:254] + assert processor_cross_account.build_shared_db_name() == (dataset1.GlueDatabaseName + '_shared_' + share_cross_account.shareUri)[:254] + + +def test_get_share_principals( + processor_same_account: ProcessLFSameAccountShare, + processor_cross_account: ProcessLFCrossAccountShare, + source_environment: Environment, + target_environment: Environment, + share_same_account: ShareObject, + share_cross_account: ShareObject, +): + # Given a dataset and its share, build db_share name + # Then, it should return + assert processor_same_account.get_share_principals() == [f"arn:aws:iam::{source_environment.AwsAccountId}:role/{share_same_account.principalIAMRoleName}"] + assert processor_cross_account.get_share_principals() == [f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_cross_account.principalIAMRoleName}"] + + +def test_create_shared_database( + db, + processor_same_account: ProcessLFSameAccountShare, + processor_cross_account: ProcessLFCrossAccountShare, + share_same_account: ShareObject, + share_cross_account: ShareObject, + source_environment: Environment, + target_environment: Environment, + dataset1: Dataset, + mocker, + mock_glue_client +): + mock_glue_client().create_database.return_value = True + + lf_mock_pr = mocker.patch( + f"{LF_CLIENT}.grant_pivot_role_all_database_permissions", + return_value=True, + ) + mocker.patch( + "dataall.base.aws.sts.SessionHelper.remote_session", + return_value=boto3.Session(), + ) + lf_mock = mocker.patch( + f"{LF_CLIENT}.grant_permissions_to_database", + return_value=True, + ) + # When + processor_same_account.create_shared_database( + target_environment=source_environment, + dataset=dataset1, + shared_db_name=(dataset1.GlueDatabaseName + '_shared_' + share_same_account.shareUri)[:254], + principals=[f"arn:aws:iam::{source_environment.AwsAccountId}:role/{share_same_account.principalIAMRoleName}"] + ) + + # Then + mock_glue_client().create_database.assert_called_once() + lf_mock_pr.assert_called_once() + lf_mock.assert_called_once() + + # Reset mocks + mock_glue_client().create_database.reset_mock() + lf_mock_pr.reset_mock() + lf_mock.reset_mock() + + # When + processor_cross_account.create_shared_database( + target_environment=target_environment, + dataset=dataset1, + shared_db_name=(dataset1.GlueDatabaseName + '_shared_' + share_cross_account.shareUri)[:254], + principals=[f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_cross_account.principalIAMRoleName}"] + ) + + # Then + mock_glue_client().create_database.assert_called_once() + lf_mock_pr.assert_called_once() + lf_mock.assert_called_once() + + +def test_check_share_item_exists_on_glue_catalog( + db, + processor_same_account: ProcessLFSameAccountShare, + processor_cross_account: ProcessLFCrossAccountShare, + table1: DatasetTable, + share_item_same_account: ShareObjectItem, + share_item_cross_account: ShareObjectItem, + mocker, + mock_glue_client, +): + + mock_glue_client().table_exists.return_value = True + + # When + processor_same_account.check_share_item_exists_on_glue_catalog( + share_item=share_item_same_account, + table=table1 + ) + # Then + mock_glue_client().table_exists.assert_called_once() + mock_glue_client().table_exists.reset_mock() + + # When + processor_cross_account.check_share_item_exists_on_glue_catalog( + share_item=share_item_cross_account, + table=table1 + ) + # Then + mock_glue_client().table_exists.assert_called_once() + + 
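+# The expected payloads below reuse the conventions asserted above: the shared
+# database is named (dataset.GlueDatabaseName + '_shared_' + share.shareUri)[:254]
+# and each principal is f"arn:aws:iam::{AwsAccountId}:role/{share.principalIAMRoleName}".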
+def test_build_share_data( + db, + processor_same_account: ProcessLFSameAccountShare, + processor_cross_account: ProcessLFCrossAccountShare, + share_same_account: ShareObject, + share_cross_account: ShareObject, + source_environment: Environment, + target_environment: Environment, + dataset1: Dataset, + table1: DatasetTable, +): + data_same_account = { + 'source': { + 'accountid': source_environment.AwsAccountId, + 'region': source_environment.region, + 'database': table1.GlueDatabaseName, + 'tablename': table1.GlueTableName, + }, + 'target': { + 'accountid': source_environment.AwsAccountId, + 'region': source_environment.region, + 'principals': [f"arn:aws:iam::{source_environment.AwsAccountId}:role/{share_same_account.principalIAMRoleName}"], + 'database': (dataset1.GlueDatabaseName + '_shared_' + share_same_account.shareUri)[:254], + }, + } + + data = processor_same_account.build_share_data(table=table1) + assert data == data_same_account + + data_cross_account = { + 'source': { + 'accountid': source_environment.AwsAccountId, + 'region': source_environment.region, + 'database': table1.GlueDatabaseName, + 'tablename': table1.GlueTableName, + }, + 'target': { + 'accountid': target_environment.AwsAccountId, + 'region': target_environment.region, + 'principals': [f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_cross_account.principalIAMRoleName}"], + 'database': (dataset1.GlueDatabaseName + '_shared_' + share_cross_account.shareUri)[:254], + }, + } + + data = processor_cross_account.build_share_data(table=table1) + assert data == data_cross_account + + +def test_create_resource_link( + db, + processor_same_account: ProcessLFSameAccountShare, + processor_cross_account: ProcessLFCrossAccountShare, + share_same_account: ShareObject, + share_cross_account: ShareObject, + source_environment: Environment, + target_environment: Environment, + dataset1: Dataset, + table1: DatasetTable, + mocker, + mock_glue_client, +): + sts_mock = mocker.patch( + "dataall.base.aws.sts.SessionHelper.remote_session", + return_value=boto3.Session(), + ) + glue_mock = mock_glue_client().create_resource_link + glue_mock.return_value = True + + lf_mock_1 = mocker.patch( + f"{LF_CLIENT}.grant_resource_link_permission", + return_value=True, + ) + lf_mock_2 = mocker.patch( + f"{LF_CLIENT}.grant_resource_link_permission_on_target", + return_value=True, + ) + + # When + data_same_account = { + 'source': { + 'accountid': source_environment.AwsAccountId, + 'region': source_environment.region, + 'database': table1.GlueDatabaseName, + 'tablename': table1.GlueTableName, + }, + 'target': { + 'accountid': source_environment.AwsAccountId, + 'region': source_environment.region, + 'principals': [f"arn:aws:iam::{source_environment.AwsAccountId}:role/{share_same_account.principalIAMRoleName}"], + 'database': (dataset1.GlueDatabaseName + '_shared_' + share_same_account.shareUri)[:254], + }, + } + processor_same_account.create_resource_link(**data_same_account) + + # Then + sts_mock.assert_called_once() + glue_mock.assert_called_once() + lf_mock_1.assert_called_once() + lf_mock_2.assert_called_once() + + # Reset mocks + sts_mock.reset_mock() + glue_mock.reset_mock() + lf_mock_1.reset_mock() + lf_mock_2.reset_mock() + + + data_cross_account = { + 'source': { + 'accountid': source_environment.AwsAccountId, + 'region': source_environment.region, + 'database': table1.GlueDatabaseName, + 'tablename': table1.GlueTableName, + }, + 'target': { + 'accountid': target_environment.AwsAccountId, + 'region': target_environment.region, + 
'principals': [f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_cross_account.principalIAMRoleName}"], + 'database': (dataset1.GlueDatabaseName + '_shared_' + share_cross_account.shareUri)[:254], + }, + } + processor_cross_account.create_resource_link(**data_cross_account) + + # Then + sts_mock.assert_called_once() + glue_mock.assert_called_once() + lf_mock_1.assert_called_once() + lf_mock_2.assert_called_once() + + pass + + +def test_revoke_table_resource_link_access( + db, + processor_same_account: ProcessLFSameAccountShare, + processor_cross_account: ProcessLFCrossAccountShare, + share_same_account: ShareObject, + share_cross_account: ShareObject, + source_environment: Environment, + target_environment: Environment, + dataset1: Dataset, + table2: DatasetTable, + mocker, + mock_glue_client +): + + glue_mock = mock_glue_client().table_exists + glue_mock.return_value = True + + mocker.patch( + "dataall.base.aws.sts.SessionHelper.remote_session", + return_value=boto3.Session(), + ) + + lf_mock = mocker.patch( + f"{LF_CLIENT}.batch_revoke_permissions", + return_value=True, + ) + + processor_same_account.revoke_table_resource_link_access( + table=table2, + principals=[f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_same_account.principalIAMRoleName}"] + ) + # Then + glue_mock.assert_called_once() + lf_mock.assert_called_once() + + # Reset mocks + glue_mock.reset_mock() + lf_mock.reset_mock() + + processor_cross_account.revoke_table_resource_link_access( + table=table2, + principals=[f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_cross_account.principalIAMRoleName}"], + ) + # Then + glue_mock.assert_called_once() + lf_mock.assert_called_once() + + +def test_revoke_source_table_access( + db, + processor_same_account: ProcessLFSameAccountShare, + processor_cross_account: ProcessLFCrossAccountShare, + share_same_account: ShareObject, + share_cross_account: ShareObject, + source_environment: Environment, + target_environment: Environment, + dataset1: Dataset, + table2: DatasetTable, + mocker, + mock_glue_client +): + glue_mock = mock_glue_client().table_exists + glue_mock.return_value = True + + lf_mock = mocker.patch( + f"{LF_CLIENT}.revoke_source_table_access", + return_value=True, + ) + + processor_same_account.revoke_source_table_access( + table=table2, + principals=[f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_same_account.principalIAMRoleName}"] + ) + # Then + glue_mock.assert_called_once() + lf_mock.assert_called_once() + + # Reset mocks + glue_mock.reset_mock() + lf_mock.reset_mock() + + processor_cross_account.revoke_source_table_access( + table=table2, + principals=[f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_cross_account.principalIAMRoleName}"] + ) + # Then + glue_mock.assert_called_once() + lf_mock.assert_called_once() + + +def test_delete_resource_link_table( + db, + processor_same_account: ProcessLFSameAccountShare, + processor_cross_account: ProcessLFCrossAccountShare, + share_same_account: ShareObject, + share_cross_account: ShareObject, + source_environment: Environment, + target_environment: Environment, + dataset1: Dataset, + table2: DatasetTable, + mock_glue_client +): + glue_mock = mock_glue_client().table_exists + glue_mock.return_value = True, + + glue_mock2 = mock_glue_client().delete_table + glue_mock2.return_value = True, + + + processor_same_account.delete_resource_link_table( + table=table2 + ) + # Then + glue_mock.assert_called_once() + glue_mock2.assert_called_once() + + # Reset mocks + 
glue_mock.reset_mock() + glue_mock2.reset_mock() + + processor_cross_account.delete_resource_link_table( + table=table2 + ) + # Then + glue_mock.assert_called_once() + glue_mock2.assert_called_once() + + +def test_delete_shared_database( + db, + processor_same_account: ProcessLFSameAccountShare, + processor_cross_account: ProcessLFCrossAccountShare, + share_same_account: ShareObject, + share_cross_account: ShareObject, + source_environment: Environment, + target_environment: Environment, + dataset1: Dataset, + table1: DatasetTable, + mock_glue_client +): + glue_mock = mock_glue_client().delete_database + glue_mock.return_value = True + + processor_same_account.delete_shared_database() + # Then + glue_mock.assert_called_once() + + # Reset mocks + glue_mock.reset_mock() + + processor_cross_account.delete_shared_database() + # Then + glue_mock.assert_called_once() + + +def test_revoke_external_account_access_on_source_account( + db, + processor_same_account: ProcessLFSameAccountShare, + processor_cross_account: ProcessLFCrossAccountShare, + share_same_account: ShareObject, + share_cross_account: ShareObject, + source_environment: Environment, + target_environment: Environment, + dataset1: Dataset, + table1: DatasetTable, + table2: DatasetTable, + mocker, +): + lf_mock = mocker.patch(f"{LF_CLIENT}.batch_revoke_permissions", return_value=True) + + mocker.patch( + "dataall.base.aws.sts.SessionHelper.remote_session", + return_value=boto3.Session(), + ) + + processor_cross_account.revoke_external_account_access_on_source_account() + # Then + lf_mock.assert_called_once() + + +def test_handle_share_failure( + db, + processor_same_account: ProcessLFSameAccountShare, + processor_cross_account: ProcessLFCrossAccountShare, + share_item_same_account: ShareObjectItem, + share_item_cross_account: ShareObjectItem, + table1: DatasetTable, + mocker, +): + + # Given + alarm_service_mock = mocker.patch.object(DatasetAlarmService, "trigger_table_sharing_failure_alarm") + error = Exception + + # When + processor_same_account.handle_share_failure(table1, share_item_same_account, error) + + # Then + alarm_service_mock.assert_called_once() + + # Reset mock + alarm_service_mock.reset_mock() + + # When + processor_cross_account.handle_share_failure(table1, share_item_cross_account, error) + + # Then + alarm_service_mock.assert_called_once() + + +def test_handle_revoke_failure( + db, + processor_same_account: ProcessLFSameAccountShare, + processor_cross_account: ProcessLFCrossAccountShare, + revoke_item_same_account: ShareObjectItem, + revoke_item_cross_account: ShareObjectItem, + table1: DatasetTable, + mocker, +): + # Given + alarm_service_mock = mocker.patch.object(DatasetAlarmService, "trigger_revoke_table_sharing_failure_alarm") + error = Exception + + # When + processor_same_account.handle_revoke_failure(table1, revoke_item_same_account, error) + + # Then + alarm_service_mock.assert_called_once() + + # Reset mock + alarm_service_mock.reset_mock() + + # When + processor_cross_account.handle_revoke_failure(table1, revoke_item_cross_account, error) + + # Then + alarm_service_mock.assert_called_once() diff --git a/tests/modules/datasets/tasks/test_s3_share_manager.py b/tests/modules/datasets/tasks/test_s3_share_manager.py new file mode 100644 index 000000000..febea47f9 --- /dev/null +++ b/tests/modules/datasets/tasks/test_s3_share_manager.py @@ -0,0 +1,1334 @@ +from unittest.mock import MagicMock + +import pytest +import json + +from typing import Callable + +from dataall.core.cognito_groups.db.cognito_group_models 
import Group +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup +from dataall.core.organizations.db.organization_models import Organization +from dataall.modules.dataset_sharing.aws.s3_client import S3ControlClient +from dataall.modules.dataset_sharing.db.share_object_models import ShareObject, ShareObjectItem + +from dataall.modules.dataset_sharing.services.share_managers import S3ShareManager +from dataall.modules.datasets_base.db.dataset_models import DatasetStorageLocation, Dataset + +SOURCE_ENV_ACCOUNT = "111111111111" +SOURCE_ENV_ROLE_NAME = "dataall-ProducerEnvironment-i6v1v1c2" + + +TARGET_ACCOUNT_ENV = "222222222222" +TARGET_ACCOUNT_ENV_ROLE_NAME = "dataall-ConsumersEnvironment-r71ucp4m" + + +@pytest.fixture(scope="module") +def source_environment(env: Callable, org_fixture: Organization, group: Group): + source_environment = env( + org=org_fixture, + account=SOURCE_ENV_ACCOUNT, + envname="source_environment", + owner=group.owner, + group=group.name, + role=SOURCE_ENV_ROLE_NAME, + ) + yield source_environment + + +@pytest.fixture(scope="module") +def source_environment_group(environment_group: Callable, source_environment: Environment, group: Group): + source_environment_group = environment_group(source_environment, group.name) + yield source_environment_group + + +@pytest.fixture(scope="module") +def target_environment(env: Callable, org_fixture: Organization, group2: Group): + target_environment = env( + org=org_fixture, + account=TARGET_ACCOUNT_ENV, + envname="target_environment", + owner=group2.owner, + group=group2.name, + role=TARGET_ACCOUNT_ENV_ROLE_NAME, + ) + yield target_environment + + +@pytest.fixture(scope="module") +def target_environment_group(environment_group: Callable, target_environment: Environment, group2: Group): + target_environment_group = environment_group(target_environment, group2.name) + yield target_environment_group + + +@pytest.fixture(scope="module") +def dataset1(create_dataset: Callable, org_fixture: Organization, source_environment: Environment): + dataset1 = create_dataset(org_fixture, source_environment, "dataset1") + yield dataset1 + + +@pytest.fixture(scope="module") +def location1(location: Callable, dataset1: Dataset) -> DatasetStorageLocation: + yield location(dataset1, "location1") + + +@pytest.fixture(scope="module") +def share1(share: Callable, dataset1: Dataset, + target_environment: Environment, + target_environment_group: EnvironmentGroup) -> ShareObject: + share1 = share(dataset1, target_environment, target_environment_group) + yield share1 + + +@pytest.fixture(scope="module") +def share_item_folder1(share_item_folder: Callable, share1: ShareObject, location1: DatasetStorageLocation): + share_item_folder1 = share_item_folder(share1, location1) + return share_item_folder1 + + +@pytest.fixture(scope="module") +def base_bucket_policy(): + bucket_policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Deny", + "Principal": {"AWS": "*"}, + "Action": "s3:*", + "Resource": ["arn:aws:s3:::dataall-iris-test-120922-4s47wv71", "arn:aws:s3:::dataall-iris-test-120922-4s47wv71/*"], + "Condition": {"Bool": {"aws:SecureTransport": "false"}}, + }, + { + "Effect": "Allow", + "Principal": {"AWS": "arn:aws:iam::111111111111:root"}, + "Action": "s3:*", + "Resource": "arn:aws:s3:::dataall-iris-test-120922-4s47wv71", + }, + ], + } + return bucket_policy + + +@pytest.fixture(scope="module") +def admin_ap_delegation_bucket_policy(): + bucket_policy = { + "Version": "2012-10-17", + "Statement": [ + { + 
"Effect": "Deny", + "Principal": {"AWS": "*"}, + "Action": "s3:*", + "Resource": ["arn:aws:s3:::dataall-iris-test-120922-4s47wv71", "arn:aws:s3:::dataall-iris-test-120922-4s47wv71/*"], + "Condition": {"Bool": {"aws:SecureTransport": "false"}}, + }, + { + "Effect": "Allow", + "Principal": {"AWS": "arn:aws:iam::111111111111:root"}, + "Action": "s3:*", + "Resource": "arn:aws:s3:::dataall-iris-test-120922-4s47wv71", + }, + { + "Sid": "AllowAllToAdmin", + "Effect": "Allow", + "Principal": "*", + "Action": "s3:*", + "Resource": ["arn:aws:s3:::bucket-name", "arn:aws:s3:::bucket-name/*"], + "Condition": {"StringLike": {"aws:userId": "11111"}}, + }, + ], + } + + return bucket_policy + + +def mock_s3_client(mocker): + mock_client = MagicMock() + mocker.patch( + 'dataall.modules.dataset_sharing.services.share_managers.s3_share_manager.S3Client', + mock_client + ) + mock_client.create_bucket_policy.return_value = None + return mock_client + + +def mock_s3_control_client(mocker): + mock_client = MagicMock() + mocker.patch( + 'dataall.modules.dataset_sharing.services.share_managers.s3_share_manager.S3ControlClient', + mock_client + ) + + mock_client.delete_bucket_access_point.return_value = None + mock_client.attach_access_point_policy.return_value = None + + # original call + mock_client.generate_access_point_policy_template.side_effect = \ + S3ControlClient.generate_access_point_policy_template + + return mock_client + + +def mock_kms_client(mocker): + mock_client = MagicMock() + mocker.patch( + 'dataall.modules.dataset_sharing.services.share_managers.s3_share_manager.KmsClient', + mock_client + ) + mock_client.put_key_policy.return_value = None + return mock_client + + +@pytest.fixture(scope="module") +def target_dataset_access_control_policy(request): + + iam_policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": ["s3:*"], + "Resource": [ + f"arn:aws:s3:::{request.param[0]}", + f"arn:aws:s3:::{request.param[0]}/*", + f"arn:aws:s3:datasetregion:{request.param[1]}:accesspoint/{request.param[2]}", + f"arn:aws:s3:datasetregion:{request.param[1]}:accesspoint/{request.param[2]}/*", + ], + } + ], + } + + return iam_policy + + +def test_manage_bucket_policy_no_policy( + mocker, + source_environment_group, + target_environment_group, + dataset1, + db, + share1: ShareObject, + share_item_folder1, + location1, + source_environment: Environment, + target_environment: Environment, + base_bucket_policy, +): + + # Given + bucket_policy = base_bucket_policy + s3_client = mock_s3_client(mocker) + s3_client().get_bucket_policy.return_value = json.dumps(bucket_policy) + + mocker.patch( + "dataall.base.aws.sts.SessionHelper.get_delegation_role_arn", + return_value="arn:role", + ) + + mocker.patch( + "dataall.base.aws.sts.SessionHelper.get_role_ids", + return_value=[1, 2, 3], + ) + + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.manage_bucket_policy() + + created_bucket_policy = json.loads( + s3_client().create_bucket_policy.call_args.args[1] + ) + + # Then + print(f"Bucket policy generated {created_bucket_policy}") + + sid_list = [statement.get("Sid") for statement in + created_bucket_policy["Statement"] if statement.get("Sid")] + + assert "AllowAllToAdmin" in sid_list + assert "DelegateAccessToAccessPoint" in sid_list + + +def test_manage_bucket_policy_existing_policy( + mocker, + 
source_environment_group, + target_environment_group, + dataset1, + db, + share1: ShareObject, + share_item_folder1, + location1, + source_environment: Environment, + target_environment: Environment, + admin_ap_delegation_bucket_policy, +): + + # Given + bucket_policy = admin_ap_delegation_bucket_policy + s3_client = mock_s3_client(mocker) + + s3_client().get_bucket_policy.return_value = json.dumps(bucket_policy) + + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.manage_bucket_policy() + + # Then + s3_client.create_bucket_policy.assert_not_called() + + +@pytest.mark.parametrize("target_dataset_access_control_policy", + ([("bucketname", "aws_account_id", "access_point_name")]), + indirect=True) +def test_grant_target_role_access_policy_existing_policy_bucket_not_included( + mocker, + source_environment_group, + target_environment_group, + dataset1, + db, + share1: ShareObject, + share_item_folder1, + location1, + source_environment: Environment, + target_environment: Environment, + target_dataset_access_control_policy, +): + + # Given + iam_policy = target_dataset_access_control_policy + + mocker.patch( + "dataall.base.aws.iam.IAM.get_role_policy", + return_value=iam_policy, + ) + + iam_update_role_policy_mock = mocker.patch( + "dataall.base.aws.iam.IAM.update_role_policy", + return_value=None, + ) + + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.grant_target_role_access_policy() + + # Then + iam_update_role_policy_mock.assert_called() + + # Iam function is called with str from object so we transform back to object + policy_object = json.loads(iam_update_role_policy_mock.call_args.args[3]) + + # Assert that bucket_name is inside the resource array of policy object + assert location1.S3BucketName in ",".join(policy_object["Statement"][0]["Resource"]) + + +@pytest.mark.parametrize("target_dataset_access_control_policy", ([("dataset1", SOURCE_ENV_ACCOUNT, "test")]), indirect=True) +def test_grant_target_role_access_policy_existing_policy_bucket_included( + mocker, + source_environment_group, + target_environment_group, + dataset1, + db, + share1: ShareObject, + share_item_folder1, + location1, + source_environment: Environment, + target_environment: Environment, + target_dataset_access_control_policy, +): + + # Given + iam_policy = target_dataset_access_control_policy + + mocker.patch( + "dataall.base.aws.iam.IAM.get_role_policy", + return_value=iam_policy, + ) + + iam_update_role_policy_mock = mocker.patch( + "dataall.base.aws.iam.IAM.update_role_policy", + return_value=None, + ) + + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.grant_target_role_access_policy() + + # Then + iam_update_role_policy_mock.assert_not_called() + + +def test_grant_target_role_access_policy_test_no_policy( + mocker, + source_environment_group: EnvironmentGroup, + target_environment_group: EnvironmentGroup, + dataset1: Dataset, + db, + share1: ShareObject, + share_item_folder1: ShareObjectItem, + location1: DatasetStorageLocation, + source_environment: 
Environment, + target_environment: Environment, +): + + # Given + mocker.patch( + "dataall.base.aws.iam.IAM.get_role_policy", + return_value=None, + ) + + iam_update_role_policy_mock = mocker.patch( + "dataall.base.aws.iam.IAM.update_role_policy", + return_value=None, + ) + + expected_policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": ["s3:*"], + "Resource": [ + f"arn:aws:s3:::{location1.S3BucketName}", + f"arn:aws:s3:::{location1.S3BucketName}/*", + f"arn:aws:s3:{dataset1.region}:{dataset1.AwsAccountId}:accesspoint/{share_item_folder1.S3AccessPointName}", + f"arn:aws:s3:{dataset1.region}:{dataset1.AwsAccountId}:accesspoint/{share_item_folder1.S3AccessPointName}/*", + ], + } + ], + } + + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.grant_target_role_access_policy() + + # Then + iam_update_role_policy_mock.assert_called_with( + target_environment.AwsAccountId, share1.principalIAMRoleName, + "targetDatasetAccessControlPolicy", json.dumps(expected_policy) + ) + + +def test_update_dataset_bucket_key_policy_with_env_admin( + mocker, + source_environment_group: EnvironmentGroup, + target_environment_group: EnvironmentGroup, + dataset1: Dataset, + db, + share1: ShareObject, + share_item_folder1: ShareObjectItem, + location1: DatasetStorageLocation, + source_environment: Environment, + target_environment: Environment, +): + # Given + kms_client = mock_kms_client(mocker) + kms_client().get_key_id.return_value = None + + existing_key_policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": f"{target_environment.SamlGroupName}", + "Effect": "Allow", + "Principal": {"AWS": "*"}, + "Action": "kms:Decrypt", + "Resource": "*", + "Condition": {"StringLike": {"aws:userId": f"{target_environment.SamlGroupName}:*"}}, + } + ], + } + + kms_client().get_key_policy.return_value = json.dumps(existing_key_policy) + + mocker.patch( + "dataall.base.aws.sts.SessionHelper.get_role_id", + return_value=target_environment.SamlGroupName, + ) + + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.update_dataset_bucket_key_policy() + + # Then + kms_client().put_key_policy.assert_not_called() + + +def _generate_ap_policy_object( + access_point_arn: str, + env_admin_prefix_list: list, +): + new_ap_policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowAllToAdmin", + "Effect": "Allow", + "Principal": "*", + "Action": "s3:*", + "Resource": "access-point-arn", + "Condition": {"StringLike": {"aws:userId": ["dataset_admin_role_id:*", "source_env_admin_role_id:*", "source_account_pivot_role_id:*"]}}, + }, + ], + } + + for statement in env_admin_prefix_list: + first_half = { + "Sid": f"{statement[0]}0", + "Effect": "Allow", + "Principal": {"AWS": "*"}, + "Action": "s3:ListBucket", + "Resource": f"{access_point_arn}", + "Condition": {"StringLike": {"s3:prefix": [], "aws:userId": [f"{statement[0]}"]}}, + } + second_half = { + "Sid": f"{statement[0]}1", + "Effect": "Allow", + "Principal": {"AWS": "*"}, + "Action": "s3:GetObject", + "Resource": [], + "Condition": {"StringLike": {"aws:userId": [f"{statement[0]}:*"]}}, + } + prefix_list = [] + for prefix in statement[1]: + 
prefix_list.append(f"{prefix}/*") + second_half["Resource"].append(f"{access_point_arn}/object/{prefix}/*") + + if len(prefix_list) > 1: + first_half["Condition"]["StringLike"]["s3:prefix"] = prefix_list + else: + first_half["Condition"]["StringLike"]["s3:prefix"] = prefix_list[0] + + new_ap_policy["Statement"].append(first_half) + new_ap_policy["Statement"].append(second_half) + + return new_ap_policy + + +def test_update_dataset_bucket_key_policy_without_env_admin( + mocker, + source_environment_group: EnvironmentGroup, + target_environment_group: EnvironmentGroup, + dataset1: Dataset, + db, + share1: ShareObject, + share_item_folder1: ShareObjectItem, + location1: DatasetStorageLocation, + source_environment: Environment, + target_environment: Environment, +): + # Given + kms_client = mock_kms_client(mocker) + kms_client().get_key_id.return_value = "kms-key" + + existing_key_policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "different_env_admin_id", + "Effect": "Allow", + "Principal": {"AWS": "*"}, + "Action": "kms:Decrypt", + "Resource": "*", + "Condition": {"StringLike": {"aws:userId": "different_env_admin_id:*"}}, + } + ], + } + + kms_client().get_key_policy.return_value = json.dumps(existing_key_policy) + + mocker.patch( + "dataall.base.aws.sts.SessionHelper.get_role_id", + return_value=target_environment.SamlGroupName, + ) + + new_key_policy = { + "Sid": f"{target_environment.SamlGroupName}", + "Effect": "Allow", + "Principal": {"AWS": "*"}, + "Action": "kms:Decrypt", + "Resource": "*", + "Condition": {"StringLike": {"aws:userId": f"{target_environment.SamlGroupName}:*"}}, + } + + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.update_dataset_bucket_key_policy() + + existing_key_policy["Statement"].append(new_key_policy) + + expected_complete_key_policy = existing_key_policy + + # Then + kms_client().put_key_policy.assert_called_with("kms-key", json.dumps(expected_complete_key_policy)) + + +# NO existing Access point and ap policy +def test_manage_access_point_and_policy_1( + mocker, + source_environment_group: EnvironmentGroup, + target_environment_group: EnvironmentGroup, + dataset1: Dataset, + db, + share1: ShareObject, + share_item_folder1: ShareObjectItem, + location1: DatasetStorageLocation, + source_environment: Environment, + target_environment: Environment, +): + # Given + access_point_arn = "new-access-point-arn" + s3_control_client = mock_s3_control_client(mocker) + s3_control_client().create_bucket_access_point.return_value = access_point_arn + s3_control_client().get_bucket_access_point_arn.return_value = access_point_arn + s3_control_client().get_access_point_policy.return_value = None + + mocker.patch( + "dataall.base.aws.sts.SessionHelper.get_role_id", + return_value=target_environment.SamlGroupName, + ) + + mocker.patch( + "dataall.base.aws.sts.SessionHelper.get_role_ids", + return_value=["dataset_admin_role_id:*", "source_env_admin_role_id:*" "source_account_pivot_role_id:*"], + ) + + mocker.patch( + "dataall.base.aws.sts.SessionHelper.get_delegation_role_arn", + return_value=None, + ) + + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.manage_access_point_and_policy() + + # 
Then + s3_control_client().attach_access_point_policy.assert_called() + policy = s3_control_client().attach_access_point_policy.call_args.kwargs.get('policy') + new_ap_policy = json.loads(policy) + + # Assert that access point is in resource + assert new_ap_policy["Statement"][0]["Resource"] == access_point_arn + + # Assert that listbucket and getobject permissions were added for target environment admin + assert "s3:GetObject" in [ + statement["Action"] for statement in new_ap_policy["Statement"] if statement["Sid"].startswith(target_environment.SamlGroupName) + ] + assert "s3:ListBucket" in [ + statement["Action"] for statement in new_ap_policy["Statement"] if statement["Sid"].startswith(target_environment.SamlGroupName) + ] + + # Assert AllowAllToAdmin "Sid" exists + assert len([statement for statement in new_ap_policy["Statement"] if statement["Sid"] == "AllowAllToAdmin"]) > 0 + + +# Existing Access point and ap policy +# target_env_admin is already in policy +# current folder is NOT yet in prefix_list +def test_manage_access_point_and_policy_2( + mocker, + source_environment_group: EnvironmentGroup, + target_environment_group: EnvironmentGroup, + dataset1: Dataset, + db, + share1: ShareObject, + share_item_folder1: ShareObjectItem, + location1: DatasetStorageLocation, + source_environment: Environment, + target_environment: Environment, +): + # Given + + # Existing access point + access_point_arn = "existing-access-point-arn" + s3_client = mock_s3_control_client(mocker) + s3_client().get_bucket_access_point_arn.return_value = access_point_arn + + # target_env_admin is already in policy but current folder is NOT yet in prefix_list + existing_ap_policy = _generate_ap_policy_object(access_point_arn, [[target_environment.SamlGroupName, ["existing-prefix"]]]) + + # Existing access point policy + s3_client().get_access_point_policy.return_value = json.dumps(existing_ap_policy) + + mocker.patch( + "dataall.base.aws.sts.SessionHelper.get_role_id", + return_value=target_environment.SamlGroupName, + ) + + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.manage_access_point_and_policy() + + # Then + s3_client().attach_access_point_policy.assert_called() + policy = s3_client().attach_access_point_policy.call_args.kwargs.get('policy') + + # Assert S3 Prefix of share folder in prefix_list + new_ap_policy = json.loads(policy) + statements = {item["Sid"]: item for item in new_ap_policy["Statement"]} + prefix_list = statements[f"{target_environment.SamlGroupName}0"]["Condition"]["StringLike"]["s3:prefix"] + + assert f"{location1.S3Prefix}/*" in prefix_list + + # Assert s3 prefix is in resource_list + resource_list = statements[f"{target_environment.SamlGroupName}1"]["Resource"] + + assert f"{access_point_arn}/object/{location1.S3Prefix}/*" in resource_list + + +# Existing Access point and ap policy +# target_env_admin is NOT already in ap policy +# current folder is NOT yet in prefix_list +def test_manage_access_point_and_policy_3( + mocker, + source_environment_group: EnvironmentGroup, + target_environment_group: EnvironmentGroup, + dataset1: Dataset, + db, + share1: ShareObject, + share_item_folder1: ShareObjectItem, + location1: DatasetStorageLocation, + source_environment: Environment, + target_environment: Environment, +): + # Given + + # Existing access point + access_point_arn = "existing-access-point-arn" + 
s3_control_client = mock_s3_control_client(mocker) + s3_control_client().get_bucket_access_point_arn.return_value = access_point_arn + + # New target env admin and prefix are not in existing ap policy + existing_ap_policy = _generate_ap_policy_object(access_point_arn, [["another-env-admin", ["existing-prefix"]]]) + + # Existing access point policy + s3_control_client().get_access_point_policy.return_value = json.dumps(existing_ap_policy) + + mocker.patch( + "dataall.base.aws.sts.SessionHelper.get_role_id", + return_value=target_environment.SamlGroupName, + ) + + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.manage_access_point_and_policy() + + # Then + s3_control_client().attach_access_point_policy.assert_called() + + # Assert S3 Prefix of share folder in prefix_list + policy = s3_control_client().attach_access_point_policy.call_args.kwargs.get('policy') + new_ap_policy = json.loads(policy) + statements = {item["Sid"]: item for item in new_ap_policy["Statement"]} + prefix_list = statements[f"{target_environment.SamlGroupName}0"]["Condition"]["StringLike"]["s3:prefix"] + + assert f"{location1.S3Prefix}/*" in prefix_list + + # Assert s3 prefix is in resource_list + resource_list = statements[f"{target_environment.SamlGroupName}1"]["Resource"] + + assert f"{access_point_arn}/object/{location1.S3Prefix}/*" in resource_list + + +def test_delete_access_point_policy_with_env_admin_one_prefix( + mocker, + source_environment_group: EnvironmentGroup, + target_environment_group: EnvironmentGroup, + dataset1: Dataset, + db, + share1: ShareObject, + share_item_folder1: ShareObjectItem, + location1: DatasetStorageLocation, + source_environment: Environment, + target_environment: Environment, +): + # Given + + # Existing access point + access_point_arn = "existing-access-point-arn" + s3_control_client = mock_s3_control_client(mocker) + s3_control_client().get_bucket_access_point_arn.return_value = access_point_arn + + # New target env admin and prefix are already in existing ap policy + # Another admin is part of this policy + existing_ap_policy = _generate_ap_policy_object( + access_point_arn, + [[target_environment.SamlGroupName, [location1.S3Prefix]], ["another-env-admin", [location1.S3Prefix]]], + ) + + s3_control_client().get_access_point_policy.return_value = json.dumps(existing_ap_policy) + mocker.patch( + "dataall.base.aws.sts.SessionHelper.get_role_id", + return_value=target_environment.SamlGroupName, + ) + + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.delete_access_point_policy() + + # Then + s3_control_client().attach_access_point_policy.assert_called() + + # Assert statements for share have been removed + new_ap_policy = json.loads(s3_control_client().attach_access_point_policy.call_args.kwargs.get('policy')) + deleted_statements = {item["Sid"]: item for item in new_ap_policy["Statement"] if item["Sid"].startswith(f"{target_environment.SamlGroupName}")} + + assert len(deleted_statements) == 0 + + # Assert other statements are remaining + remaining_statements = {item["Sid"]: item for item in new_ap_policy["Statement"] if not item["Sid"].startswith(f"{target_environment.SamlGroupName}")} + + assert 
len(remaining_statements) > 0 + + +def test_delete_access_point_policy_with_env_admin_multiple_prefix( + mocker, + source_environment_group: EnvironmentGroup, + target_environment_group: EnvironmentGroup, + dataset1: Dataset, + db, + share1: ShareObject, + share_item_folder1: ShareObjectItem, + location1: DatasetStorageLocation, + source_environment: Environment, + target_environment: Environment, +): + # Given + + access_point_arn = "existing-access-point-arn" + s3_control_client = mock_s3_control_client(mocker) + s3_control_client().get_bucket_access_point_arn.return_value = access_point_arn + + existing_ap_policy = _generate_ap_policy_object( + access_point_arn, + [[target_environment.SamlGroupName, [location1.S3Prefix, "another-prefix"]], ["another-env-admin", [location1.S3Prefix]]], + ) + + s3_control_client().get_access_point_policy.return_value = json.dumps(existing_ap_policy) + mocker.patch( + "dataall.base.aws.sts.SessionHelper.get_role_id", + return_value=target_environment.SamlGroupName, + ) + + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.delete_access_point_policy() + + # Then + s3_control_client().attach_access_point_policy.assert_called() + + # Assert statements for share have been removed + new_ap_policy = json.loads(s3_control_client().attach_access_point_policy.call_args.kwargs.get('policy')) + statements = {item["Sid"]: item for item in new_ap_policy["Statement"]} + + remaining_prefix_list = statements[f"{target_environment.SamlGroupName}0"]["Condition"]["StringLike"]["s3:prefix"] + + assert location1.S3Prefix not in remaining_prefix_list + assert "another-prefix/*" in remaining_prefix_list + + +def test_dont_delete_access_point_with_policy( + mocker, + source_environment_group: EnvironmentGroup, + target_environment_group: EnvironmentGroup, + dataset1: Dataset, + db, + share1: ShareObject, + share_item_folder1: ShareObjectItem, + location1: DatasetStorageLocation, + source_environment: Environment, + target_environment: Environment, +): + # Given + existing_ap_policy = _generate_ap_policy_object("access-point-arn", [[target_environment.SamlGroupName, ["existing-prefix"]]]) + + s3_control_client = mock_s3_control_client(mocker) + s3_control_client().get_access_point_policy.return_value = json.dumps(existing_ap_policy) + # When + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + is_deleted = manager.delete_access_point(share1, dataset1) + + # Then + assert not is_deleted + assert not s3_control_client().delete_bucket_access_point.called + + +def test_delete_access_point_without_policy( + mocker, + source_environment_group: EnvironmentGroup, + target_environment_group: EnvironmentGroup, + dataset1: Dataset, + db, + share1: ShareObject, + share_item_folder1: ShareObjectItem, + location1: DatasetStorageLocation, + source_environment: Environment, + target_environment: Environment, +): + # Given ap policy that only includes AllowAllToAdminStatement + existing_ap_policy = _generate_ap_policy_object("access-point-arn", []) + + s3_control_client = mock_s3_control_client(mocker) + s3_control_client().get_access_point_policy.return_value = json.dumps(existing_ap_policy) + 
s3_control_client().delete_bucket_access_point.return_value = None + + # When + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + is_deleted = manager.delete_access_point(share1, dataset1) + + # Then + assert is_deleted + assert s3_control_client().delete_bucket_access_point.called + + +def test_delete_target_role_access_policy_no_remaining_statement( + mocker, + source_environment_group: EnvironmentGroup, + target_environment_group: EnvironmentGroup, + dataset1: Dataset, + db, + share1: ShareObject, + share_item_folder1: ShareObjectItem, + location1: DatasetStorageLocation, + source_environment: Environment, + target_environment: Environment, +): + # Given ap policy that only includes AllowAllToAdminStatement + existing_target_role_policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": ["s3:*"], + "Resource": [ + f"arn:aws:s3:::{location1.S3BucketName}", + f"arn:aws:s3:::{location1.S3BucketName}/*", + f"arn:aws:s3:{dataset1.region}:{dataset1.AwsAccountId}:accesspoint/{S3ShareManager.build_access_point_name(share1)}", + f"arn:aws:s3:{dataset1.region}:{dataset1.AwsAccountId}:accesspoint/{S3ShareManager.build_access_point_name(share1)}/*", + ], + } + ], + } + + mocker.patch( + "dataall.base.aws.iam.IAM.get_role_policy", + return_value=existing_target_role_policy, + ) + + iam_delete_role_policy_mock = mocker.patch( + "dataall.base.aws.iam.IAM.delete_role_policy", + return_value=None, + ) + + iam_update_role_policy_mock = mocker.patch( + "dataall.base.aws.iam.IAM.update_role_policy", + return_value=None, + ) + + # When + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.delete_target_role_access_policy(share1, dataset1, target_environment) + + # Then + iam_delete_role_policy_mock.assert_called() + iam_update_role_policy_mock.assert_not_called() + + +def test_delete_target_role_access_policy_with_remaining_statement( + mocker, + source_environment_group: EnvironmentGroup, + target_environment_group: EnvironmentGroup, + dataset1: Dataset, + db, + share1: ShareObject, + share_item_folder1: ShareObjectItem, + location1: DatasetStorageLocation, + source_environment: Environment, + target_environment: Environment, +): + # Given + # target role policy that has a bucket unrelated to the current bucket to be deleted + existing_target_role_policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": ["s3:*"], + "Resource": [ + "arn:aws:s3:::UNRELATED_BUCKET_ARN", + f"arn:aws:s3:::{location1.S3BucketName}", + f"arn:aws:s3:::{location1.S3BucketName}/*", + f"arn:aws:s3:{dataset1.region}:{dataset1.AwsAccountId}:accesspoint/{S3ShareManager.build_access_point_name(share1)}", + f"arn:aws:s3:{dataset1.region}:{dataset1.AwsAccountId}:accesspoint/{S3ShareManager.build_access_point_name(share1)}/*", + ], + } + ], + } + + expected_remaining_target_role_policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": ["s3:*"], + "Resource": ["arn:aws:s3:::UNRELATED_BUCKET_ARN"], + } + ], + } + + mocker.patch( + "dataall.base.aws.iam.IAM.get_role_policy", + return_value=existing_target_role_policy, + ) + + iam_delete_role_policy_mock = mocker.patch( + 
"dataall.base.aws.iam.IAM.delete_role_policy", + return_value=None, + ) + + iam_update_role_policy_mock = mocker.patch( + "dataall.base.aws.iam.IAM.update_role_policy", + return_value=None, + ) + + # When + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.delete_target_role_access_policy(share1, dataset1, target_environment) + + # Then + iam_delete_role_policy_mock.assert_not_called() + + iam_update_role_policy_mock.assert_called_with( + target_environment.AwsAccountId, + share1.principalIAMRoleName, + "targetDatasetAccessControlPolicy", + json.dumps(expected_remaining_target_role_policy), + ) + + +# The kms key policy includes the target env admin to be removed aswell as one additional target env +# admin, that should remain +def test_delete_dataset_bucket_key_policy_existing_policy_with_additional_target_env( + mocker, + source_environment_group: EnvironmentGroup, + target_environment_group: EnvironmentGroup, + dataset1: Dataset, + db, + share1: ShareObject, + share_item_folder1: ShareObjectItem, + location1: DatasetStorageLocation, + source_environment: Environment, + target_environment: Environment, +): + # Given + kms_client = mock_kms_client(mocker) + kms_client().get_key_id.return_value = "1" + + # Includes target env admin to be removed and another, that should remain + existing_key_policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": f"{target_environment.SamlGroupName}", + "Effect": "Allow", + "Principal": {"AWS": "*"}, + "Action": "kms:Decrypt", + "Resource": "*", + "Condition": {"StringLike": {"aws:userId": f"{target_environment.SamlGroupName}:*"}}, + }, + { + "Sid": "REMAINING_TARGET_ENV_ADMIN_ID", + "Effect": "Allow", + "Principal": {"AWS": "*"}, + "Action": "kms:Decrypt", + "Resource": "*", + "Condition": {"StringLike": {"aws:userId": "REMAINING_TARGET_ENV_ADMIN_ID:*"}}, + }, + ], + } + + remaining_policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "REMAINING_TARGET_ENV_ADMIN_ID", + "Effect": "Allow", + "Principal": {"AWS": "*"}, + "Action": "kms:Decrypt", + "Resource": "*", + "Condition": {"StringLike": {"aws:userId": "REMAINING_TARGET_ENV_ADMIN_ID:*"}}, + } + ], + } + + kms_client().get_key_policy.return_value = json.dumps(existing_key_policy) + + mocker.patch( + "dataall.base.aws.sts.SessionHelper.get_role_id", + return_value=target_environment.SamlGroupName, + ) + + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.delete_dataset_bucket_key_policy(share1, dataset1, target_environment) + + # Then + kms_client().put_key_policy.assert_called() + kms_client().put_key_policy.assert_called_with( + kms_client().get_key_id.return_value, + json.dumps(remaining_policy) + ) + + +# The kms key policy only includes the target env admin +def test_delete_dataset_bucket_key_policy_existing_policy_with_no_additional_target_env( + mocker, + source_environment_group: EnvironmentGroup, + target_environment_group: EnvironmentGroup, + dataset1: Dataset, + db, + share1: ShareObject, + share_item_folder1: ShareObjectItem, + location1: DatasetStorageLocation, + source_environment: Environment, + target_environment: Environment, +): + # Given + kms_client = mock_kms_client(mocker) + 
kms_client().get_key_id.return_value = "1" + + # Includes target env admin to be removed and another, that should remain + existing_key_policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": f"{target_environment.SamlGroupName}", + "Effect": "Allow", + "Principal": {"AWS": "*"}, + "Action": "kms:Decrypt", + "Resource": "*", + "Condition": {"StringLike": {"aws:userId": f"{target_environment.SamlGroupName}:*"}}, + } + ], + } + + remaining_policy = { + "Version": "2012-10-17", + "Statement": [], + } + + kms_client().get_key_policy.return_value = json.dumps(existing_key_policy) + + mocker.patch( + "dataall.base.aws.sts.SessionHelper.get_role_id", + return_value=target_environment.SamlGroupName, + ) + + with db.scoped_session() as session: + manager = S3ShareManager( + session, + dataset1, + share1, + location1, + source_environment, + target_environment, + source_environment_group, + target_environment_group, + ) + + # When + manager.delete_dataset_bucket_key_policy(share1, dataset1, target_environment) + + # Then + kms_client().put_key_policy.assert_called() + kms_client().put_key_policy.assert_called_with( + kms_client().get_key_id.return_value, + json.dumps(remaining_policy) + ) diff --git a/tests/modules/datasets/tasks/test_stacks_updater_with_datasets.py b/tests/modules/datasets/tasks/test_stacks_updater_with_datasets.py new file mode 100644 index 000000000..3b479af3e --- /dev/null +++ b/tests/modules/datasets/tasks/test_stacks_updater_with_datasets.py @@ -0,0 +1,18 @@ +import pytest +from dataall.modules.datasets_base.db.dataset_models import Dataset +from dataall.core.environment.tasks.env_stacks_updater import update_stacks + + +@pytest.fixture(scope='module', autouse=True) +def sync_dataset(create_dataset, org_fixture, env_fixture, db): + yield create_dataset(org_fixture, env_fixture, 'dataset') + + +def test_stacks_update(db, org, env, sync_dataset, mocker): + mocker.patch( + 'dataall.core.environment.tasks.env_stacks_updater.update_stack', + return_value=True, + ) + envs, datasets = update_stacks(engine=db, envname='local') + assert envs == 1 + assert datasets == 1 diff --git a/tests/api/test_dataset.py b/tests/modules/datasets/test_dataset.py similarity index 80% rename from tests/api/test_dataset.py rename to tests/modules/datasets/test_dataset.py index 0cb824fd1..b6730b853 100644 --- a/tests/api/test_dataset.py +++ b/tests/modules/datasets/test_dataset.py @@ -1,58 +1,37 @@ import typing +from unittest.mock import MagicMock import pytest -import dataall - - -@pytest.fixture(scope='module', autouse=True) -def org1(org, user, group, tenant): - org1 = org('testorg', user.userName, group.name) - yield org1 - - -@pytest.fixture(scope='module', autouse=True) -def env1(env, org1, user, group, tenant): - env1 = env(org1, 'dev', 'alice', 'testadmins', '111111111111', 'eu-west-1') - yield env1 - - -@pytest.fixture(scope='module') -def dataset1(env1, org1, dataset, group) -> dataall.db.models.Dataset: - yield dataset( - org=org1, env=env1, name='dataset1', owner=env1.owner, group=group.name - ) - - -@pytest.fixture(scope='module') -def org2(org: typing.Callable, user2, group2, tenant) -> dataall.db.models.Organization: - yield org('org2', user2.userName, group2.name) - - -@pytest.fixture(scope='module') -def env2( - env: typing.Callable, org2: dataall.db.models.Organization, user2, group2, tenant -) -> dataall.db.models.Environment: - yield env(org2, 'dev', user2.userName, group2.name, '2' * 12, 'eu-west-2') - - -def test_init(db): - assert True +from 
dataall.core.environment.db.environment_models import Environment +from dataall.core.organizations.db.organization_models import Organization +from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository +from dataall.modules.datasets_base.db.dataset_models import DatasetStorageLocation, DatasetTable, Dataset +from tests.core.stacks.test_stack import update_stack_query @pytest.fixture(scope='module') def dataset1( - org1: dataall.db.models.Organization, - env1: dataall.db.models.Environment, + module_mocker, + org_fixture: Organization, + env_fixture: Environment, dataset: typing.Callable, group, -) -> dataall.db.models.Dataset: - d = dataset(org=org1, env=env1, name='dataset1', owner=env1.owner, group=group.name) +) -> Dataset: + kms_client = MagicMock() + module_mocker.patch( + 'dataall.modules.datasets.services.dataset_service.KmsClient', + kms_client + ) + + kms_client().get_key_id.return_value = {"some_key"} + + d = dataset(org=org_fixture, env=env_fixture, name='dataset1', owner=env_fixture.owner, group=group.name) print(d) yield d -def test_get_dataset(client, dataset1, env1, group): +def test_get_dataset(client, dataset1, env_fixture, group): response = client.query( """ query GetDataset($datasetUri:String!){ @@ -72,8 +51,8 @@ def test_get_dataset(client, dataset1, env1, group): username='alice', groups=[group.name], ) - assert response.data.getDataset.AwsAccountId == env1.AwsAccountId - assert response.data.getDataset.region == env1.region + assert response.data.getDataset.AwsAccountId == env_fixture.AwsAccountId + assert response.data.getDataset.region == env_fixture.region assert response.data.getDataset.label == 'dataset1' assert response.data.getDataset.imported is False assert response.data.getDataset.importedS3Bucket is False @@ -101,10 +80,6 @@ def test_list_datasets(client, dataset1, group): def test_update_dataset(dataset1, client, group, group2, module_mocker): - module_mocker.patch( - 'dataall.aws.handlers.kms.KMS.get_key_id', - return_value={"some_key"}, - ) response = client.query( """ mutation UpdateDataset($datasetUri:String!,$input:ModifyDatasetInput){ @@ -178,10 +153,9 @@ def test_update_dataset(dataset1, client, group, group2, module_mocker): assert response.data.updateDataset.confidentiality == 'Official' -def test_start_crawler(org1, env1, dataset1, client, group, module_mocker): +def test_start_crawler(org_fixture, env_fixture, dataset1, client, group, module_mocker): module_mocker.patch( - 'dataall.aws.handlers.glue.Glue.get_glue_crawler', - return_value={'crawler_name': dataset1.GlueCrawlerName}, + 'dataall.modules.datasets.services.dataset_service.DatasetCrawler', MagicMock() ) mutation = """ mutation StartGlueCrawler($datasetUri:String, $input:CrawlerInput){ @@ -231,7 +205,7 @@ def test_add_tables(table, dataset1, db): table(dataset=dataset1, name=f'table{i+1}', username=dataset1.owner) with db.scoped_session() as session: - nb = session.query(dataall.db.models.DatasetTable).count() + nb = session.query(DatasetTable).count() assert nb == 10 @@ -240,7 +214,7 @@ def test_add_locations(location, dataset1, db): location(dataset=dataset1, name=f'unstructured{i+1}', username=dataset1.owner) with db.scoped_session() as session: - nb = session.query(dataall.db.models.DatasetStorageLocation).count() + nb = session.query(DatasetStorageLocation).count() assert nb == 10 @@ -344,7 +318,7 @@ def test_list_dataset_tables(client, dataset1, group): assert len(response.data.getDataset.tables.nodes) == 2 -def test_dataset_in_environment(client, env1, 
dataset1, group): +def test_dataset_in_environment(client, env_fixture, dataset1, group): q = """ query ListDatasetsCreatedInEnvironment($environmentUri:String!){ listDatasetsCreatedInEnvironment(environmentUri:$environmentUri){ @@ -356,7 +330,7 @@ def test_dataset_in_environment(client, env1, dataset1, group): } """ response = client.query( - q, username=env1.owner, groups=[group.name], environmentUri=env1.environmentUri + q, username=env_fixture.owner, groups=[group.name], environmentUri=env_fixture.environmentUri ) assert response.data.listDatasetsCreatedInEnvironment.count == 1 assert ( @@ -365,15 +339,12 @@ def test_dataset_in_environment(client, env1, dataset1, group): ) -def test_delete_dataset(client, dataset, env1, org1, db, module_mocker, group, user): +def test_delete_dataset(client, dataset, env_fixture, org_fixture, db, module_mocker, group, user): with db.scoped_session() as session: - session.query(dataall.db.models.Dataset).delete() + session.query(Dataset).delete() session.commit() deleted_dataset = dataset( - org=org1, env=env1, name='dataset1', owner=user.userName, group=group.name - ) - module_mocker.patch( - 'dataall.aws.handlers.service_handlers.Worker.queue', return_value=True + org=org_fixture, env=env_fixture, name='dataset1', owner=user.username, group=group.name ) response = client.query( """ @@ -383,7 +354,7 @@ def test_delete_dataset(client, dataset, env1, org1, db, module_mocker, group, u """, datasetUri=deleted_dataset.datasetUri, deleteFromAWS=True, - username=user.userName, + username=user.username, groups=[group.name], ) assert response @@ -399,7 +370,7 @@ def test_delete_dataset(client, dataset, env1, org1, db, module_mocker, group, u } """, datasetUri=deleted_dataset.datasetUri, - username=user.userName, + username=user.username, groups=[group.name], ) assert response.data.getDataset is None @@ -417,13 +388,13 @@ def test_delete_dataset(client, dataset, env1, org1, db, module_mocker, group, u } """, filter=None, - username=user.userName, + username=user.username, groups=[group.name], ) assert response.data.listDatasets.count == 0 -def test_import_dataset(org1, env1, dataset1, client, group): +def test_import_dataset(org_fixture, env_fixture, dataset1, client, group): response = client.query( """ mutation importDataset($input:ImportDatasetInput){ @@ -446,8 +417,8 @@ def test_import_dataset(org1, env1, dataset1, client, group): username=dataset1.owner, groups=[group.name], input={ - 'organizationUri': org1.organizationUri, - 'environmentUri': env1.environmentUri, + 'organizationUri': org_fixture.organizationUri, + 'environmentUri': env_fixture.environmentUri, 'label': 'datasetImported', 'bucketName': 'dhimportedbucket', 'glueDatabaseName': 'dhimportedGlueDB', @@ -458,8 +429,8 @@ def test_import_dataset(org1, env1, dataset1, client, group): }, ) assert response.data.importDataset.label == 'datasetImported' - assert response.data.importDataset.AwsAccountId == env1.AwsAccountId - assert response.data.importDataset.region == env1.region + assert response.data.importDataset.AwsAccountId == env_fixture.AwsAccountId + assert response.data.importDataset.region == env_fixture.region assert response.data.importDataset.imported is True assert response.data.importDataset.importedS3Bucket is True assert response.data.importDataset.importedGlueDatabase is True @@ -471,16 +442,16 @@ def test_import_dataset(org1, env1, dataset1, client, group): assert 'dhimportedRole' in response.data.importDataset.IAMDatasetAdminRoleArn -def test_get_dataset_by_prefix(db, env1, org1): 
+def test_get_dataset_by_prefix(db, env_fixture, org_fixture): with db.scoped_session() as session: - dataset = dataall.db.models.Dataset( + dataset = Dataset( label='thisdataset', - environmentUri=env1.environmentUri, - organizationUri=org1.organizationUri, + environmentUri=env_fixture.environmentUri, + organizationUri=org_fixture.organizationUri, name='thisdataset', description='test', - AwsAccountId=env1.AwsAccountId, - region=env1.region, + AwsAccountId=env_fixture.AwsAccountId, + region=env_fixture.region, S3BucketName='insite-data-lake-raw-alpha-eu-west-1', GlueDatabaseName='db', IAMDatasetAdminRoleArn='role', @@ -494,7 +465,7 @@ def test_get_dataset_by_prefix(db, env1, org1): ) session.add(dataset) session.commit() - dataset_found: dataall.db.models.Dataset = dataall.db.api.Dataset.get_dataset_by_bucket_name( + dataset_found: Dataset = DatasetRepository.get_dataset_by_bucket_name( session, bucket='s3a://insite-data-lake-raw-alpha-eu-west-1/booker/volume_constraints/insite_version=1/volume_constraints.delta'.split( '//' @@ -509,7 +480,7 @@ def test_get_dataset_by_prefix(db, env1, org1): assert dataset_found.S3BucketName == 'insite-data-lake-raw-alpha-eu-west-1' -def test_stewardship(client, dataset, env1, org1, db, group2, group, user, patch_es): +def test_stewardship(client, dataset, env_fixture, org_fixture, db, group2, group, user, patch_es): response = client.query( """ mutation CreateDataset($input:NewDatasetInput){ @@ -532,18 +503,24 @@ def test_stewardship(client, dataset, env1, org1, db, group2, group, user, patch } } """, - username=user.userName, + username=user.username, groups=[group.name], input={ - 'owner': user.userName, + 'owner': user.username, 'label': f'stewardsds', 'description': 'test dataset {name}', 'businessOwnerEmail': 'jeff@amazon.com', 'tags': ['t1', 't2'], - 'environmentUri': env1.environmentUri, + 'environmentUri': env_fixture.environmentUri, 'SamlAdminGroupName': group.name, 'stewards': group2.name, - 'organizationUri': org1.organizationUri, + 'organizationUri': org_fixture.organizationUri, }, ) assert response.data.createDataset.stewards == group2.name + + +def test_dataset_stack(client, dataset_fixture, group): + dataset = dataset_fixture + response = update_stack_query(client, dataset.datasetUri, 'dataset', dataset.SamlAdminGroupName) + assert response.data.updateStack.targetUri == dataset.datasetUri diff --git a/tests/modules/datasets/test_dataset_count_votes.py b/tests/modules/datasets/test_dataset_count_votes.py new file mode 100644 index 000000000..540ccb5cd --- /dev/null +++ b/tests/modules/datasets/test_dataset_count_votes.py @@ -0,0 +1,38 @@ +from tests.modules.vote.test_vote import * + + +def test_count_votes(client, dataset_fixture): + response = count_votes_query( + client, dataset_fixture.datasetUri, 'dataset', dataset_fixture.SamlAdminGroupName + ) + assert response.data.countUpVotes == 0 + + +def test_upvote(patch_es, client, dataset_fixture): + response = upvote_mutation( + client, dataset_fixture.datasetUri, 'dataset', True, dataset_fixture.SamlAdminGroupName + ) + assert response.data.upVote.upvote + response = count_votes_query( + client, dataset_fixture.datasetUri, 'dataset', dataset_fixture.SamlAdminGroupName + ) + assert response.data.countUpVotes == 1 + response = get_vote_query( + client, dataset_fixture.datasetUri, 'dataset', dataset_fixture.SamlAdminGroupName + ) + assert response.data.getVote.upvote + + response = upvote_mutation( + client, dataset_fixture.datasetUri, 'dataset', False, dataset_fixture.SamlAdminGroupName + ) + 
assert not response.data.upVote.upvote + + response = get_vote_query( + client, dataset_fixture.datasetUri, 'dataset', dataset_fixture.SamlAdminGroupName + ) + assert not response.data.getVote.upvote + + response = count_votes_query( + client, dataset_fixture.datasetUri, 'dataset', dataset_fixture.SamlAdminGroupName + ) + assert response.data.countUpVotes == 0 diff --git a/tests/modules/datasets/test_dataset_feed.py b/tests/modules/datasets/test_dataset_feed.py new file mode 100644 index 000000000..64a0c5ba1 --- /dev/null +++ b/tests/modules/datasets/test_dataset_feed.py @@ -0,0 +1,11 @@ + +from dataall.modules.feed.api.registry import FeedRegistry +from dataall.modules.datasets_base.db.dataset_models import DatasetTableColumn + + +def test_dataset_registered(): + model = FeedRegistry.find_model("DatasetTableColumn") + assert model == DatasetTableColumn + + model = DatasetTableColumn() + assert "DatasetTableColumn" == FeedRegistry.find_target(model) diff --git a/tests/modules/datasets/test_dataset_glossary.py b/tests/modules/datasets/test_dataset_glossary.py new file mode 100644 index 000000000..393c317e8 --- /dev/null +++ b/tests/modules/datasets/test_dataset_glossary.py @@ -0,0 +1,160 @@ +from typing import List + +from dataall.modules.catalog.db.glossary_models import TermLink +from dataall.modules.datasets_base.db.dataset_models import DatasetTableColumn +from tests.modules.catalog.test_glossary import * + + +@pytest.fixture(scope='module', autouse=True) +def _columns(db, dataset_fixture, table_fixture) -> List[DatasetTableColumn]: + with db.scoped_session() as session: + cols = [] + for i in range(0, 10): + c = DatasetTableColumn( + datasetUri=dataset_fixture.datasetUri, + tableUri=table_fixture.tableUri, + label=f'c{i+1}', + AWSAccountId=dataset_fixture.AwsAccountId, + region=dataset_fixture.region, + GlueTableName='table', + typeName='String', + owner='user', + GlueDatabaseName=dataset_fixture.GlueDatabaseName, + ) + session.add(c) + cols.append(c) + yield cols + + +def test_dataset_link_term(client, t1, _columns, group): + col = _columns[0] + r = client.query( + """ + mutation LinkTerm( + $nodeUri:String!, + $targetUri:String!, + $targetType:String!, + ){ + linkTerm( + nodeUri:$nodeUri, + targetUri:$targetUri, + targetType:$targetType + ){ + linkUri + } + } + """, + nodeUri=t1.nodeUri, + targetUri=col.columnUri, + targetType='Column', + username='alice', + groups=[group.name], + ) + link_uri = r.data.linkTerm.linkUri + + r = client.query( + """ + query GetGlossaryTermLink($linkUri:String!){ + getGlossaryTermLink(linkUri:$linkUri){ + linkUri + created + target{ + __typename + ... 
on DatasetTableColumn{ + label + columnUri + } + } + } + } + """, + linkUri=link_uri, + username='alice', + ) + print(r) + + +def test_dataset_term_link_approval(db, client, t1, dataset_fixture, user, group): + response = client.query( + """ + mutation UpdateDataset($datasetUri:String!,$input:ModifyDatasetInput){ + updateDataset(datasetUri:$datasetUri,input:$input){ + datasetUri + label + tags + } + } + """, + username='alice', + groups=[group.name], + datasetUri=dataset_fixture.datasetUri, + input={ + 'terms': [t1.nodeUri], + 'KmsAlias': '' + }, + ) + with db.scoped_session() as session: + link: TermLink = ( + session.query(TermLink) + .filter(TermLink.nodeUri == t1.nodeUri) + .first() + ) + r = client.query( + """ + mutation ApproveTermAssociation($linkUri:String!){ + approveTermAssociation(linkUri:$linkUri) + } + """, + linkUri=link.linkUri, + username='alice', + groups=[group.name], + ) + assert r + link: TermLink = session.query(TermLink).get(link.linkUri) + assert link.approvedBySteward + + r = client.query( + """ + mutation DismissTermAssociation($linkUri:String!){ + dismissTermAssociation(linkUri:$linkUri) + } + """, + linkUri=link.linkUri, + username='alice', + groups=[group.name], + ) + assert r + link: TermLink = session.query(TermLink).get(link.linkUri) + assert not link.approvedBySteward + + +def test_get_column_term_associations(t1, db, client): + r = client.query( + """ + query GetTerm($nodeUri:String!){ + getTerm(nodeUri:$nodeUri){ + nodeUri + label + readme + associations{ + count + nodes{ + linkUri + target{ + ... on DatasetTableColumn{ + label + columnUri + } + } + } + } + } + + } + """, + nodeUri=t1.nodeUri, + username='alice', + ) + assert r.data.getTerm.nodeUri == t1.nodeUri + assert r.data.getTerm.label == t1.label + assert r.data.getTerm.readme == t1.readme diff --git a/tests/modules/datasets/test_dataset_indexers.py b/tests/modules/datasets/test_dataset_indexers.py new file mode 100644 index 000000000..1f1610946 --- /dev/null +++ b/tests/modules/datasets/test_dataset_indexers.py @@ -0,0 +1,50 @@ +from dataall.modules.datasets.indexers.location_indexer import DatasetLocationIndexer +from dataall.modules.datasets.indexers.table_indexer import DatasetTableIndexer +from dataall.modules.datasets.indexers.dataset_indexer import DatasetIndexer + + +def test_es_request(): + body = '{"preference":"SearchResult"}\n{"query":{"match_all":{}},"size":8,"_source":{"includes":["*"],"excludes":[]},"from":0}\n' + body = body.split('\n') + assert ( + body[1] + == '{"query":{"match_all":{}},"size":8,"_source":{"includes":["*"],"excludes":[]},"from":0}' + ) + import json + + assert json.loads(body[1]) == { + 'query': {'match_all': {}}, + 'size': 8, + '_source': {'includes': ['*'], 'excludes': []}, + 'from': 0, + } + + +def test_upsert_dataset(db, dataset_fixture, env): + with db.scoped_session() as session: + dataset_indexed = DatasetIndexer.upsert( + session, dataset_uri=dataset_fixture.datasetUri + ) + assert dataset_indexed.datasetUri == dataset_fixture.datasetUri + + +def test_upsert_table(db, dataset_fixture, table_fixture): + with db.scoped_session() as session: + table_indexed = DatasetTableIndexer.upsert(session, table_uri=table_fixture.tableUri) + assert table_indexed.tableUri == table_fixture.tableUri + + +def test_upsert_folder(db, dataset_fixture, folder_fixture): + with db.scoped_session() as session: + folder_indexed = DatasetLocationIndexer.upsert( + session=session, folder_uri=folder_fixture.locationUri + ) + assert folder_indexed.locationUri == 
folder_fixture.locationUri + + +def test_upsert_tables(db, dataset_fixture, folder_fixture): + with db.scoped_session() as session: + tables = DatasetTableIndexer.upsert_all( + session, dataset_uri=dataset_fixture.datasetUri + ) + assert len(tables) == 1 diff --git a/tests/modules/datasets/test_dataset_key_value_tag.py b/tests/modules/datasets/test_dataset_key_value_tag.py new file mode 100644 index 000000000..7ad81644f --- /dev/null +++ b/tests/modules/datasets/test_dataset_key_value_tag.py @@ -0,0 +1,33 @@ +from tests.core.stacks.test_keyvaluetag import update_key_value_tags, list_tags_query + + +def list_dataset_tags_query(client, dataset_fixture): + return list_tags_query(client, dataset_fixture.datasetUri, 'dataset', dataset_fixture.SamlAdminGroupName) + + +def test_empty_key_value_tags(client, dataset_fixture): + response = list_dataset_tags_query(client, dataset_fixture) + print(response) + assert len(response.data.listKeyValueTags) == 0 + + +def test_update_key_value_tags(client, dataset_fixture): + tags = [{'key': 'tag1', 'value': 'value1', 'cascade': False}] + response = update_key_value_tags( + client, dataset_fixture.datasetUri, 'dataset', tags, dataset_fixture.SamlAdminGroupName + ) + + assert len(response.data.updateKeyValueTags) == 1 + + response = list_dataset_tags_query(client, dataset_fixture) + assert response.data.listKeyValueTags[0].key == 'tag1' + assert response.data.listKeyValueTags[0].value == 'value1' + assert not response.data.listKeyValueTags[0].cascade + + response = update_key_value_tags( + client, dataset_fixture.datasetUri, 'dataset', [], dataset_fixture.SamlAdminGroupName + ) + assert len(response.data.updateKeyValueTags) == 0 + + response = list_dataset_tags_query(client, dataset_fixture) + assert len(response.data.listKeyValueTags) == 0 diff --git a/tests/modules/datasets/test_dataset_location.py b/tests/modules/datasets/test_dataset_location.py new file mode 100644 index 000000000..d06cb9628 --- /dev/null +++ b/tests/modules/datasets/test_dataset_location.py @@ -0,0 +1,115 @@ +from unittest.mock import MagicMock +import pytest + +from dataall.modules.datasets_base.db.dataset_models import Dataset + +@pytest.fixture(scope='module') +def dataset1(env_fixture, org_fixture, dataset, group) -> Dataset: + yield dataset( + org=org_fixture, env=env_fixture, name='dataset1', owner=env_fixture.owner, group=group.name + ) + + +def test_create_location(client, dataset1, user, group, patch_es, module_mocker): + mock_client = MagicMock() + module_mocker.patch("dataall.modules.datasets.services.dataset_location_service.S3LocationClient", mock_client) + response = client.query( + """ + mutation createDatasetStorageLocation($datasetUri:String!, $input:NewDatasetStorageLocationInput!){ + createDatasetStorageLocation(datasetUri:$datasetUri, input:$input){ + locationUri + S3Prefix + label + tags + } + } + """, + datasetUri=dataset1.datasetUri, + username=user.username, + groups=[group.name], + input={ + 'label': 'testing', + 'prefix': 'mylocation', + 'tags': ['test'], + 'terms': ['term'], + }, + ) + assert response.data.createDatasetStorageLocation.label == 'testing' + assert response.data.createDatasetStorageLocation.S3Prefix == 'mylocation' + assert 'test' in response.data.createDatasetStorageLocation.tags + + +def test_manage_dataset_location(client, dataset1, user, group): + response = client.query( + """ + query GetDataset($datasetUri:String!){ + getDataset(datasetUri:$datasetUri){ + label + AwsAccountId + description + region + imported + importedS3Bucket + locations{ 
+ nodes{ + locationUri + } + } + } + } + """, + datasetUri=dataset1.datasetUri, + username=user.username, + groups=[group.name], + ) + assert response.data.getDataset.locations.nodes[0].locationUri + + response = client.query( + """ + query getDatasetStorageLocation($locationUri:String!){ + getDatasetStorageLocation(locationUri:$locationUri){ + locationUri + S3Prefix + label + tags + } + } + """, + locationUri=response.data.getDataset.locations.nodes[0].locationUri, + username=user.username, + groups=[group.name], + ) + assert response.data.getDatasetStorageLocation.label == 'testing' + assert response.data.getDatasetStorageLocation.S3Prefix == 'mylocation' + + response = client.query( + """ + mutation updateDatasetStorageLocation($locationUri:String!, $input:ModifyDatasetStorageLocationInput!){ + updateDatasetStorageLocation(locationUri:$locationUri, input:$input){ + locationUri + S3Prefix + label + tags + } + } + """, + locationUri=response.data.getDatasetStorageLocation.locationUri, + username=user.username, + input={'label': 'testing2', 'terms': ['ert']}, + groups=[group.name], + ) + assert response.data.updateDatasetStorageLocation.label == 'testing2' + assert response.data.updateDatasetStorageLocation.S3Prefix == 'mylocation' + assert 'test' in response.data.updateDatasetStorageLocation.tags + + response = client.query( + """ + mutation deleteDatasetStorageLocation($locationUri: String!){ + deleteDatasetStorageLocation(locationUri:$locationUri) + } + """, + locationUri=response.data.updateDatasetStorageLocation.locationUri, + username=user.username, + groups=[group.name], + ) + assert response.data.deleteDatasetStorageLocation diff --git a/tests/modules/datasets/test_dataset_permissions.py b/tests/modules/datasets/test_dataset_permissions.py new file mode 100644 index 000000000..cb8febb63 --- /dev/null +++ b/tests/modules/datasets/test_dataset_permissions.py @@ -0,0 +1,123 @@ +from dataall.base.context import set_context, RequestContext +from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.permissions.db.resource_policy_repositories import ResourcePolicy +from dataall.base.db.exceptions import ResourceUnauthorized +from dataall.core.permissions.permissions import TENANT_ALL +from dataall.modules.datasets.services.dataset_permissions import DATASET_WRITE, UPDATE_DATASET, MANAGE_DATASETS, \ + DATASET_READ +from dataall.modules.datasets.services.dataset_service import DatasetService +from dataall.modules.datasets_base.db.dataset_models import Dataset +from dataall.modules.datasets_base.services.permissions import DATASET_TABLE_READ + +from tests.core.permissions.test_permission import * +from dataall.core.organizations.db.organization_repositories import Organization + + +def test_attach_resource_policy(db, user, group, dataset_fixture): + permissions(db, ENVIRONMENT_ALL + ORGANIZATION_ALL + DATASET_READ + DATASET_WRITE + DATASET_TABLE_READ) + with db.scoped_session() as session: + ResourcePolicy.attach_resource_policy( + session=session, + group=group.name, + permissions=DATASET_WRITE, + resource_uri=dataset_fixture.datasetUri, + resource_type=Dataset.__name__, + ) + assert ResourcePolicy.check_user_resource_permission( + session=session, + username=user.username, + groups=[group.name], + permission_name=UPDATE_DATASET, + resource_uri=dataset_fixture.datasetUri, + ) + + +def test_attach_tenant_policy( + db, user, group, dataset_fixture, permissions, tenant +): + with db.scoped_session() as session: + 
TenantPolicy.attach_group_tenant_policy( + session=session, + group=group.name, + permissions=[MANAGE_DATASETS], + tenant_name='dataall', + ) + + assert TenantPolicy.check_user_tenant_permission( + session=session, + username=user.username, + groups=[group.name], + permission_name=MANAGE_DATASETS, + tenant_name='dataall', + ) + + +def test_unauthorized_resource_policy( + db, user, group, dataset_fixture, permissions +): + with pytest.raises(ResourceUnauthorized): + with db.scoped_session() as session: + assert ResourcePolicy.check_user_resource_permission( + session=session, + username=user.username, + groups=[group.name], + permission_name='UNKNOWN_PERMISSION', + resource_uri=dataset_fixture.datasetUri, + ) + + +def test_create_dataset(db, user, group, dataset_fixture, permissions, tenant): + with db.scoped_session() as session: + set_context(RequestContext(db, user.username, [group.name])) + + TenantPolicy.attach_group_tenant_policy( + session=session, + group=group.name, + permissions=TENANT_ALL, + tenant_name='dataall', + ) + org_with_perm = Organization.create_organization( + session=session, + data={ + 'label': 'OrgWithPerm', + 'SamlGroupName': group.name, + 'description': 'desc', + 'tags': [], + }, + ) + env_with_perm = EnvironmentService.create_environment( + session=session, + uri=org_with_perm.organizationUri, + data={ + 'label': 'EnvWithPerm', + 'organizationUri': org_with_perm.organizationUri, + 'SamlGroupName': group.name, + 'description': 'desc', + 'AwsAccountId': '123456789012', + 'region': 'eu-west-1', + 'cdk_role_name': 'cdkrole', + }, + ) + + data = dict( + label='label', + owner='foo', + SamlAdminGroupName=group.name, + businessOwnerDelegationEmails=['foo@amazon.com'], + businessOwnerEmail=['bar@amazon.com'], + name='name', + S3BucketName='S3BucketName', + GlueDatabaseName='GlueDatabaseName', + KmsAlias='kmsalias', + AwsAccountId='123456789012', + region='eu-west-1', + IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', + IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', + ) + + dataset = DatasetService.create_dataset( + uri=env_with_perm.environmentUri, + admin_group=group.name, + data=data, + ) + assert dataset diff --git a/tests/modules/datasets/test_dataset_profiling.py b/tests/modules/datasets/test_dataset_profiling.py new file mode 100644 index 000000000..c3c380cd1 --- /dev/null +++ b/tests/modules/datasets/test_dataset_profiling.py @@ -0,0 +1,153 @@ +from unittest.mock import MagicMock + +import pytest + +from dataall.modules.datasets.api.dataset.enums import ConfidentialityClassification +from dataall.modules.datasets_base.db.dataset_models import DatasetProfilingRun, Dataset, DatasetTable + + +@pytest.fixture(scope='module', autouse=True) +def org2(org, user2, group2, tenant): + org2 = org('testorg2', group2, user2) + yield org2 + + +@pytest.fixture(scope='module', autouse=True) +def env2(env, org2, user2, group2, tenant): + env2 = env(org2, 'dev2', user2.username, group2.name, '2222222222', 'eu-west-1') + yield env2 + + +def start_profiling_run(client, dataset, table, user, group): + dataset.GlueProfilingJobName = ('profile-job',) + dataset.GlueProfilingTriggerSchedule = ('cron(* 2 * * ? 
*)',) + dataset.GlueProfilingTriggerName = ('profile-job',) + response = client.query( + """ + mutation startDatasetProfilingRun($input:StartDatasetProfilingRunInput){ + startDatasetProfilingRun(input:$input) + { + profilingRunUri + } + } + """, + username=user.username, + input={'datasetUri': dataset.datasetUri, 'GlueTableName': table.name}, + groups=[group.name], + ) + return response + + +def test_start_profiling_run_authorized(client, dataset_fixture, table_fixture, db, user, group): + response = start_profiling_run(client, dataset_fixture, table_fixture, user, group) + profiling = response.data.startDatasetProfilingRun + assert profiling.profilingRunUri + with db.scoped_session() as session: + profiling = session.query(DatasetProfilingRun).get( + profiling.profilingRunUri + ) + profiling.GlueJobRunId = 'jr_111111111111' + session.commit() + + +def test_start_profiling_run_unauthorized(dataset_fixture, table_fixture, client, db, user2, group2): + response = start_profiling_run(client, dataset_fixture, table_fixture, user2, group2) + assert 'UnauthorizedOperation' in response.errors[0].message + + +def test_get_table_profiling_run_authorized( + client, dataset_fixture, table_fixture, db, user, group +): + response = client.query( + """ + query getDatasetTableProfilingRun($tableUri:String!){ + getDatasetTableProfilingRun(tableUri:$tableUri){ + profilingRunUri + status + GlueTableName + } + } + """, + tableUri=table_fixture.tableUri, + groups=[group.name], + username=user.username, + ) + assert response.data.getDatasetTableProfilingRun['profilingRunUri'] + assert response.data.getDatasetTableProfilingRun['status'] == 'RUNNING' + assert response.data.getDatasetTableProfilingRun['GlueTableName'] == 'table1' + + +def test_get_table_profiling_run_unauthorized( + client, dataset_fixture, table_fixture, db, user2, group2 +): + response = client.query( + """ + query getDatasetTableProfilingRun($tableUri:String!){ + getDatasetTableProfilingRun(tableUri:$tableUri){ + profilingRunUri + status + GlueTableName + } + } + """, + tableUri=table_fixture.tableUri, + groups=[group2.name], + username=user2.username, + ) + assert 'UnauthorizedOperation' in response.errors[0].message + + +def test_list_table_profiling_runs_authorized( + client, dataset_fixture, table_fixture, db, user, group +): + response = client.query( + """ + query listDatasetTableProfilingRuns($tableUri:String!){ + listDatasetTableProfilingRuns(tableUri:$tableUri){ + count + nodes{ + profilingRunUri + status + GlueTableName + } + + } + } + """, + tableUri=table_fixture.tableUri, + groups=[group.name], + username=user.username, + ) + assert response.data.listDatasetTableProfilingRuns['count'] == 1 + assert response.data.listDatasetTableProfilingRuns['nodes'][0]['profilingRunUri'] + assert ( + response.data.listDatasetTableProfilingRuns['nodes'][0]['status'] == 'RUNNING' + ) + assert ( + response.data.listDatasetTableProfilingRuns['nodes'][0]['GlueTableName'] + == 'table1' + ) + + +def test_list_table_profiling_runs_unauthorized( + client, dataset_fixture, table_fixture, db, user2, group2 +): + response = client.query( + """ + query listDatasetTableProfilingRuns($tableUri:String!){ + listDatasetTableProfilingRuns(tableUri:$tableUri){ + count + nodes{ + profilingRunUri + status + GlueTableName + } + + } + } + """, + tableUri=table_fixture.tableUri, + groups=[group2.name], + username=user2.username, + ) + assert 'UnauthorizedOperation' in response.errors[0].message diff --git a/tests/modules/datasets/test_dataset_resource_found.py 
b/tests/modules/datasets/test_dataset_resource_found.py new file mode 100644 index 000000000..37af0d093 --- /dev/null +++ b/tests/modules/datasets/test_dataset_resource_found.py @@ -0,0 +1,136 @@ +from dataall.modules.datasets_base.db.dataset_models import Dataset +from dataall.modules.datasets.services.dataset_permissions import CREATE_DATASET + + +def get_env(client, env_fixture, group): + return client.query( + """ + query GetEnv($environmentUri:String!){ + getEnvironment(environmentUri:$environmentUri){ + organization{ + organizationUri + } + environmentUri + label + AwsAccountId + region + SamlGroupName + owner + stack{ + EcsTaskArn + EcsTaskId + } + parameters { + key + value + } + } + } + """, + username='alice', + environmentUri=env_fixture.environmentUri, + groups=[group.name], + ) + + +def test_dataset_resource_found(db, client, env_fixture, org_fixture, group2, user, group3, group, dataset): + response = client.query( + """ + query listEnvironmentGroupInvitationPermissions($environmentUri:String){ + listEnvironmentGroupInvitationPermissions(environmentUri:$environmentUri){ + permissionUri + name + type + } + } + """, + username=user.username, + groups=[group.name, group2.name], + filter={}, + ) + + env_permissions = [ + p.name for p in response.data.listEnvironmentGroupInvitationPermissions + ] + assert CREATE_DATASET in env_permissions + + response = client.query( + """ + mutation inviteGroupOnEnvironment($input:InviteGroupOnEnvironmentInput){ + inviteGroupOnEnvironment(input:$input){ + environmentUri + } + } + """, + username='alice', + input=dict( + environmentUri=env_fixture.environmentUri, + groupUri=group2.name, + permissions=env_permissions, + environmentIAMRoleName='myteamrole', + ), + groups=[group.name, group2.name], + ) + print(response) + assert response.data.inviteGroupOnEnvironment + + response = client.query( + """ + query getGroup($groupUri:String!, $environmentUri:String){ + getGroup(groupUri:$groupUri){ + environmentPermissions(environmentUri:$environmentUri){ + name + } + } + } + """, + username=user.username, + groups=[group2.name], + groupUri=group2.name, + environmentUri=env_fixture.environmentUri, + ) + env_permissions = [p.name for p in response.data.getGroup.environmentPermissions] + assert CREATE_DATASET in env_permissions + + dataset = dataset( + org=org_fixture, env=env_fixture, name='dataset1', owner='bob', group=group2.name + ) + assert dataset.datasetUri + + response = client.query( + """ + mutation removeGroupFromEnvironment($environmentUri: String!, $groupUri: String!){ + removeGroupFromEnvironment(environmentUri: $environmentUri, groupUri: $groupUri){ + environmentUri + } + } + """, + username='alice', + environmentUri=env_fixture.environmentUri, + groupUri=group2.name, + groups=[group.name, group2.name], + ) + print(response) + + assert 'EnvironmentResourcesFound' in response.errors[0].message + with db.scoped_session() as session: + dataset = session.query(Dataset).get(dataset.datasetUri) + session.delete(dataset) + session.commit() + + response = client.query( + """ + mutation removeGroupFromEnvironment($environmentUri: String!, $groupUri: String!){ + removeGroupFromEnvironment(environmentUri: $environmentUri, groupUri: $groupUri){ + environmentUri + } + } + """, + username='alice', + environmentUri=env_fixture.environmentUri, + groupUri=group2.name, + groups=[group.name, group2.name], + ) + print(response) + assert response.data.removeGroupFromEnvironment + diff --git a/tests/modules/datasets/test_dataset_stack.py 
b/tests/modules/datasets/test_dataset_stack.py new file mode 100644 index 000000000..a8573c009 --- /dev/null +++ b/tests/modules/datasets/test_dataset_stack.py @@ -0,0 +1,85 @@ +import json +from unittest.mock import MagicMock + +import pytest +from aws_cdk import App + +from dataall.core.environment.db.environment_models import Environment +from dataall.modules.datasets.cdk.dataset_stack import DatasetStack +from dataall.modules.datasets_base.db.dataset_models import Dataset + +@pytest.fixture(scope='module', autouse=True) +def dataset(db, env_fixture: Environment) -> Dataset: + with db.scoped_session() as session: + dataset = Dataset( + label='thisdataset', + environmentUri=env_fixture.environmentUri, + organizationUri=env_fixture.organizationUri, + name='thisdataset', + description='test', + AwsAccountId=env_fixture.AwsAccountId, + region=env_fixture.region, + S3BucketName='bucket', + GlueDatabaseName='db', + IAMDatasetAdminRoleArn='role', + IAMDatasetAdminUserArn='xxx', + KmsAlias='xxx', + owner='me', + confidentiality='C1', + businessOwnerEmail='jeff', + businessOwnerDelegationEmails=['andy'], + SamlAdminGroupName=env_fixture.SamlGroupName, + GlueCrawlerName='dhCrawler', + ) + session.add(dataset) + yield dataset + + +@pytest.fixture(scope='function', autouse=True) +def patch_methods(mocker, db, dataset, env_fixture, org_fixture): + mocker.patch('dataall.modules.datasets.cdk.dataset_stack.DatasetStack.get_engine', return_value=db) + mocker.patch( + 'dataall.modules.datasets.cdk.dataset_stack.DatasetStack.get_target', return_value=dataset + ) + mocker.patch( + 'dataall.base.aws.sts.SessionHelper.get_delegation_role_name', + return_value="dataall-pivot-role-name-pytest", + ) + lf_client = MagicMock() + mocker.patch( + 'dataall.modules.datasets.cdk.dataset_stack.LakeFormationDatasetClient', + return_value=lf_client, + ) + lf_client.return_value.check_existing_lf_registered_location = False + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_target', + return_value=dataset, + ) + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_engine', + return_value=db, + ) + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_environment', + return_value=env_fixture, + ) + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_organization', + return_value=org_fixture, + ) + + +@pytest.fixture(scope='function', autouse=True) +def template(dataset): + app = App() + DatasetStack(app, 'Dataset', target_uri=dataset.datasetUri) + return json.dumps(app.synth().get_stack_by_name('Dataset').template) + + +def test_resources_created(template): + assert 'AWS::S3::Bucket' in template + assert 'AWS::KMS::Key' in template + assert 'AWS::IAM::Role' in template + assert 'AWS::IAM::Policy' in template + assert 'AWS::S3::BucketPolicy' in template + assert 'AWS::Glue::Job' in template diff --git a/tests/modules/datasets/test_dataset_table.py b/tests/modules/datasets/test_dataset_table.py new file mode 100644 index 000000000..178f062fa --- /dev/null +++ b/tests/modules/datasets/test_dataset_table.py @@ -0,0 +1,282 @@ +from dataall.modules.datasets.services.dataset_table_service import DatasetTableService +from dataall.modules.datasets_base.db.dataset_models import DatasetTableColumn, DatasetTable, Dataset + + +def test_add_tables(table, dataset_fixture, db): + for i in range(0, 10): + table(dataset=dataset_fixture, name=f'table{i+1}', username=dataset_fixture.owner) + + with db.scoped_session() as 
session: + nb = session.query(DatasetTable).count() + assert nb == 10 + + +def test_update_table(client, table, dataset_fixture, db, user, group): + table_to_update = table( + dataset=dataset_fixture, name=f'table_to_update', username=dataset_fixture.owner + ) + response = client.query( + """ + mutation UpdateDatasetTable($tableUri:String!,$input:ModifyDatasetTableInput!){ + updateDatasetTable(tableUri:$tableUri,input:$input){ + tableUri + description + tags + } + } + """, + username=user.username, + groups=[group.name], + tableUri=table_to_update.tableUri, + input={ + 'description': 'test update', + 'tags': ['t1', 't2'], + }, + ) + assert response.data.updateDatasetTable.description == 'test update' + assert 't1' in response.data.updateDatasetTable.tags + + +def test_add_columns(table, dataset_fixture, db): + with db.scoped_session() as session: + table = ( + session.query(DatasetTable) + .filter(DatasetTable.name == 'table1') + .first() + ) + table_col = DatasetTableColumn( + name='col1', + description='None', + label='col1', + owner=table.owner, + datasetUri=table.datasetUri, + tableUri=table.tableUri, + AWSAccountId=table.AWSAccountId, + GlueDatabaseName=table.GlueDatabaseName, + GlueTableName=table.GlueTableName, + region=table.region, + typeName='String', + ) + session.add(table_col) + + +def test_list_dataset_tables(client, dataset_fixture): + q = """ + query GetDataset($datasetUri:String!,$tableFilter:DatasetTableFilter){ + getDataset(datasetUri:$datasetUri){ + datasetUri + tables(filter:$tableFilter){ + count + nodes{ + tableUri + name + label + GlueDatabaseName + GlueTableName + S3Prefix + } + } + } + } + """ + response = client.query( + q, + username=dataset_fixture.owner, + datasetUri=dataset_fixture.datasetUri, + tableFilter={'pageSize': 100}, + groups=[dataset_fixture.SamlAdminGroupName], + ) + assert response.data.getDataset.tables.count >= 10 + assert len(response.data.getDataset.tables.nodes) >= 10 + + response = client.query( + q, + username=dataset_fixture.owner, + datasetUri=dataset_fixture.datasetUri, + tableFilter={'pageSize': 3}, + groups=[dataset_fixture.SamlAdminGroupName], + ) + assert response.data.getDataset.tables.count >= 10 + assert len(response.data.getDataset.tables.nodes) == 3 + + response = client.query( + q, + username=dataset_fixture.owner, + datasetUri=dataset_fixture.datasetUri, + tableFilter={'pageSize': 100, 'term': 'table1'}, + groups=[dataset_fixture.SamlAdminGroupName], + ) + assert response.data.getDataset.tables.count == 2 + assert len(response.data.getDataset.tables.nodes) == 2 + + +def test_update_dataset_table_column(client, table, dataset_fixture, db): + with db.scoped_session() as session: + table = ( + session.query(DatasetTable) + .filter(DatasetTable.name == 'table1') + .first() + ) + column = ( + session.query(DatasetTableColumn) + .filter(DatasetTableColumn.tableUri == table.tableUri) + .first() + ) + response = client.query( + """ + mutation updateDatasetTableColumn($columnUri:String!,$input:DatasetTableColumnInput){ + updateDatasetTableColumn(columnUri:$columnUri,input:$input){ + description + } + } + """, + username=dataset_fixture.owner, + columnUri=column.columnUri, + input={'description': 'My new description'}, + groups=[dataset_fixture.SamlAdminGroupName], + ) + print('response', response) + assert ( + response.data.updateDatasetTableColumn.description == 'My new description' + ) + + column = session.query(DatasetTableColumn).get( + column.columnUri + ) + assert column.description == 'My new description' + response = 
client.query( + """ + mutation updateDatasetTableColumn($columnUri:String!,$input:DatasetTableColumnInput){ + updateDatasetTableColumn(columnUri:$columnUri,input:$input){ + description + } + } + """, + username='unauthorized', + columnUri=column.columnUri, + input={'description': 'My new description'}, + ) + assert 'Unauthorized' in response.errors[0].message + + +def test_sync_tables_and_columns(client, table, dataset_fixture, db): + with db.scoped_session() as session: + table = ( + session.query(DatasetTable) + .filter(DatasetTable.name == 'table1') + .first() + ) + column = ( + session.query(DatasetTableColumn) + .filter(DatasetTableColumn.tableUri == table.tableUri) + .first() + ) + glue_tables = [ + { + 'Name': 'new_table', + 'DatabaseName': dataset_fixture.GlueDatabaseName, + 'StorageDescriptor': { + 'Columns': [ + { + 'Name': 'col1', + 'Type': 'string', + 'Comment': 'comment_col', + 'Parameters': {'colp1': 'p1'}, + }, + ], + 'Location': f's3://{dataset_fixture.S3BucketName}/table1', + 'Parameters': {'p1': 'p1'}, + }, + 'PartitionKeys': [ + { + 'Name': 'partition1', + 'Type': 'string', + 'Comment': 'comment_partition', + 'Parameters': {'partition_1': 'p1'}, + }, + ], + }, + { + 'Name': 'table1', + 'DatabaseName': dataset_fixture.GlueDatabaseName, + 'StorageDescriptor': { + 'Columns': [ + { + 'Name': 'col1', + 'Type': 'string', + 'Comment': 'comment_col', + 'Parameters': {'colp1': 'p1'}, + }, + ], + 'Location': f's3://{dataset_fixture.S3BucketName}/table1', + 'Parameters': {'p1': 'p1'}, + }, + 'PartitionKeys': [ + { + 'Name': 'partition1', + 'Type': 'string', + 'Comment': 'comment_partition', + 'Parameters': {'partition_1': 'p1'}, + }, + ], + }, + ] + + assert DatasetTableService.sync_existing_tables(session, dataset_fixture.datasetUri, glue_tables) + new_table: DatasetTable = ( + session.query(DatasetTable) + .filter(DatasetTable.name == 'new_table') + .first() + ) + assert new_table + assert new_table.GlueTableName == 'new_table' + columns: [DatasetTableColumn] = ( + session.query(DatasetTableColumn) + .filter(DatasetTableColumn.tableUri == new_table.tableUri) + .order_by(DatasetTableColumn.columnType.asc()) + .all() + ) + assert len(columns) == 2 + assert columns[0].columnType == 'column' + assert columns[1].columnType == 'partition_0' + + existing_table: DatasetTable = ( + session.query(DatasetTable) + .filter(DatasetTable.name == 'table1') + .first() + ) + assert existing_table + assert existing_table.GlueTableName == 'table1' + columns: [DatasetTableColumn] = ( + session.query(DatasetTableColumn) + .filter(DatasetTableColumn.tableUri == new_table.tableUri) + .order_by(DatasetTableColumn.columnType.asc()) + .all() + ) + assert len(columns) == 2 + assert columns[0].columnType == 'column' + assert columns[1].columnType == 'partition_0' + + deleted_table: DatasetTable = ( + session.query(DatasetTable) + .filter(DatasetTable.name == 'table2') + .first() + ) + assert deleted_table.LastGlueTableStatus == 'Deleted' + + +def test_delete_table(client, table, dataset_fixture, db, group): + table_to_delete = table( + dataset=dataset_fixture, name=f'table_to_update', username=dataset_fixture.owner + ) + response = client.query( + """ + mutation deleteDatasetTable($tableUri:String!){ + deleteDatasetTable(tableUri:$tableUri) + } + """, + username='alice', + groups=[group.name], + tableUri=table_to_delete.tableUri, + ) + assert response.data.deleteDatasetTable diff --git a/tests/modules/datasets/test_environment_stack_with_dataset.py 
b/tests/modules/datasets/test_environment_stack_with_dataset.py new file mode 100644 index 000000000..6448c8ee9 --- /dev/null +++ b/tests/modules/datasets/test_environment_stack_with_dataset.py @@ -0,0 +1,141 @@ +import pytest +from aws_cdk import App +from aws_cdk.assertions import Template, Match + +from dataall.core.environment.cdk.environment_stack import EnvironmentSetup +from dataall.core.environment.db.environment_models import EnvironmentGroup +from dataall.modules.datasets_base.db.dataset_models import Dataset + + +@pytest.fixture(scope='function', autouse=True) +def patch_extensions(mocker): +    for extension in EnvironmentSetup._EXTENSIONS: +        if extension.__name__ not in ["DatasetCustomResourcesExtension", "DatasetGlueProfilerExtension"]: +            mocker.patch( +                f"{extension.__module__}.{extension.__name__}.extent", +                return_value=True, +            ) + + +@pytest.fixture(scope='function', autouse=True) +def another_group(db, env_fixture): +    with db.scoped_session() as session: +        env_group: EnvironmentGroup = EnvironmentGroup( +            environmentUri=env_fixture.environmentUri, +            groupUri='anothergroup', +            environmentIAMRoleArn='anotherGroupArn', +            environmentIAMRoleName='anotherGroupRole', +            environmentAthenaWorkGroup='workgroup', +        ) +        session.add(env_group) +        dataset = Dataset( +            label='thisdataset', +            environmentUri=env_fixture.environmentUri, +            organizationUri=env_fixture.organizationUri, +            name='anotherdataset', +            description='test', +            AwsAccountId=env_fixture.AwsAccountId, +            region=env_fixture.region, +            S3BucketName='bucket', +            GlueDatabaseName='db', +            IAMDatasetAdminRoleArn='role', +            IAMDatasetAdminUserArn='xxx', +            KmsAlias='xxx', +            owner='me', +            confidentiality='C1', +            businessOwnerEmail='jeff', +            businessOwnerDelegationEmails=['andy'], +            SamlAdminGroupName=env_group.groupUri, +            GlueCrawlerName='dhCrawler', +        ) +        session.add(dataset) +        yield env_group + + +@pytest.fixture(scope='function', autouse=True) +def patch_methods(mocker, db, env_fixture, another_group, permissions): +    mocker.patch( +        'dataall.core.environment.cdk.environment_stack.EnvironmentSetup.get_engine', +        return_value=db, +    ) +    mocker.patch( +        'dataall.base.aws.sts.SessionHelper.get_delegation_role_name', +        return_value='dataall-pivot-role-name-pytest', +    ) +    mocker.patch( +        'dataall.base.aws.parameter_store.ParameterStoreManager.get_parameter_value', +        return_value='False', +    ) +    mocker.patch( +        'dataall.core.environment.cdk.environment_stack.EnvironmentSetup.get_target', +        return_value=env_fixture, +    ) +    mocker.patch( +        'dataall.core.environment.cdk.environment_stack.EnvironmentSetup.get_environment_groups', +        return_value=[another_group], +    ) +    mocker.patch( +        'dataall.base.aws.sts.SessionHelper.get_account', +        return_value='012345678901x', +    ) +    mocker.patch('dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_engine', return_value=db) +    mocker.patch( +        'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_target', +        return_value=env_fixture, +    ) +    mocker.patch( +        'dataall.core.environment.cdk.environment_stack.EnvironmentSetup.get_environment_group_permissions', +        return_value=[permission.name for permission in permissions], +    ) +    mocker.patch( +        'dataall.base.aws.sts.SessionHelper.get_external_id_secret', +        return_value='secretIdvalue', +    ) + + +def test_resources_created(env_fixture, org_fixture): +    app = App() + +    # Create the Stack +    stack = EnvironmentSetup(app, 'Environment', target_uri=env_fixture.environmentUri) +    app.synth() +    # Prepare the stack for assertions. 
+ template = Template.from_stack(stack) + + # Assert that we have created: + template.resource_properties_count_is( + type="AWS::S3::Bucket", + props={ + 'BucketName': env_fixture.EnvironmentDefaultBucketName, + 'BucketEncryption': { + 'ServerSideEncryptionConfiguration': [{ + 'ServerSideEncryptionByDefault': {'SSEAlgorithm': 'AES256'} + }] + }, + 'PublicAccessBlockConfiguration': { + 'BlockPublicAcls': True, + 'BlockPublicPolicy': True, + 'IgnorePublicAcls': True, + 'RestrictPublicBuckets': True + }, + }, + count=1 + ) + template.resource_properties_count_is( + type="AWS::Lambda::Function", + props={ + 'FunctionName': Match.string_like_regexp("^.*lf-settings-handler.*$"), + }, + count=1 + ) + template.resource_properties_count_is( + type="AWS::Lambda::Function", + props={ + 'FunctionName': Match.string_like_regexp("^.*gluedb-lf-handler.*$"), + }, + count=1 + ) + template.resource_count_is("AWS::Lambda::Function", 5) + template.resource_count_is("AWS::SSM::Parameter", 5) + template.resource_count_is("AWS::IAM::Role", 5) + template.resource_count_is("AWS::IAM::Policy", 4) diff --git a/tests/modules/datasets/test_glue_profiler.py b/tests/modules/datasets/test_glue_profiler.py new file mode 100644 index 000000000..7e45695c2 --- /dev/null +++ b/tests/modules/datasets/test_glue_profiler.py @@ -0,0 +1,8 @@ +from dataall.modules.datasets.cdk.dataset_glue_profiler_extension import DatasetGlueProfilerExtension +from pathlib import Path + + +def test_glue_profiler_exist(): + path = DatasetGlueProfilerExtension.get_path_to_asset() + assert Path(path).exists() + diff --git a/tests/api/test_share.py b/tests/modules/datasets/test_share.py similarity index 84% rename from tests/api/test_share.py rename to tests/modules/datasets/test_share.py index 71af1c5c5..c868d078a 100644 --- a/tests/api/test_share.py +++ b/tests/modules/datasets/test_share.py @@ -2,7 +2,14 @@ import typing import pytest -import dataall +from dataall.core.environment.db.environment_models import Environment, EnvironmentGroup +from dataall.core.organizations.db.organization_models import Organization +from dataall.modules.dataset_sharing.api.enums import ShareableType, PrincipalType +from dataall.modules.dataset_sharing.db.enums import ShareObjectActions, ShareItemActions, ShareObjectStatus, \ + ShareItemStatus +from dataall.modules.dataset_sharing.db.share_object_models import ShareObject, ShareObjectItem +from dataall.modules.dataset_sharing.db.share_object_repositories import ShareObjectRepository, ShareItemSM, ShareObjectSM +from dataall.modules.datasets_base.db.dataset_models import DatasetTable, Dataset def random_table_name(): @@ -19,36 +26,35 @@ def cpltz(l): @pytest.fixture(scope='module') def org1(org: typing.Callable, user, group, tenant): # user, group and tenant are fixtures defined in conftest - yield org('testorg', user.userName, group.name) + yield org('testorg', group, user) @pytest.fixture(scope='module') -def env1(environment: typing.Callable, org1: dataall.db.models.Organization, user, group - ) -> dataall.db.models.Environment: +def env1(env: typing.Callable, org1: Organization, user, group + ) -> Environment: # user, group and tenant are fixtures defined in conftest - yield environment( - organization=org1, - awsAccountId="1" * 12, - label="source_environment", - owner=user.userName, - samlGroupName=group.name, - environmentDefaultIAMRoleName=f"source-{group.name}", - dashboardsEnabled=False, + yield env( + org=org1, + account="1" * 12, + envname="source_environment", + owner=user.username, + group=group.name, + 
role=f"source-{group.name}", ) @pytest.fixture(scope='module') def env1group(environment_group: typing.Callable, env1, user, group - ) -> dataall.db.models.EnvironmentGroup: + ) -> EnvironmentGroup: yield environment_group( environment=env1, - group=group, + group=group.name, ) @pytest.fixture(scope='module') -def dataset1(dataset_model: typing.Callable, org1: dataall.db.models.Organization, env1: dataall.db.models.Environment - ) -> dataall.db.models.Dataset: +def dataset1(dataset_model: typing.Callable, org1: Organization, env1: Environment + ) -> Dataset: yield dataset_model( organization=org1, environment=env1, @@ -57,46 +63,44 @@ def dataset1(dataset_model: typing.Callable, org1: dataall.db.models.Organizatio @pytest.fixture(scope='module') -def tables1(table: typing.Callable, dataset1: dataall.db.models.Dataset): +def tables1(table: typing.Callable, dataset1: Dataset): for i in range(1, 100): table(dataset1, name=random_table_name(), username=dataset1.owner) @pytest.fixture(scope="module", autouse=True) -def table1(table: typing.Callable, dataset1: dataall.db.models.Dataset, - user: dataall.db.models.User) -> dataall.db.models.DatasetTable: +def table1(table: typing.Callable, dataset1: Dataset) -> DatasetTable: yield table( dataset=dataset1, name="table1", - username=user.userName + username='alice' ) @pytest.fixture(scope='module') -def org2(org: typing.Callable, user2, group2, tenant) -> dataall.db.models.Organization: - yield org('org2', user2.userName, group2.name) +def org2(org: typing.Callable, group2, user2) -> Organization: + yield org('org2', group2, user2) @pytest.fixture(scope='module') def env2( - environment: typing.Callable, org2: dataall.db.models.Organization, user2, group2 -) -> dataall.db.models.Environment: + env: typing.Callable, org2: Organization, user2, group2 +) -> Environment: # user, group and tenant are fixtures defined in conftest - yield environment( - organization=org2, - awsAccountId="2" * 12, - label="target_environment", - owner=user2.userName, - samlGroupName=group2.name, - environmentDefaultIAMRoleName=f"source-{group2.name}", - dashboardsEnabled=False, + yield env( + org=org2, + account="2" * 12, + envname="target_environment", + owner=user2.username, + group=group2.name, + role=f"source-{group2.name}", ) @pytest.fixture(scope='module') def dataset2( - dataset_model: typing.Callable, org2: dataall.db.models.Organization, env2: dataall.db.models.Environment -) -> dataall.db.models.Dataset: + dataset_model: typing.Callable, org2: Organization, env2: Environment +) -> Dataset: yield dataset_model( organization=org2, environment=env2, @@ -111,20 +115,19 @@ def tables2(table, dataset2): @pytest.fixture(scope="module", autouse=True) -def table2(table: typing.Callable, dataset2: dataall.db.models.Dataset, - user2: dataall.db.models.User) -> dataall.db.models.DatasetTable: +def table2(table: typing.Callable, dataset2: Dataset) -> DatasetTable: yield table( dataset=dataset2, name="table2", - username=user2.userName + username='bob' ) @pytest.fixture(scope='module') -def env2group(environment_group: typing.Callable, env2, user2, group2) -> dataall.db.models.EnvironmentGroup: +def env2group(environment_group: typing.Callable, env2, user2, group2) -> EnvironmentGroup: yield environment_group( environment=env2, - group=group2, + group=group2.name, ) @@ -135,18 +138,18 @@ def share1_draft( user2, group2, share: typing.Callable, - dataset1: dataall.db.models.Dataset, - env2: dataall.db.models.Environment, - env2group: dataall.db.models.EnvironmentGroup, -) -> 
dataall.db.models.ShareObject: + dataset1: Dataset, + env2: Environment, + env2group: EnvironmentGroup, +) -> ShareObject: share1 = share( dataset=dataset1, environment=env2, env_group=env2group, - owner=user2.userName, - status=dataall.api.constants.ShareObjectStatus.Draft.value + owner=user2.username, + status=ShareObjectStatus.Draft.value ) - + yield share1 # Cleanup share @@ -195,14 +198,14 @@ def share1_draft( @pytest.fixture(scope='function') def share1_item_pa( share_item: typing.Callable, - share1_draft: dataall.db.models.ShareObject, - table1: dataall.db.models.DatasetTable -) -> dataall.db.models.ShareObjectItem: + share1_draft: ShareObject, + table1: DatasetTable +) -> ShareObjectItem: # Cleaned up with share1_draft yield share_item( share=share1_draft, table=table1, - status=dataall.api.constants.ShareItemStatus.PendingApproval.value + status=ShareItemStatus.PendingApproval.value ) @@ -213,16 +216,16 @@ def share2_submitted( user2, group2, share: typing.Callable, - dataset1: dataall.db.models.Dataset, - env2: dataall.db.models.Environment, - env2group: dataall.db.models.EnvironmentGroup, -) -> dataall.db.models.ShareObject: + dataset1: Dataset, + env2: Environment, + env2group: EnvironmentGroup, +) -> ShareObject: share2 = share( dataset=dataset1, environment=env2, env_group=env2group, - owner=user2.userName, - status=dataall.api.constants.ShareObjectStatus.Submitted.value + owner=user2.username, + status=ShareObjectStatus.Submitted.value ) yield share2 # Cleanup share @@ -270,14 +273,14 @@ def share2_submitted( @pytest.fixture(scope='function') def share2_item_pa( share_item: typing.Callable, - share2_submitted: dataall.db.models.ShareObject, - table1: dataall.db.models.DatasetTable -) -> dataall.db.models.ShareObjectItem: + share2_submitted: ShareObject, + table1: DatasetTable +) -> ShareObjectItem: # Cleaned up with share2 yield share_item( share=share2_submitted, table=table1, - status=dataall.api.constants.ShareItemStatus.PendingApproval.value + status=ShareItemStatus.PendingApproval.value ) @@ -288,16 +291,16 @@ def share3_processed( user2, group2, share: typing.Callable, - dataset1: dataall.db.models.Dataset, - env2: dataall.db.models.Environment, - env2group: dataall.db.models.EnvironmentGroup, -) -> dataall.db.models.ShareObject: + dataset1: Dataset, + env2: Environment, + env2group: EnvironmentGroup, +) -> ShareObject: share3 = share( dataset=dataset1, environment=env2, env_group=env2group, - owner=user2.userName, - status=dataall.api.constants.ShareObjectStatus.Processed.value + owner=user2.username, + status=ShareObjectStatus.Processed.value ) yield share3 # Cleanup share @@ -345,14 +348,14 @@ def share3_processed( @pytest.fixture(scope='function') def share3_item_shared( share_item: typing.Callable, - share3_processed: dataall.db.models.ShareObject, - table1: dataall.db.models.DatasetTable -) -> dataall.db.models.ShareObjectItem: + share3_processed: ShareObject, + table1:DatasetTable +) -> ShareObjectItem: # Cleaned up with share3 yield share_item( share=share3_processed, table=table1, - status=dataall.api.constants.ShareItemStatus.Share_Succeeded.value + status=ShareItemStatus.Share_Succeeded.value ) @@ -360,16 +363,16 @@ def share3_item_shared( def share4_draft( user2, share: typing.Callable, - dataset1: dataall.db.models.Dataset, - env2: dataall.db.models.Environment, - env2group: dataall.db.models.EnvironmentGroup, -) -> dataall.db.models.ShareObject: + dataset1: Dataset, + env2: Environment, + env2group: EnvironmentGroup, +) -> ShareObject: yield share( 
dataset=dataset1, environment=env2, env_group=env2group, - owner=user2.userName, - status=dataall.api.constants.ShareObjectStatus.Draft.value + owner=user2.username, + status=ShareObjectStatus.Draft.value ) @@ -378,7 +381,7 @@ def test_init(tables1, tables2): # Queries & mutations -def create_share_object(client, userName, group, groupUri, environmentUri, datasetUri, itemUri=None): +def create_share_object(client, username, group, groupUri, environmentUri, datasetUri, itemUri=None): q = """ mutation CreateShareObject( $datasetUri: String! @@ -404,17 +407,17 @@ def create_share_object(client, userName, group, groupUri, environmentUri, datas response = client.query( q, - username=userName, + username=username, groups=[group.name], datasetUri=datasetUri, - itemType=dataall.api.constants.ShareableType.Table.value if itemUri else None, + itemType=ShareableType.Table.value if itemUri else None, itemUri=itemUri, input={ 'environmentUri': environmentUri, 'groupUri': groupUri, 'principalId': groupUri, - 'principalType': dataall.api.constants.PrincipalType.Group.value, - 'requestPurpose': 'testShare' + 'principalType': PrincipalType.Group.value, + 'requestPurpose': 'testShare', }, ) @@ -475,7 +478,7 @@ def get_share_object(client, user, group, shareUri, filter): response = client.query( q, - username=user.userName, + username=user.username, groups=[group.name], shareUri=shareUri, filter=filter, @@ -494,7 +497,7 @@ def update_share_request_purpose(client, user, group, shareUri, requestPurpose): response = client.query( q, - username=user.userName, + username=user.username, groups=[group.name], shareUri=shareUri, requestPurpose=requestPurpose, @@ -513,7 +516,7 @@ def update_share_reject_purpose(client, user, group, shareUri, rejectPurpose): response = client.query( q, - username=user.userName, + username=user.username, groups=[group.name], shareUri=shareUri, rejectPurpose=rejectPurpose, @@ -522,6 +525,7 @@ def update_share_reject_purpose(client, user, group, shareUri, rejectPurpose): print('Update share reject purpose response: ', response) return response + def list_dataset_share_objects(client, user, group, datasetUri): q = """ query ListDatasetShareObjects( @@ -568,7 +572,7 @@ def list_dataset_share_objects(client, user, group, datasetUri): response = client.query( q, - username=user.userName, + username=user.username, groups=[group.name], datasetUri=datasetUri, ) @@ -590,7 +594,7 @@ def get_share_requests_to_me(client, user, group): """ response = client.query( q, - username=user.userName, + username=user.username, groups=[group.name] ) # Print response @@ -611,7 +615,7 @@ def get_share_requests_from_me(client, user, group): """ response = client.query( q, - username=user.userName, + username=user.username, groups=[group.name] ) # Print response @@ -634,7 +638,7 @@ def add_share_item(client, user, group, shareUri, itemUri, itemType): response = client.query( q, - username=user.userName, + username=user.username, groups=[group.name], shareUri=shareUri, input={ @@ -656,7 +660,7 @@ def remove_share_item(client, user, group, shareItemUri): response = client.query( q, - username=user.userName, + username=user.username, groups=[group.name], shareItemUri=shareItemUri ) @@ -693,7 +697,7 @@ def submit_share_object(client, user, group, shareUri): response = client.query( q, - username=user.userName, + username=user.username, groups=[group.name], shareUri=shareUri, ) @@ -715,7 +719,7 @@ def approve_share_object(client, user, group, shareUri): response = client.query( q, - username=user.userName, + 
username=user.username, groups=[group.name], shareUri=shareUri, ) @@ -737,7 +741,7 @@ def reject_share_object(client, user, group, shareUri): response = client.query( q, - username=user.userName, + username=user.username, groups=[group.name], shareUri=shareUri, rejectPurpose="testRejectShare" @@ -759,7 +763,7 @@ def revoke_items_share_object(client, user, group, shareUri, revoked_items_uris) response = client.query( q, - username=user.userName, + username=user.username, groups=[group.name], input={ 'shareUri': shareUri, @@ -814,7 +818,7 @@ def list_datasets_published_in_environment(client, user, group, environmentUri): """ response = client.query( q, - username=user.userName, + username=user.username, groups=[group.name], environmentUri=environmentUri, filter={}, @@ -830,7 +834,7 @@ def test_create_share_object_unauthorized(client, group3, dataset1, env2, env2gr # When a user that does not belong to environment and group creates request create_share_object_response = create_share_object( client=client, - userName='anonymous', + username='anonymous', group=group3, groupUri=env2group.groupUri, environmentUri=env2.environmentUri, @@ -846,7 +850,7 @@ def test_create_share_object_authorized(client, user2, group2, env2group, env2, # When a user that belongs to environment and group creates request create_share_object_response = create_share_object( client=client, - userName=user2.userName, + username=user2.username, group=group2, groupUri=env2group.groupUri, environmentUri=env2.environmentUri, @@ -854,7 +858,7 @@ def test_create_share_object_authorized(client, user2, group2, env2group, env2, ) # Then share object created with status Draft and user is 'Requester' assert create_share_object_response.data.createShareObject.shareUri - assert create_share_object_response.data.createShareObject.status == dataall.api.constants.ShareObjectStatus.Draft.value + assert create_share_object_response.data.createShareObject.status == ShareObjectStatus.Draft.value assert create_share_object_response.data.createShareObject.userRoleForShareObject == 'Requesters' assert create_share_object_response.data.createShareObject.requestPurpose == 'testShare' @@ -864,7 +868,7 @@ def test_create_share_object_with_item_authorized(client, user2, group2, env2gro # When a user that belongs to environment and group creates request with table in the request create_share_object_response = create_share_object( client=client, - userName=user2.userName, + username=user2.username, group=group2, groupUri=env2group.groupUri, environmentUri=env2.environmentUri, @@ -874,7 +878,7 @@ def test_create_share_object_with_item_authorized(client, user2, group2, env2gro # Then share object created with status Draft and user is 'Requester' assert create_share_object_response.data.createShareObject.shareUri - assert create_share_object_response.data.createShareObject.status == dataall.api.constants.ShareObjectStatus.Draft.value + assert create_share_object_response.data.createShareObject.status == ShareObjectStatus.Draft.value assert create_share_object_response.data.createShareObject.userRoleForShareObject == 'Requesters' assert create_share_object_response.data.createShareObject.requestPurpose == 'testShare' @@ -888,8 +892,7 @@ def test_create_share_object_with_item_authorized(client, user2, group2, env2gro ) assert get_share_object_response.data.getShareObject.get('items').nodes[0].itemUri == table1.tableUri - assert get_share_object_response.data.getShareObject.get('items').nodes[ - 0].itemType == dataall.api.constants.ShareableType.Table.name + 
assert get_share_object_response.data.getShareObject.get('items').nodes[0].itemType == ShareableType.Table.name def test_get_share_object(client, share1_draft, user, group): @@ -906,7 +909,7 @@ def test_get_share_object(client, share1_draft, user, group): # Then we get the info about the share assert get_share_object_response.data.getShareObject.shareUri == share1_draft.shareUri assert get_share_object_response.data.getShareObject.get( - 'principal').principalType == dataall.api.constants.PrincipalType.Group.name + 'principal').principalType == PrincipalType.Group.name assert get_share_object_response.data.getShareObject.get('principal').principalIAMRoleName assert get_share_object_response.data.getShareObject.get('principal').SamlGroupName assert get_share_object_response.data.getShareObject.get('principal').region @@ -915,7 +918,7 @@ def test_get_share_object(client, share1_draft, user, group): def test_update_share_request_purpose(client, share1_draft, user2, group2): # Given # Existing share object in status Draft (->fixture share1_draft) - # When a user from the requesters group updates + # When a user from the requesters group updates update_share_request_purpose_response = update_share_request_purpose( client=client, user=user2, @@ -940,7 +943,7 @@ def test_update_share_request_purpose(client, share1_draft, user2, group2): def test_update_share_request_purpose_unauthorized(client, share1_draft, user, group): # Given # Existing share object in status Draft (->fixture share1_draft) - # When a user from the approvers group attempts to update the request purpose + # When a user from the approvers group attempts to update the request purpose update_share_request_purpose_response = update_share_request_purpose( client=client, user=user, @@ -1080,8 +1083,7 @@ def test_add_share_item( # Then shared item was added to share object in status PendingApproval assert add_share_item_response.data.addSharedItem.shareUri == share1_draft.shareUri - assert add_share_item_response.data.addSharedItem.status == \ - dataall.api.constants.ShareItemStatus.PendingApproval.name + assert add_share_item_response.data.addSharedItem.status == ShareItemStatus.PendingApproval.name def test_remove_share_item( @@ -1097,11 +1099,11 @@ def test_remove_share_item( filter={"isShared": True}, ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Draft.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Draft.value shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] assert shareItem.shareItemUri == share1_item_pa.shareItemUri - assert shareItem.status == dataall.api.constants.ShareItemStatus.PendingApproval.value + assert shareItem.status == ShareItemStatus.PendingApproval.value assert get_share_object_response.data.getShareObject.get("items").count == 1 # When @@ -1138,11 +1140,11 @@ def test_submit_share_request( filter={"isShared": True} ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Draft.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Draft.value shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] assert shareItem.shareItemUri == share1_item_pa.shareItemUri - assert shareItem.status == dataall.api.constants.ShareItemStatus.PendingApproval.value + assert shareItem.status == ShareItemStatus.PendingApproval.value assert 
get_share_object_response.data.getShareObject.get("items").count == 1 # When @@ -1155,8 +1157,7 @@ def test_submit_share_request( ) # Then share object status is changed to Submitted - assert submit_share_object_response.data.submitShareObject.status == \ - dataall.api.constants.ShareObjectStatus.Submitted.name + assert submit_share_object_response.data.submitShareObject.status == ShareObjectStatus.Submitted.name assert submit_share_object_response.data.submitShareObject.userRoleForShareObject == 'Requesters' # and share item status stays in PendingApproval @@ -1169,13 +1170,13 @@ def test_submit_share_request( ) shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] status = shareItem['status'] - assert status == dataall.api.constants.ShareItemStatus.PendingApproval.name + assert status == ShareItemStatus.PendingApproval.name def test_update_share_reject_purpose(client, share2_submitted, user, group): # Given # Existing share object in status Submitted (-> fixture share2_submitted) - # When a user from the approvers group updates the reject purpose + # When a user from the approvers group updates the reject purpose update_share_reject_purpose_response = update_share_reject_purpose( client=client, user=user, @@ -1200,7 +1201,7 @@ def test_update_share_reject_purpose(client, share2_submitted, user, group): def test_update_share_reject_purpose_unauthorized(client, share2_submitted, user2, group2): # Given # Existing share object in status Submitted (-> fixture share2_submitted) - # When a user from the requester group attempts to update the reject purpose + # When a user from the requester group attempts to update the reject purpose update_share_reject_purpose_response = update_share_reject_purpose( client=client, user=user2, @@ -1227,10 +1228,10 @@ def test_approve_share_request( filter={"isShared": True} ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Submitted.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Submitted.value shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] assert shareItem.shareItemUri == share2_item_pa.shareItemUri - assert shareItem.status == dataall.api.constants.ShareItemStatus.PendingApproval.value + assert shareItem.status == ShareItemStatus.PendingApproval.value assert get_share_object_response.data.getShareObject.get("items").count == 1 # When we approve the share object @@ -1245,8 +1246,7 @@ def test_approve_share_request( assert approve_share_object_response.data.approveShareObject.userRoleForShareObject == 'Approvers' # Then share object status is changed to Approved - assert approve_share_object_response.data.approveShareObject.status == \ - dataall.api.constants.ShareObjectStatus.Approved.name + assert approve_share_object_response.data.approveShareObject.status == ShareObjectStatus.Approved.name # and share item status is changed to Share_Approved get_share_object_response = get_share_object( @@ -1258,7 +1258,7 @@ def test_approve_share_request( ) shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] - assert shareItem.status == dataall.api.constants.ShareItemStatus.Share_Approved.value + assert shareItem.status == ShareItemStatus.Share_Approved.value # When approved share object is processed and the shared items successfully shared _successfull_processing_for_share_object(db, share2_submitted) @@ -1272,11 +1272,11 @@ def test_approve_share_request( ) # Then share object status is 
changed to Processed - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Processed.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Processed.value # And share item status is changed to Share_Succeeded shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] - assert shareItem.status == dataall.api.constants.ShareItemStatus.Share_Succeeded.value + assert shareItem.status == ShareItemStatus.Share_Succeeded.value def test_reject_share_request( @@ -1293,10 +1293,10 @@ def test_reject_share_request( filter={"isShared": True} ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Submitted.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Submitted.value shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] assert shareItem.shareItemUri == share2_item_pa.shareItemUri - assert shareItem.status == dataall.api.constants.ShareItemStatus.PendingApproval.value + assert shareItem.status == ShareItemStatus.PendingApproval.value assert get_share_object_response.data.getShareObject.get("items").count == 1 # When we reject the share object @@ -1308,9 +1308,9 @@ def test_reject_share_request( ) # Then share object status is changed to Rejected - assert reject_share_object_response.data.rejectShareObject.status == \ - dataall.api.constants.ShareObjectStatus.Rejected.name + assert reject_share_object_response.data.rejectShareObject.status == ShareObjectStatus.Rejected.name assert reject_share_object_response.data.rejectShareObject.rejectPurpose == "testRejectShare" + # and share item status is changed to Share_Rejected get_share_object_response = get_share_object( client=client, @@ -1321,7 +1321,7 @@ def test_reject_share_request( ) shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] - assert shareItem.status == dataall.api.constants.ShareItemStatus.Share_Rejected.value + assert shareItem.status == ShareItemStatus.Share_Rejected.value def test_search_shared_items_in_environment( @@ -1337,7 +1337,7 @@ def test_search_shared_items_in_environment( filter={"isShared": True} ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Processed.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Processed.value list_datasets_published_in_environment_response = list_datasets_published_in_environment( client=client, @@ -1364,11 +1364,11 @@ def test_revoke_items_share_request( filter={"isShared": True} ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Processed.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Processed.value shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] assert shareItem.shareItemUri == share3_item_shared.shareItemUri - assert shareItem.status == dataall.api.constants.ShareItemStatus.Share_Succeeded.value + assert shareItem.status == ShareItemStatus.Share_Succeeded.value revoked_items_uris = [node.shareItemUri for node in get_share_object_response.data.getShareObject.get('items').nodes] @@ -1382,7 +1382,7 @@ def test_revoke_items_share_request( revoked_items_uris=revoked_items_uris ) # Then share object changes to status Rejected - assert revoke_items_share_object_response.data.revokeItemsShareObject.status == 
dataall.api.constants.ShareObjectStatus.Revoked.value + assert revoke_items_share_object_response.data.revokeItemsShareObject.status == ShareObjectStatus.Revoked.value # And shared item changes to status Revoke_Approved get_share_object_response = get_share_object( @@ -1394,7 +1394,7 @@ def test_revoke_items_share_request( ) sharedItem = get_share_object_response.data.getShareObject.get('items').nodes[0] status = sharedItem['status'] - assert status == dataall.api.constants.ShareItemStatus.Revoke_Approved.value + assert status == ShareItemStatus.Revoke_Approved.value # Given the revoked share object is processed and the shared items # When approved share object is processed and the shared items successfully revoked (we re-use same function) @@ -1409,11 +1409,11 @@ def test_revoke_items_share_request( ) # Then share object status is changed to Processed - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Processed.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Processed.value # And share item status is changed to Revoke_Succeeded shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] - assert shareItem.status == dataall.api.constants.ShareItemStatus.Revoke_Succeeded.value + assert shareItem.status == ShareItemStatus.Revoke_Succeeded.value def test_delete_share_object( @@ -1429,7 +1429,7 @@ def test_delete_share_object( filter={"isShared": True} ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Draft.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Draft.value # When deleting the share object delete_share_object_response = delete_share_object( @@ -1456,11 +1456,11 @@ def test_delete_share_object_remaining_items_error( filter={"isShared": True} ) - assert get_share_object_response.data.getShareObject.status == dataall.api.constants.ShareObjectStatus.Processed.value + assert get_share_object_response.data.getShareObject.status == ShareObjectStatus.Processed.value shareItem = get_share_object_response.data.getShareObject.get("items").nodes[0] assert shareItem.shareItemUri == share3_item_shared.shareItemUri - assert shareItem.status == dataall.api.constants.ShareItemStatus.Share_Succeeded.value + assert shareItem.status == ShareItemStatus.Share_Succeeded.value assert get_share_object_response.data.getShareObject.get("items").count == 1 # When deleting the share object @@ -1477,16 +1477,16 @@ def test_delete_share_object_remaining_items_error( def _successfull_processing_for_share_object(db, share): with db.scoped_session() as session: print('Processing share with action ShareObjectActions.Start') - share = dataall.db.api.ShareObject.get_share_by_uri(session, share.shareUri) + share = ShareObjectRepository.get_share_by_uri(session, share.shareUri) - share_items_states = dataall.db.api.ShareObject.get_share_items_states(session, share.shareUri) + share_items_states = ShareObjectRepository.get_share_items_states(session, share.shareUri) - Share_SM = dataall.db.api.ShareObjectSM(share.status) - new_share_state = Share_SM.run_transition(dataall.db.models.Enums.ShareObjectActions.Start.value) + Share_SM = ShareObjectSM(share.status) + new_share_state = Share_SM.run_transition(ShareObjectActions.Start.value) for item_state in share_items_states: - Item_SM = dataall.db.api.ShareItemSM(item_state) - new_state = Item_SM.run_transition(dataall.db.models.Enums.ShareObjectActions.Start.value) + 
Item_SM = ShareItemSM(item_state) + new_state = Item_SM.run_transition(ShareObjectActions.Start.value) Item_SM.update_state(session, share.shareUri, new_state) Share_SM.update_state(session, share, new_share_state) @@ -1494,14 +1494,14 @@ def _successfull_processing_for_share_object(db, share): print('Processing share with action ShareObjectActions.Finish \ and ShareItemActions.Success') - share = dataall.db.api.ShareObject.get_share_by_uri(session, share.shareUri) - share_items_states = dataall.db.api.ShareObject.get_share_items_states(session, share.shareUri) + share = ShareObjectRepository.get_share_by_uri(session, share.shareUri) + share_items_states = ShareObjectRepository.get_share_items_states(session, share.shareUri) - new_share_state = Share_SM.run_transition(dataall.db.models.Enums.ShareObjectActions.Finish.value) + new_share_state = Share_SM.run_transition(ShareObjectActions.Finish.value) for item_state in share_items_states: - Item_SM = dataall.db.api.ShareItemSM(item_state) - new_state = Item_SM.run_transition(dataall.db.models.Enums.ShareItemActions.Success.value) + Item_SM = ShareItemSM(item_state) + new_state = Item_SM.run_transition(ShareItemActions.Success.value) Item_SM.update_state(session, share.shareUri, new_state) Share_SM.update_state(session, share, new_share_state) diff --git a/tests/modules/feed/testhelper.py b/tests/modules/feed/testhelper.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/modules/mlstudio/__init__.py b/tests/modules/mlstudio/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/modules/mlstudio/cdk/__init__.py b/tests/modules/mlstudio/cdk/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/modules/mlstudio/cdk/conftest.py b/tests/modules/mlstudio/cdk/conftest.py new file mode 100644 index 000000000..4b3327838 --- /dev/null +++ b/tests/modules/mlstudio/cdk/conftest.py @@ -0,0 +1,25 @@ +import pytest + +from dataall.core.environment.db.environment_models import Environment +from dataall.core.organizations.db.organization_models import Organization +from dataall.modules.mlstudio.db.mlstudio_models import SagemakerStudioUser + + +@pytest.fixture(scope='module', autouse=True) +def sgm_studio(db, env_fixture: Environment) -> SagemakerStudioUser: + with db.scoped_session() as session: + sm_user = SagemakerStudioUser( + label='thistable', + owner='me', + AWSAccountId=env_fixture.AwsAccountId, + region=env_fixture.region, + sagemakerStudioUserStatus='UP', + sagemakerStudioUserName='ProfileName', + sagemakerStudioUserNameSlugify='ProfileName', + sagemakerStudioDomainID='domain', + environmentUri=env_fixture.environmentUri, + RoleArn=env_fixture.EnvironmentDefaultIAMRoleArn, + SamlAdminGroupName=env_fixture.SamlGroupName, + ) + session.add(sm_user) + yield sm_user diff --git a/tests/modules/mlstudio/cdk/test_sagemaker_studio_stack.py b/tests/modules/mlstudio/cdk/test_sagemaker_studio_stack.py new file mode 100644 index 000000000..6711d0566 --- /dev/null +++ b/tests/modules/mlstudio/cdk/test_sagemaker_studio_stack.py @@ -0,0 +1,108 @@ +import pytest +from aws_cdk.assertions import Template +from aws_cdk import App, Stack, aws_iam + +from dataall.modules.mlstudio.cdk.mlstudio_stack import SagemakerStudioUserProfile, SageMakerDomainExtension + + +class MockEnvironmentSageMakerExtension(Stack): + def environment(self): + return self._environment + + def get_engine(self): + return self._db + + def __init__(self, scope, id, env, db, **kwargs): + super().__init__( + scope, + id, + 
description='Cloud formation stack of ENVIRONMENT: {}; URI: {}; DESCRIPTION: {}'.format( + env.label, + env.environmentUri, + env.description, + )[:1024], + **kwargs, + ) + self._environment = env + self._db = db + self.default_role = aws_iam.Role(self, "DefaultRole", + assumed_by=aws_iam.ServicePrincipal("lambda.amazonaws.com"), + description="Example role..." + ) + self.group_roles = [] + SageMakerDomainExtension.extent(self) + + +@pytest.fixture(scope='function', autouse=True) +def patch_methods_sagemaker_studio(mocker, db, sgm_studio, env_fixture, org_fixture): + mocker.patch( + 'dataall.modules.mlstudio.cdk.mlstudio_stack.SagemakerStudioUserProfile.get_engine', + return_value=db, + ) + mocker.patch( + 'dataall.base.aws.sts.SessionHelper.get_delegation_role_name', + return_value="dataall-pivot-role-name-pytest", + ) + mocker.patch( + 'dataall.modules.mlstudio.cdk.mlstudio_stack.SagemakerStudioUserProfile.get_target', + return_value=sgm_studio, + ) + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_engine', return_value=db + ) + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_target', + return_value=sgm_studio, + ) + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_environment', + return_value=env_fixture, + ) + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_organization', + return_value=org_fixture, + ) + + +@pytest.fixture(scope='function', autouse=True) +def patch_methods_sagemaker_studio_extension(mocker): + mocker.patch( + 'dataall.base.aws.sts.SessionHelper.get_cdk_look_up_role_arn', + return_value="arn:aws:iam::1111111111:role/cdk-hnb659fds-lookup-role-1111111111-eu-west-1", + ) + mocker.patch( + 'dataall.modules.mlstudio.aws.ec2_client.EC2.check_default_vpc_exists', + return_value=False, + ) + + +def test_resources_sgmstudio_stack_created(sgm_studio): + app = App() + + # Create the Stack + stack = SagemakerStudioUserProfile( + app, 'Domain', target_uri=sgm_studio.sagemakerStudioUserUri + ) + + # Prepare the stack for assertions. + template = Template.from_stack(stack) + + # Assert that we have created a SageMaker user profile + # TODO: Add more assertions + template.resource_count_is("AWS::SageMaker::UserProfile", 1) + + +def test_resources_sgmstudio_extension_stack_created(db, env_fixture): + app = App() + + # Create the Stack + stack = MockEnvironmentSageMakerExtension( + app, 'SagemakerExtension', env=env_fixture, db=db, + ) + + # Prepare the stack for assertions. 
+ template = Template.from_stack(stack) + + # Assert that we have created a SageMaker domain + # TODO: Add more assertions + template.resource_count_is("AWS::SageMaker::Domain", 1) diff --git a/tests/modules/mlstudio/conftest.py b/tests/modules/mlstudio/conftest.py new file mode 100644 index 000000000..433048894 --- /dev/null +++ b/tests/modules/mlstudio/conftest.py @@ -0,0 +1,81 @@ +import pytest + +from dataall.modules.mlstudio.db.mlstudio_models import SagemakerStudioUser + + +@pytest.fixture(scope='module', autouse=True) +def patch_aws_sagemaker_client(module_mocker): + module_mocker.patch( + 'dataall.modules.mlstudio.services.mlstudio_service.get_sagemaker_studio_domain', + return_value={'DomainId': 'test'}, + ) + + +@pytest.fixture(scope='module', autouse=True) +def env_params(): + yield {'mlStudiosEnabled': 'True'} + + +@pytest.fixture(scope='module') +def sagemaker_studio_user(client, tenant, group, env_fixture) -> SagemakerStudioUser: + response = client.query( + """ + mutation createSagemakerStudioUser($input:NewSagemakerStudioUserInput){ + createSagemakerStudioUser(input:$input){ + sagemakerStudioUserUri + name + label + created + description + SamlAdminGroupName + environmentUri + tags + } + } + """, + input={ + 'label': 'testcreate', + 'SamlAdminGroupName': group.name, + 'environmentUri': env_fixture.environmentUri, + }, + username='alice', + groups=[group.name], + ) + yield response.data.createSagemakerStudioUser + + +@pytest.fixture(scope='module') +def multiple_sagemaker_studio_users(client, db, env_fixture, group): + for i in range(0, 10): + response = client.query( + """ + mutation createSagemakerStudioUser($input:NewSagemakerStudioUserInput){ + createSagemakerStudioUser(input:$input){ + sagemakerStudioUserUri + name + label + created + description + SamlAdminGroupName + environmentUri + tags + } + } + """, + input={ + 'label': f'test{i}', + 'SamlAdminGroupName': group.name, + 'environmentUri': env_fixture.environmentUri, + }, + username='alice', + groups=[group.name], + ) + assert response.data.createSagemakerStudioUser.label == f'test{i}' + assert ( + response.data.createSagemakerStudioUser.SamlAdminGroupName + == group.name + ) + assert ( + response.data.createSagemakerStudioUser.environmentUri + == env_fixture.environmentUri + ) diff --git a/tests/modules/mlstudio/test_sagemaker_studio.py b/tests/modules/mlstudio/test_sagemaker_studio.py new file mode 100644 index 000000000..c55762522 --- /dev/null +++ b/tests/modules/mlstudio/test_sagemaker_studio.py @@ -0,0 +1,69 @@ +from dataall.modules.mlstudio.db.mlstudio_models import SagemakerStudioUser + + +def test_create_sagemaker_studio_user(sagemaker_studio_user, group, env_fixture): + """Testing that the conftest sagemaker studio user has been created correctly""" + assert sagemaker_studio_user.label == 'testcreate' + assert sagemaker_studio_user.SamlAdminGroupName == group.name + assert sagemaker_studio_user.environmentUri == env_fixture.environmentUri + + +def test_list_sagemaker_studio_users(client, env_fixture, db, group, multiple_sagemaker_studio_users): + response = client.query( + """ + query listSagemakerStudioUsers($filter:SagemakerStudioUserFilter!){ + listSagemakerStudioUsers(filter:$filter){ + count + nodes{ + sagemakerStudioUserUri + } + } + } + """, + filter={}, + username='alice', + ) + print(response.data) + assert len(response.data.listSagemakerStudioUsers['nodes']) == 10 + + +def test_nopermissions_list_sagemaker_studio_users( + client, db, group +): + response = client.query( + """ + query 
listSagemakerStudioUsers($filter:SagemakerStudioUserFilter!){ + listSagemakerStudioUsers(filter:$filter){ + count + nodes{ + sagemakerStudioUserUri + } + } + } + """, + filter={}, + username='bob', + ) + assert len(response.data.listSagemakerStudioUsers['nodes']) == 0 + + +def test_delete_sagemaker_studio_user( + client, db, group, sagemaker_studio_user +): + response = client.query( + """ + mutation deleteSagemakerStudioUser($sagemakerStudioUserUri:String!, $deleteFromAWS:Boolean){ + deleteSagemakerStudioUser(sagemakerStudioUserUri:$sagemakerStudioUserUri, deleteFromAWS:$deleteFromAWS) + } + """, + sagemakerStudioUserUri=sagemaker_studio_user.sagemakerStudioUserUri, + deleteFromAWS=True, + username='alice', + groups=[group.name], + ) + assert response.data + with db.scoped_session() as session: + n = session.query(SagemakerStudioUser).get( + sagemaker_studio_user.sagemakerStudioUserUri + ) + assert not n diff --git a/tests/modules/mlstudio/test_sagemaker_studio_stack.py b/tests/modules/mlstudio/test_sagemaker_studio_stack.py new file mode 100644 index 000000000..3bb8779ae --- /dev/null +++ b/tests/modules/mlstudio/test_sagemaker_studio_stack.py @@ -0,0 +1,18 @@ + +def test_sagemaker_studio_update_stack(client, sagemaker_studio_user, group): + response = client.query( + """ + mutation updateStack($targetUri:String!, $targetType:String!){ + updateStack(targetUri:$targetUri, targetType:$targetType){ + stackUri + targetUri + name + } + } + """, + targetUri=sagemaker_studio_user.sagemakerStudioUserUri, + targetType="mlstudio", + username="alice", + groups=[group.name], + ) + assert response.data.updateStack.targetUri == sagemaker_studio_user.sagemakerStudioUserUri diff --git a/tests/modules/notebooks/__init__.py b/tests/modules/notebooks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/modules/notebooks/cdk/__init__.py b/tests/modules/notebooks/cdk/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/modules/notebooks/cdk/conftest.py b/tests/modules/notebooks/cdk/conftest.py new file mode 100644 index 000000000..596873274 --- /dev/null +++ b/tests/modules/notebooks/cdk/conftest.py @@ -0,0 +1,23 @@ +import pytest + +from dataall.core.environment.db.environment_models import Environment +from dataall.modules.notebooks.db.notebook_models import SagemakerNotebook + + +@pytest.fixture(scope='module', autouse=True) +def notebook(db, env_fixture: Environment) -> SagemakerNotebook: + with db.scoped_session() as session: + notebook = SagemakerNotebook( + label='thistable', + NotebookInstanceStatus='RUNNING', + owner='me', + AWSAccountId=env_fixture.AwsAccountId, + region=env_fixture.region, + environmentUri=env_fixture.environmentUri, + RoleArn=env_fixture.EnvironmentDefaultIAMRoleArn, + SamlAdminGroupName=env_fixture.SamlGroupName, + VolumeSizeInGB=32, + InstanceType='ml.t3.medium', + ) + session.add(notebook) + yield notebook diff --git a/tests/modules/notebooks/cdk/test_sagemaker_notebook_stack.py b/tests/modules/notebooks/cdk/test_sagemaker_notebook_stack.py new file mode 100644 index 000000000..e46186cf7 --- /dev/null +++ b/tests/modules/notebooks/cdk/test_sagemaker_notebook_stack.py @@ -0,0 +1,48 @@ +import json + +import pytest +from aws_cdk import App + +from dataall.modules.notebooks.cdk.notebook_stack import NotebookStack + + +@pytest.fixture(scope='function', autouse=True) +def patch_methods(mocker, db, notebook, env_fixture, org_fixture): + mocker.patch( + 'dataall.modules.notebooks.cdk.notebook_stack.NotebookStack.get_engine', + 
return_value=db + ) + mocker.patch( + 'dataall.base.aws.sts.SessionHelper.get_delegation_role_name', + return_value="dataall-pivot-role-name-pytest", + ) + mocker.patch( + 'dataall.modules.notebooks.cdk.notebook_stack.NotebookStack.get_target', + return_value=notebook, + ) + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_engine', return_value=db + ) + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_target', + return_value=notebook, + ) + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_environment', + return_value=env_fixture, + ) + mocker.patch( + 'dataall.core.stacks.services.runtime_stacks_tagging.TagsUtil.get_organization', + return_value=org_fixture, + ) + + +@pytest.fixture(scope='function', autouse=True) +def template(notebook): + app = App() + NotebookStack(app, 'SagemakerNotebook', target_uri=notebook.notebookUri) + return json.dumps(app.synth().get_stack_by_name('SagemakerNotebook').template) + + +def test_resources_created(template): + assert 'AWS::SageMaker::NotebookInstance' in template diff --git a/tests/modules/notebooks/conftest.py b/tests/modules/notebooks/conftest.py new file mode 100644 index 000000000..a933cdf4a --- /dev/null +++ b/tests/modules/notebooks/conftest.py @@ -0,0 +1,63 @@ +import pytest + +from dataall.modules.notebooks.db.notebook_models import SagemakerNotebook + + +class MockSagemakerClient: + def start_instance(self): + return "Starting" + + def stop_instance(self): + return True + + def get_notebook_instance_status(self): + return "INSERVICE" + + +@pytest.fixture(scope='module', autouse=True) +def patch_aws(module_mocker): + module_mocker.patch( + "dataall.modules.notebooks.services.notebook_service.client", + return_value=MockSagemakerClient(), + ) + + +@pytest.fixture(scope='module', autouse=True) +def env_params(): + yield {'notebooksEnabled': 'True'} + + +@pytest.fixture(scope='module') +def sgm_notebook(client, tenant, group, env_fixture) -> SagemakerNotebook: + response = client.query( + """ + mutation createSagemakerNotebook($input:NewSagemakerNotebookInput){ + createSagemakerNotebook(input:$input){ + notebookUri + label + description + tags + owner + userRoleForNotebook + SamlAdminGroupName + VpcId + SubnetId + VolumeSizeInGB + InstanceType + } + } + """, + input={ + 'label': 'my best notebook ever', + 'SamlAdminGroupName': group.name, + 'tags': [group.name], + 'environmentUri': env_fixture.environmentUri, + 'VpcId': 'vpc-123567', + 'SubnetId': 'subnet-123567', + 'VolumeSizeInGB': 32, + 'InstanceType': 'ml.m5.xlarge', + }, + username='alice', + groups=[group.name], + ) + yield response.data.createSagemakerNotebook diff --git a/tests/modules/notebooks/test_notebook_stack.py b/tests/modules/notebooks/test_notebook_stack.py new file mode 100644 index 000000000..e21a163d4 --- /dev/null +++ b/tests/modules/notebooks/test_notebook_stack.py @@ -0,0 +1,6 @@ +from tests.core.stacks.test_stack import update_stack_query + + +def test_notebook_stack(client, sgm_notebook, group): + response = update_stack_query(client, sgm_notebook.notebookUri, 'notebook', group.name) + assert response.data.updateStack.targetUri == sgm_notebook.notebookUri diff --git a/tests/modules/notebooks/test_sagemaker_notebook.py b/tests/modules/notebooks/test_sagemaker_notebook.py new file mode 100644 index 000000000..886ef8e0b --- /dev/null +++ b/tests/modules/notebooks/test_sagemaker_notebook.py @@ -0,0 +1,157 @@ +import pytest + + +def test_sgm_notebook(sgm_notebook, group): + 
assert sgm_notebook.notebookUri + assert sgm_notebook.SamlAdminGroupName == group.name + assert sgm_notebook.VpcId == 'vpc-123567' + assert sgm_notebook.SubnetId == 'subnet-123567' + assert sgm_notebook.InstanceType == 'ml.m5.xlarge' + assert sgm_notebook.VolumeSizeInGB == 32 + + +def test_list_notebooks(client, user, group, sgm_notebook): + query = """ + query ListSagemakerNotebooks($filter:SagemakerNotebookFilter){ + listSagemakerNotebooks(filter:$filter){ + count + nodes{ + NotebookInstanceStatus + notebookUri + environment { + environmentUri + } + organization { + organizationUri + } + } + } + } + """ + + response = client.query( + query, + filter=None, + username=user.username, + groups=[group.name], + ) + + assert len(response.data.listSagemakerNotebooks['nodes']) == 1 + + response = client.query( + query, + filter={"term": "my best"}, + username=user.username, + groups=[group.name], + ) + + assert len(response.data.listSagemakerNotebooks['nodes']) == 1 + + +def test_nopermissions_list_notebooks(client, user2, group2, sgm_notebook): + response = client.query( + """ + query ListSagemakerNotebooks($filter:SagemakerNotebookFilter){ + listSagemakerNotebooks(filter:$filter){ + count + nodes{ + NotebookInstanceStatus + notebookUri + environment { + environmentUri + } + organization { + organizationUri + } + } + } + } + """, + filter=None, + username=user2.username, + groups=[group2.name], + ) + assert len(response.data.listSagemakerNotebooks['nodes']) == 0 + + +def test_get_notebook(client, user, group, sgm_notebook): + + response = client.query( + """ + query getSagemakerNotebook($notebookUri:String!){ + getSagemakerNotebook(notebookUri:$notebookUri){ + notebookUri + NotebookInstanceStatus + } + } + """, + notebookUri=sgm_notebook.notebookUri, + username=user.username, + groups=[group.name], + ) + assert response.data.getSagemakerNotebook.notebookUri == sgm_notebook.notebookUri + + +def test_action_notebook(client, user, group, sgm_notebook): + response = client.query( + """ + mutation stopSagemakerNotebook($notebookUri:String!){ + stopSagemakerNotebook(notebookUri:$notebookUri) + } + """, + notebookUri=sgm_notebook.notebookUri, + username=user.username, + groups=[group.name], + ) + assert response.data.stopSagemakerNotebook == 'Stopping' + + response = client.query( + """ + mutation startSagemakerNotebook($notebookUri:String!){ + startSagemakerNotebook(notebookUri:$notebookUri) + } + """, + notebookUri=sgm_notebook.notebookUri, + username=user.username, + groups=[group.name], + ) + assert response.data.startSagemakerNotebook == 'Starting' + + +def test_delete_notebook(client, user, group, sgm_notebook): + + response = client.query( + """ + mutation deleteSagemakerNotebook($notebookUri:String!,$deleteFromAWS:Boolean){ + deleteSagemakerNotebook(notebookUri:$notebookUri,deleteFromAWS:$deleteFromAWS) + } + """, + notebookUri=sgm_notebook.notebookUri, + deleteFromAWS=True, + username=user.username, + groups=[group.name], + ) + assert response.data.deleteSagemakerNotebook + response = client.query( + """ + query ListSagemakerNotebooks($filter:SagemakerNotebookFilter){ + listSagemakerNotebooks(filter:$filter){ + count + nodes{ + NotebookInstanceStatus + notebookUri + environment { + environmentUri + } + organization { + organizationUri + } + } + } + } + """, + filter=None, + username=user.username, + groups=[group.name], + ) + assert len(response.data.listSagemakerNotebooks['nodes']) == 0 diff --git a/tests/modules/test_loader.py b/tests/modules/test_loader.py new file mode 100644 index 
000000000..9c5682dbc --- /dev/null +++ b/tests/modules/test_loader.py @@ -0,0 +1,160 @@ +from abc import ABC +from typing import List, Type, Set + +import pytest + +from dataall.base.loader import ModuleInterface, ImportMode +from dataall.base import loader + +order = [] + + +class TestModule(ModuleInterface, ABC): + def __init__(self): + order.append(self.__class__) + + @classmethod + def name(cls) -> str: + return cls.__name__ + + +class TestApiModule(TestModule): + @staticmethod + def is_supported(modes: Set[ImportMode]) -> bool: + return ImportMode.API in modes + + +class AModule(TestApiModule): + pass + + +class BModule(TestApiModule): + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [AModule] + + +class CModule(TestModule): + @staticmethod + def is_supported(modes: List[ImportMode]) -> bool: + return ImportMode.CDK in modes + + +class DModule(TestApiModule): + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [BModule] + + +class EModule(TestApiModule): + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [BModule] + + +class FModule(TestApiModule): + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [EModule] + + +class GModule(TestApiModule): + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [AModule, BModule] + + +class IModule(TestApiModule): + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [EModule, DModule] + + +class JModule(TestApiModule): + pass + + +class KModule(TestApiModule): + @staticmethod + def depends_on() -> List[Type['ModuleInterface']]: + return [JModule, EModule] + + +@pytest.fixture(scope='module', autouse=True) +def patch_prefix(): + prefix = loader._MODULE_PREFIX + loader._MODULE_PREFIX = 'tests.modules.test_loader' + yield + + loader._MODULE_PREFIX = prefix + + +@pytest.fixture(scope='function', autouse=True) +def clean_order(): + yield + order.clear() + + +def patch_loading(mocker, all_modules, in_config): + mocker.patch( + 'dataall.base.loader._all_modules', + return_value=all_modules, + ) + mocker.patch( + 'dataall.base.loader._load_modules', + return_value=({module.name() for module in in_config}, {}) + ) + + +@pytest.fixture(scope="function", autouse=True) +def patch_modes(mocker): + mocker.patch( + 'dataall.base.loader._ACTIVE_MODES', set() + ) + yield + + +def test_nothing_to_load(mocker): + patch_loading(mocker, [], set()) + loader.load_modules({ImportMode.API, ImportMode.CDK}) + assert len(order) == 0 + + +def test_import_with_one_dependency(mocker): + patch_loading(mocker, [AModule, BModule], {BModule}) + loader.load_modules({ImportMode.API}) + assert order == [AModule, BModule] + + +def test_load_with_cdk_mode(mocker): + patch_loading(mocker, [DModule, CModule, BModule], {CModule}) + loader.load_modules({ImportMode.CDK}) + assert order == [CModule] + + +def test_many_nested_layers(mocker): + patch_loading(mocker, [BModule, CModule, AModule, DModule], {DModule, CModule}) + loader.load_modules({ImportMode.API}) + correct_order = [AModule, BModule, DModule] + assert order == correct_order + assert CModule not in correct_order + + +def test_complex_loading(mocker): + patch_loading(mocker, [ + AModule, BModule, CModule, DModule, EModule, FModule, GModule, IModule, JModule, KModule + ], {CModule, FModule, GModule, IModule, KModule}) + + loader.load_modules({ImportMode.API}) + assert order == [AModule, JModule, BModule, DModule, EModule, GModule, FModule, IModule, KModule] + + +def 
test_incorrect_loading(mocker): + patch_loading(mocker, [CModule], set()) # A is not specified in config, but was found + with pytest.raises(ImportError): + loader.load_modules({ImportMode.CDK}) + + patch_loading(mocker, [AModule, BModule], {AModule}) + with pytest.raises(ImportError): + loader.load_modules({ImportMode.API}) + diff --git a/tests/modules/vote/__init__.py b/tests/modules/vote/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/modules/vote/test_vote.py b/tests/modules/vote/test_vote.py new file mode 100644 index 000000000..69183f150 --- /dev/null +++ b/tests/modules/vote/test_vote.py @@ -0,0 +1,54 @@ + +def count_votes_query(client, target_uri, target_type, group): + response = client.query( + """ + query countUpVotes($targetUri:String!, $targetType:String!){ + countUpVotes(targetUri:$targetUri, targetType:$targetType) + } + """, + targetUri=target_uri, + targetType=target_type, + username='alice', + groups=[group], + ) + return response + + +def get_vote_query(client, target_uri, target_type, group): + response = client.query( + """ + query getVote($targetUri:String!, $targetType:String!){ + getVote(targetUri:$targetUri, targetType:$targetType){ + upvote + } + } + """, + targetUri=target_uri, + targetType=target_type, + username='alice', + groups=[group], + ) + return response + + +def upvote_mutation(client, target_uri, target_type, upvote, group): + response = client.query( + """ + mutation upVote($input:VoteInput!){ + upVote(input:$input){ + voteUri + targetUri + targetType + upvote + } + } + """, + input=dict( + targetUri=target_uri, + targetType=target_type, + upvote=upvote, + ), + username='alice', + groups=[group], + ) + return response diff --git a/tests/modules/worksheets/__init__.py b/tests/modules/worksheets/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/modules/worksheets/test_worksheet.py b/tests/modules/worksheets/test_worksheet.py new file mode 100644 index 000000000..0f3b10e41 --- /dev/null +++ b/tests/modules/worksheets/test_worksheet.py @@ -0,0 +1,147 @@ +import pytest + +from dataall.modules.worksheets.api.resolvers import WorksheetRole + + +@pytest.fixture(scope='module', autouse=True) +def worksheet(client, tenant, group): + response = client.query( + """ + mutation CreateWorksheet ($input:NewWorksheetInput){ + createWorksheet(input:$input){ + worksheetUri + label + description + tags + owner + userRoleForWorksheet + } + } + """, + input={ + 'label': 'my worksheet', + 'SamlAdminGroupName': group.name, + 'tags': [group.name], + }, + username='alice', + groups=[group.name], + tags=[group.name], + ) + return response.data.createWorksheet + + +def test_create_worksheet(client, worksheet): + assert worksheet.label == 'my worksheet' + assert worksheet.owner == 'alice' + assert worksheet.userRoleForWorksheet == WorksheetRole.Creator.name + + +def test_list_worksheets_as_creator(client, group): + response = client.query( + """ + query ListWorksheets ($filter:WorksheetFilter){ + listWorksheets (filter:$filter){ + count + page + pages + nodes{ + worksheetUri + label + description + tags + owner + userRoleForWorksheet + } + } + } + """, + filter={'page': 1}, + username='alice', + groups=[group.name], + ) + + assert response.data.listWorksheets.count == 1 + + +def test_list_worksheets_as_anonymous(client, group): + response = client.query( + """ + query ListWorksheets ($filter:WorksheetFilter){ + listWorksheets (filter:$filter){ + count + page + pages + nodes{ + worksheetUri + label + description + tags + 
owner + userRoleForWorksheet + } + } + } + """, + filter={'page': 1}, + username='anonymous', + ) + + print(response) + assert response.data.listWorksheets.count == 0 + + +def test_get_worksheet(client, worksheet, group): + response = client.query( + """ + query GetWorksheet($worksheetUri:String!){ + getWorksheet(worksheetUri:$worksheetUri){ + label + description + userRoleForWorksheet + } + } + """, + worksheetUri=worksheet.worksheetUri, + username='alice', + groups=[group.name], + ) + + assert response.data.getWorksheet.userRoleForWorksheet == WorksheetRole.Creator.name + + response = client.query( + """ + query GetWorksheet($worksheetUri:String!){ + getWorksheet(worksheetUri:$worksheetUri){ + label + description + userRoleForWorksheet + } + } + """, + worksheetUri=worksheet.worksheetUri, + username='anonymous', + ) + + assert 'Unauthorized' in response.errors[0].message + + +def test_update_worksheet(client, worksheet, group): + response = client.query( + """ + mutation UpdateWorksheet($worksheetUri:String!, $input:UpdateWorksheetInput){ + updateWorksheet( + worksheetUri:$worksheetUri, + input:$input + ){ + worksheetUri + label + } + } + """, + worksheetUri=worksheet.worksheetUri, + input={'label': 'change label'}, + username='alice', + groups=[group.name], + ) + + assert response.data.updateWorksheet.label == 'change label' diff --git a/tests/requirements.txt b/tests/requirements.txt index d68c8d230..681f68094 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,6 +1,7 @@ munch==2.5.0 -pytest==6.2.5 +pytest==7.3.1 pytest-cov==3.0.0 pytest-mock==3.6.1 pytest-dependency==0.5.1 -werkzeug==2.2.3 \ No newline at end of file +werkzeug==2.2.3 +deprecated==1.2.13 \ No newline at end of file diff --git a/tests/searchproxy/test_indexers.py b/tests/searchproxy/test_indexers.py deleted file mode 100644 index 478c8bf3c..000000000 --- a/tests/searchproxy/test_indexers.py +++ /dev/null @@ -1,154 +0,0 @@ -import typing - -import pytest - -import dataall -from dataall.searchproxy import indexers - - -@pytest.fixture(scope='module', autouse=True) -def org(db): - with db.scoped_session() as session: - org = dataall.db.models.Organization( - label='org', - owner='alice', - tags=[], - description='desc', - SamlGroupName='admins', - userRoleInOrganization='Owner', - ) - session.add(org) - yield org - - -@pytest.fixture(scope='module', autouse=True) -def env(org, db): - with db.scoped_session() as session: - env = dataall.db.models.Environment( - organizationUri=org.organizationUri, - AwsAccountId='12345678901', - region='eu-west-1', - label='org', - owner='alice', - tags=[], - description='desc', - SamlGroupName='admins', - EnvironmentDefaultIAMRoleName='EnvRole', - EnvironmentDefaultIAMRoleArn='arn:aws::123456789012:role/EnvRole/GlueJobSessionRunner', - CDKRoleArn='arn:aws::123456789012:role/EnvRole', - userRoleInEnvironment='999', - ) - session.add(env) - yield env - - -@pytest.fixture(scope='module', autouse=True) -def dataset(org, env, db): - with db.scoped_session() as session: - dataset = dataall.db.models.Dataset( - organizationUri=org.organizationUri, - environmentUri=env.environmentUri, - label='label', - owner='foo', - SamlAdminGroupName='foo', - businessOwnerDelegationEmails=['foo@amazon.com'], - businessOwnerEmail=['bar@amazon.com'], - name='name', - S3BucketName='S3BucketName', - GlueDatabaseName='GlueDatabaseName', - KmsAlias='kmsalias', - AwsAccountId='123456789012', - region='eu-west-1', - IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', - 
IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', - imported=True, - ) - session.add(dataset) - yield dataset - - -@pytest.fixture(scope='module', autouse=True) -def table(org, env, db, dataset): - with db.scoped_session() as session: - table = dataall.db.models.DatasetTable( - datasetUri=dataset.datasetUri, - AWSAccountId='12345678901', - S3Prefix='S3prefix', - label='label', - owner='foo', - name='name', - GlueTableName='table1', - S3BucketName='S3BucketName', - GlueDatabaseName='GlueDatabaseName', - region='eu-west-1', - ) - session.add(table) - yield table - - -@pytest.fixture(scope='module', autouse=True) -def folder(org, env, db, dataset): - with db.scoped_session() as session: - location = dataall.db.models.DatasetStorageLocation( - datasetUri=dataset.datasetUri, - AWSAccountId='12345678901', - S3Prefix='S3prefix', - label='label', - owner='foo', - name='name', - S3BucketName='S3BucketName', - region='eu-west-1', - ) - session.add(location) - yield location - - -def test_es_request(): - body = '{"preference":"SearchResult"}\n{"query":{"match_all":{}},"size":8,"_source":{"includes":["*"],"excludes":[]},"from":0}\n' - body = body.split('\n') - assert ( - body[1] - == '{"query":{"match_all":{}},"size":8,"_source":{"includes":["*"],"excludes":[]},"from":0}' - ) - import json - - assert json.loads(body[1]) == { - 'query': {'match_all': {}}, - 'size': 8, - '_source': {'includes': ['*'], 'excludes': []}, - 'from': 0, - } - - -def test_upsert_dataset(db, dataset, env, mocker): - mocker.patch('dataall.searchproxy.upsert', return_value={}) - with db.scoped_session() as session: - dataset_indexed = indexers.upsert_dataset( - session, es={}, datasetUri=dataset.datasetUri - ) - assert dataset_indexed.datasetUri == dataset.datasetUri - - -def test_upsert_table(db, dataset, env, mocker, table): - mocker.patch('dataall.searchproxy.upsert', return_value={}) - with db.scoped_session() as session: - table_indexed = indexers.upsert_table(session, es={}, tableUri=table.tableUri) - assert table_indexed.uri == table.tableUri - - -def test_upsert_folder(db, dataset, env, mocker, folder): - mocker.patch('dataall.searchproxy.upsert', return_value={}) - with db.scoped_session() as session: - folder_indexed = indexers.upsert_folder( - session, es={}, locationUri=folder.locationUri - ) - assert folder_indexed.uri == folder.locationUri - - -def test_upsert_tables(db, dataset, env, mocker, folder): - mocker.patch('dataall.searchproxy.upsert', return_value={}) - with db.scoped_session() as session: - tables = indexers.upsert_dataset_tables( - session, es={}, datasetUri=dataset.datasetUri - ) - assert len(tables) == 1 diff --git a/tests/tasks/conftest.py b/tests/tasks/conftest.py deleted file mode 100644 index 826ae524f..000000000 --- a/tests/tasks/conftest.py +++ /dev/null @@ -1,238 +0,0 @@ -import boto3 -import os -import pytest - -from dataall.db import models -from dataall.api import constants - - -@pytest.fixture(scope="module") -def group(db): - with db.scoped_session() as session: - group = models.Group(name="bobteam", label="bobteam", owner="alice") - session.add(group) - yield group - - -@pytest.fixture(scope="module") -def group2(db): - with db.scoped_session() as session: - group = models.Group(name="bobteam2", label="bobteam2", owner="alice2") - session.add(group) - yield group - - -@pytest.fixture(scope="module") -def org(db): - def factory(label: str, owner: str, SamlGroupName: str) -> models.Organization: - with db.scoped_session() as session: - org = models.Organization( - 
label=label, - owner=owner, - tags=[], - description="desc", - SamlGroupName=SamlGroupName, - ) - session.add(org) - session.commit() - return org - - yield factory - - -@pytest.fixture(scope="module") -def environment(db): - def factory( - organization: models.Organization, - awsAccountId: str, - label: str, - owner: str, - samlGroupName: str, - environmentDefaultIAMRoleName: str, - dashboardsEnabled: bool = False, - ) -> models.Environment: - with db.scoped_session() as session: - env = models.Environment( - organizationUri=organization.organizationUri, - AwsAccountId=awsAccountId, - region="eu-central-1", - label=label, - owner=owner, - tags=[], - description="desc", - SamlGroupName=samlGroupName, - EnvironmentDefaultIAMRoleName=environmentDefaultIAMRoleName, - EnvironmentDefaultIAMRoleArn=f"arn:aws:iam::{awsAccountId}:role/{environmentDefaultIAMRoleName}", - CDKRoleArn=f"arn:aws::{awsAccountId}:role/EnvRole", - dashboardsEnabled=dashboardsEnabled, - ) - session.add(env) - session.commit() - return env - - yield factory - - -@pytest.fixture(scope="module") -def environment_group(db): - def factory( - environment: models.Environment, - group: models.Group, - ) -> models.EnvironmentGroup: - with db.scoped_session() as session: - - env_group = models.EnvironmentGroup( - environmentUri=environment.environmentUri, - groupUri=group.groupUri, - environmentIAMRoleArn=environment.EnvironmentDefaultIAMRoleArn, - environmentIAMRoleName=environment.EnvironmentDefaultIAMRoleName, - environmentAthenaWorkGroup="workgroup", - ) - session.add(env_group) - session.commit() - return env_group - - yield factory - - -@pytest.fixture(scope="module") -def dataset(db): - def factory( - organization: models.Organization, - environment: models.Environment, - label: str, - ) -> models.Dataset: - with db.scoped_session() as session: - dataset = models.Dataset( - organizationUri=organization.organizationUri, - environmentUri=environment.environmentUri, - label=label, - owner=environment.owner, - SamlAdminGroupName=environment.SamlGroupName, - businessOwnerDelegationEmails=["foo@amazon.com"], - name=label, - S3BucketName=label, - GlueDatabaseName="gluedatabase", - KmsAlias="kmsalias", - AwsAccountId=environment.AwsAccountId, - region=environment.region, - IAMDatasetAdminUserArn=f"arn:aws:iam::{environment.AwsAccountId}:user/dataset", - IAMDatasetAdminRoleArn=f"arn:aws:iam::{environment.AwsAccountId}:role/dataset", - ) - session.add(dataset) - session.commit() - return dataset - - yield factory - - -@pytest.fixture(scope="module") -def location(db): - def factory(dataset: models.Dataset, label: str) -> models.DatasetStorageLocation: - - with db.scoped_session() as session: - ds_location = models.DatasetStorageLocation( - name=label, - label=label, - owner=dataset.owner, - datasetUri=dataset.datasetUri, - S3BucketName=dataset.S3BucketName, - region=dataset.region, - AWSAccountId=dataset.AwsAccountId, - S3Prefix=f"{label}", - ) - session.add(ds_location) - return ds_location - - yield factory - - -@pytest.fixture(scope='module') -def table(db): - def factory(dataset: models.Dataset, label: str) -> models.DatasetTable: - - with db.scoped_session() as session: - table = models.DatasetTable( - name=label, - label=label, - owner=dataset.owner, - datasetUri=dataset.datasetUri, - GlueDatabaseName=dataset.GlueDatabaseName, - GlueTableName=label, - region=dataset.region, - AWSAccountId=dataset.AwsAccountId, - S3BucketName=dataset.S3BucketName, - S3Prefix=f'{label}', - ) - session.add(table) - return table - - yield factory - - 
-@pytest.fixture(scope="module") -def share(db): - def factory( - dataset: models.Dataset, - environment: models.Environment, - env_group: models.EnvironmentGroup - ) -> models.ShareObject: - with db.scoped_session() as session: - share = models.ShareObject( - datasetUri=dataset.datasetUri, - environmentUri=environment.environmentUri, - owner="bob", - principalId=environment.SamlGroupName, - principalType=constants.PrincipalType.Group.value, - principalIAMRoleName=env_group.environmentIAMRoleName, - status=constants.ShareObjectStatus.Approved.value, - ) - session.add(share) - session.commit() - return share - - yield factory - - -@pytest.fixture(scope="module") -def share_item_folder(db): - def factory( - share: models.ShareObject, - location: models.DatasetStorageLocation, - ) -> models.ShareObjectItem: - with db.scoped_session() as session: - share_item = models.ShareObjectItem( - shareUri=share.shareUri, - owner="alice", - itemUri=location.locationUri, - itemType=constants.ShareableType.StorageLocation.value, - itemName=location.name, - status=constants.ShareItemStatus.Share_Approved.value, - ) - session.add(share_item) - session.commit() - return share_item - - yield factory - -@pytest.fixture(scope="module") -def share_item_table(db): - def factory( - share: models.ShareObject, - table: models.DatasetTable, - status: str, - ) -> models.ShareObjectItem: - with db.scoped_session() as session: - share_item = models.ShareObjectItem( - shareUri=share.shareUri, - owner="alice", - itemUri=table.tableUri, - itemType=constants.ShareableType.Table.value, - itemName=table.name, - status=status, - ) - session.add(share_item) - session.commit() - return share_item - - yield factory diff --git a/tests/tasks/test_catalog_indexer.py b/tests/tasks/test_catalog_indexer.py deleted file mode 100644 index 77090b2d4..000000000 --- a/tests/tasks/test_catalog_indexer.py +++ /dev/null @@ -1,94 +0,0 @@ -import pytest -import dataall - - -@pytest.fixture(scope='module', autouse=True) -def org(db): - with db.scoped_session() as session: - org = dataall.db.models.Organization( - label='org', - owner='alice', - tags=[], - description='desc', - SamlGroupName='admins', - userRoleInOrganization='Owner', - ) - session.add(org) - yield org - - -@pytest.fixture(scope='module', autouse=True) -def env(org, db): - with db.scoped_session() as session: - env = dataall.db.models.Environment( - organizationUri=org.organizationUri, - AwsAccountId='12345678901', - region='eu-west-1', - label='org', - owner='alice', - tags=[], - description='desc', - SamlGroupName='admins', - EnvironmentDefaultIAMRoleName='EnvRole', - EnvironmentDefaultIAMRoleArn='arn:aws::123456789012:role/EnvRole/GlueJobSessionRunner', - CDKRoleArn='arn:aws::123456789012:role/EnvRole', - userRoleInEnvironment='999', - ) - session.add(env) - yield env - - -@pytest.fixture(scope='module', autouse=True) -def sync_dataset(org, env, db): - with db.scoped_session() as session: - dataset = dataall.db.models.Dataset( - organizationUri=org.organizationUri, - environmentUri=env.environmentUri, - label='label', - owner='foo', - SamlAdminGroupName='foo', - businessOwnerDelegationEmails=['foo@amazon.com'], - businessOwnerEmail=['bar@amazon.com'], - name='name', - S3BucketName='S3BucketName', - GlueDatabaseName='GlueDatabaseName', - KmsAlias='kmsalias', - AwsAccountId='123456789012', - region='eu-west-1', - IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', - IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', - ) - session.add(dataset) - yield 
dataset - - -@pytest.fixture(scope='module', autouse=True) -def table(org, env, db, sync_dataset): - with db.scoped_session() as session: - table = dataall.db.models.DatasetTable( - datasetUri=sync_dataset.datasetUri, - AWSAccountId='12345678901', - S3Prefix='S3prefix', - label='label', - owner='foo', - name='name', - GlueTableName='table1', - S3BucketName='S3BucketName', - GlueDatabaseName='GlueDatabaseName', - region='eu-west-1', - ) - session.add(table) - yield table - - -def test_catalog_indexer(db, org, env, sync_dataset, table, mocker): - mocker.patch( - 'dataall.searchproxy.indexers.upsert_dataset_tables', return_value=[table] - ) - mocker.patch( - 'dataall.searchproxy.indexers.upsert_dataset', return_value=sync_dataset - ) - indexed_objects_counter = dataall.tasks.catalog_indexer.index_objects( - engine=db, es=True - ) - assert indexed_objects_counter == 2 diff --git a/tests/tasks/test_lf_share_manager.py b/tests/tasks/test_lf_share_manager.py deleted file mode 100644 index bee190258..000000000 --- a/tests/tasks/test_lf_share_manager.py +++ /dev/null @@ -1,701 +0,0 @@ -""" -Testing LF manager class methods invoked in same account and cross account LF share processors. -Remarks - -""" -import boto3 -import pytest - -from typing import Callable - -from dataall.db import models -from dataall.api import constants - -from dataall.tasks.data_sharing.share_processors.lf_process_cross_account_share import ProcessLFCrossAccountShare -from dataall.tasks.data_sharing.share_processors.lf_process_same_account_share import ProcessLFSameAccountShare -from dataall.utils.alarm_service import AlarmService - - -SOURCE_ENV_ACCOUNT = "1" * 12 -SOURCE_ENV_ROLE_NAME = "dataall-ProducerEnvironment-i6v1v1c2" - - -TARGET_ACCOUNT_ENV = "2" * 12 -TARGET_ACCOUNT_ENV_ROLE_NAME = "dataall-ConsumersEnvironment-r71ucp4m" - - -@pytest.fixture(scope="module") -def org1(org: Callable) -> models.Organization: - yield org( - label="org", - owner="alice", - SamlGroupName="admins" - ) - - -@pytest.fixture(scope="module") -def source_environment(environment: Callable, org1: models.Organization, group: models.Group) -> models.Environment: - yield environment( - organization=org1, - awsAccountId=SOURCE_ENV_ACCOUNT, - label="source_environment", - owner=group.owner, - samlGroupName=group.name, - environmentDefaultIAMRoleName=SOURCE_ENV_ROLE_NAME, - ) - - -@pytest.fixture(scope="module") -def source_environment_group(environment_group: Callable, source_environment: models.Environment, - group: models.Group) -> models.EnvironmentGroup: - yield environment_group( - environment=source_environment, - group=group - ) - - -@pytest.fixture(scope="module") -def source_environment_group_requesters(environment_group: Callable, source_environment: models.Environment, - group2: models.Group) -> models.EnvironmentGroup: - yield environment_group( - environment=source_environment, - group=group2 - ) - - -@pytest.fixture(scope="module") -def target_environment(environment: Callable, org1: models.Organization, group2: models.Group) -> models.Environment: - yield environment( - organization=org1, - awsAccountId=TARGET_ACCOUNT_ENV, - label="target_environment", - owner=group2.owner, - samlGroupName=group2.name, - environmentDefaultIAMRoleName=TARGET_ACCOUNT_ENV_ROLE_NAME, - ) - - -@pytest.fixture(scope="module") -def target_environment_group(environment_group: Callable, target_environment: models.Environment, - group2: models.Group) -> models.EnvironmentGroup: - yield environment_group( - environment=target_environment, - group=group2 - ) - - 
-@pytest.fixture(scope="module") -def dataset1(dataset: Callable, org1: models.Organization, source_environment: models.Environment) -> models.Dataset: - yield dataset( - organization=org1, - environment=source_environment, - label="dataset1" - ) - - -@pytest.fixture(scope="module") -def table1(table: Callable, dataset1: models.Dataset) -> models.DatasetTable: - yield table( - dataset=dataset1, - label="table1" - ) - - -@pytest.fixture(scope="module") -def table2(table: Callable, dataset1: models.Dataset) -> models.DatasetTable: - yield table( - dataset=dataset1, - label="table2" - ) - - -@pytest.fixture(scope="module") -def share_same_account( - share: Callable, dataset1: models.Dataset, source_environment: models.Environment, - source_environment_group_requesters: models.EnvironmentGroup) -> models.ShareObject: - yield share( - dataset=dataset1, - environment=source_environment, - env_group=source_environment_group_requesters - ) - - -@pytest.fixture(scope="module") -def share_cross_account( - share: Callable, dataset1: models.Dataset, target_environment: models.Environment, - target_environment_group: models.EnvironmentGroup) -> models.ShareObject: - yield share( - dataset=dataset1, - environment=target_environment, - env_group=target_environment_group - ) - - -@pytest.fixture(scope="module") -def share_item_same_account(share_item_table: Callable, share_same_account: models.ShareObject, - table1: models.DatasetTable) -> models.ShareObjectItem: - yield share_item_table( - share=share_same_account, - table=table1, - status=constants.ShareItemStatus.Share_Approved.value - ) - -@pytest.fixture(scope="module") -def revoke_item_same_account(share_item_table: Callable, share_same_account: models.ShareObject, - table2: models.DatasetTable) -> models.ShareObjectItem: - yield share_item_table( - share=share_same_account, - table=table2, - status=constants.ShareItemStatus.Revoke_Approved.value - ) - -@pytest.fixture(scope="module") -def share_item_cross_account(share_item_table: Callable, share_cross_account: models.ShareObject, - table1: models.DatasetTable) -> models.ShareObjectItem: - yield share_item_table( - share=share_cross_account, - table=table1, - status=constants.ShareItemStatus.Share_Approved.value - ) - -@pytest.fixture(scope="module") -def revoke_item_cross_account(share_item_table: Callable, share_cross_account: models.ShareObject, - table2: models.DatasetTable) -> models.ShareObjectItem: - yield share_item_table( - share=share_cross_account, - table=table2, - status=constants.ShareItemStatus.Revoke_Approved.value - ) - -@pytest.fixture(scope="module", autouse=True) -def processor_cross_account(db, dataset1, share_cross_account, table1, table2, source_environment, target_environment, - target_environment_group): - with db.scoped_session() as session: - processor = ProcessLFCrossAccountShare( - session, - dataset1, - share_cross_account, - [table1], - [table2], - source_environment, - target_environment, - target_environment_group, - ) - yield processor - -@pytest.fixture(scope="module", autouse=True) -def processor_same_account(db, dataset1, share_same_account, table1, source_environment, - source_environment_group_requesters): - with db.scoped_session() as session: - processor = ProcessLFSameAccountShare( - session, - dataset1, - share_same_account, - [table1], - [table2], - source_environment, - source_environment, - source_environment_group_requesters, - ) - yield processor - - -def test_init(processor_same_account, processor_cross_account): - assert 
processor_same_account.dataset - assert processor_same_account.share - - -def test_build_shared_db_name( - processor_same_account: ProcessLFSameAccountShare, - processor_cross_account: ProcessLFCrossAccountShare, - dataset1: models.Dataset, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, -): - # Given a dataset and its share, build db_share name - # Then, it should return - assert processor_same_account.build_shared_db_name() == (dataset1.GlueDatabaseName + '_shared_' + share_same_account.shareUri)[:254] - assert processor_cross_account.build_shared_db_name() == (dataset1.GlueDatabaseName + '_shared_' + share_cross_account.shareUri)[:254] - - -def test_get_share_principals( - processor_same_account: ProcessLFSameAccountShare, - processor_cross_account: ProcessLFCrossAccountShare, - source_environment: models.Environment, - target_environment: models.Environment, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, -): - # Given a dataset and its share, build db_share name - # Then, it should return - assert processor_same_account.get_share_principals() == [f"arn:aws:iam::{source_environment.AwsAccountId}:role/{share_same_account.principalIAMRoleName}"] - assert processor_cross_account.get_share_principals() == [f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_cross_account.principalIAMRoleName}"] - - -def test_create_shared_database( - db, - processor_same_account: ProcessLFSameAccountShare, - processor_cross_account: ProcessLFCrossAccountShare, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, - source_environment: models.Environment, - target_environment: models.Environment, - dataset1: models.Dataset, - mocker, -): - create_db_mock = mocker.patch( - "dataall.aws.handlers.glue.Glue.create_database", - return_value=True, - ) - lf_mock_pr = mocker.patch( - "dataall.aws.handlers.lakeformation.LakeFormation.grant_pivot_role_all_database_permissions", - return_value=True, - ) - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.remote_session", - return_value=boto3.Session(), - ) - lf_mock = mocker.patch( - "dataall.aws.handlers.lakeformation.LakeFormation.grant_permissions_to_database", - return_value=True, - ) - # When - processor_same_account.create_shared_database( - target_environment=source_environment, - dataset=dataset1, - shared_db_name=(dataset1.GlueDatabaseName + '_shared_' + share_same_account.shareUri)[:254], - principals=[f"arn:aws:iam::{source_environment.AwsAccountId}:role/{share_same_account.principalIAMRoleName}"] - ) - - # Then - create_db_mock.assert_called_once() - lf_mock_pr.assert_called_once() - lf_mock.assert_called_once() - - # Reset mocks - create_db_mock.reset_mock() - lf_mock_pr.reset_mock() - lf_mock.reset_mock() - - # When - processor_cross_account.create_shared_database( - target_environment=target_environment, - dataset=dataset1, - shared_db_name=(dataset1.GlueDatabaseName + '_shared_' + share_cross_account.shareUri)[:254], - principals=[f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_cross_account.principalIAMRoleName}"] - ) - - # Then - create_db_mock.assert_called_once() - lf_mock_pr.assert_called_once() - lf_mock.assert_called_once() - -def test_check_share_item_exists_on_glue_catalog( - db, - processor_same_account: ProcessLFSameAccountShare, - processor_cross_account: ProcessLFCrossAccountShare, - table1: models.DatasetTable, - share_item_same_account: models.ShareObjectItem, - share_item_cross_account: 
models.ShareObjectItem, - mocker, -): - - glue_mock = mocker.patch( - "dataall.aws.handlers.glue.Glue.table_exists", - return_value=True, - ) - # When - processor_same_account.check_share_item_exists_on_glue_catalog( - share_item=share_item_same_account, - table=table1 - ) - # Then - glue_mock.assert_called_once() - glue_mock.reset_mock() - - # When - processor_cross_account.check_share_item_exists_on_glue_catalog( - share_item=share_item_cross_account, - table=table1 - ) - # Then - glue_mock.assert_called_once() - - - -def test_build_share_data( - db, - processor_same_account: ProcessLFSameAccountShare, - processor_cross_account: ProcessLFCrossAccountShare, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, - source_environment: models.Environment, - target_environment: models.Environment, - dataset1: models.Dataset, - table1: models.DatasetTable, -): - data_same_account = { - 'source': { - 'accountid': source_environment.AwsAccountId, - 'region': source_environment.region, - 'database': table1.GlueDatabaseName, - 'tablename': table1.GlueTableName, - }, - 'target': { - 'accountid': source_environment.AwsAccountId, - 'region': source_environment.region, - 'principals': [f"arn:aws:iam::{source_environment.AwsAccountId}:role/{share_same_account.principalIAMRoleName}"], - 'database': (dataset1.GlueDatabaseName + '_shared_' + share_same_account.shareUri)[:254], - }, - } - - data = processor_same_account.build_share_data(table=table1) - assert data == data_same_account - - data_cross_account = { - 'source': { - 'accountid': source_environment.AwsAccountId, - 'region': source_environment.region, - 'database': table1.GlueDatabaseName, - 'tablename': table1.GlueTableName, - }, - 'target': { - 'accountid': target_environment.AwsAccountId, - 'region': target_environment.region, - 'principals': [f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_cross_account.principalIAMRoleName}"], - 'database': (dataset1.GlueDatabaseName + '_shared_' + share_cross_account.shareUri)[:254], - }, - } - - data = processor_cross_account.build_share_data(table=table1) - assert data == data_cross_account - - -def test_create_resource_link( - db, - processor_same_account: ProcessLFSameAccountShare, - processor_cross_account: ProcessLFCrossAccountShare, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, - source_environment: models.Environment, - target_environment: models.Environment, - dataset1: models.Dataset, - table1: models.DatasetTable, - mocker, -): - sts_mock = mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.remote_session", - return_value=boto3.Session(), - ) - glue_mock = mocker.patch( - "dataall.aws.handlers.glue.Glue.create_resource_link", - return_value=True, - ) - lf_mock_1 = mocker.patch( - "dataall.aws.handlers.lakeformation.LakeFormation.grant_resource_link_permission", - return_value=True, - ) - lf_mock_2 = mocker.patch( - "dataall.aws.handlers.lakeformation.LakeFormation.grant_resource_link_permission_on_target", - return_value=True, - ) - - # When - data_same_account = { - 'source': { - 'accountid': source_environment.AwsAccountId, - 'region': source_environment.region, - 'database': table1.GlueDatabaseName, - 'tablename': table1.GlueTableName, - }, - 'target': { - 'accountid': source_environment.AwsAccountId, - 'region': source_environment.region, - 'principals': [f"arn:aws:iam::{source_environment.AwsAccountId}:role/{share_same_account.principalIAMRoleName}"], - 'database': (dataset1.GlueDatabaseName + '_shared_' + 
share_same_account.shareUri)[:254], - }, - } - processor_same_account.create_resource_link(**data_same_account) - - # Then - sts_mock.assert_called_once() - glue_mock.assert_called_once() - lf_mock_1.assert_called_once() - lf_mock_2.assert_called_once() - - # Reset mocks - sts_mock.reset_mock() - glue_mock.reset_mock() - lf_mock_1.reset_mock() - lf_mock_2.reset_mock() - - - data_cross_account = { - 'source': { - 'accountid': source_environment.AwsAccountId, - 'region': source_environment.region, - 'database': table1.GlueDatabaseName, - 'tablename': table1.GlueTableName, - }, - 'target': { - 'accountid': target_environment.AwsAccountId, - 'region': target_environment.region, - 'principals': [f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_cross_account.principalIAMRoleName}"], - 'database': (dataset1.GlueDatabaseName + '_shared_' + share_cross_account.shareUri)[:254], - }, - } - processor_cross_account.create_resource_link(**data_cross_account) - - # Then - sts_mock.assert_called_once() - glue_mock.assert_called_once() - lf_mock_1.assert_called_once() - lf_mock_2.assert_called_once() - - pass - -def test_revoke_table_resource_link_access( - db, - processor_same_account: ProcessLFSameAccountShare, - processor_cross_account: ProcessLFCrossAccountShare, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, - source_environment: models.Environment, - target_environment: models.Environment, - dataset1: models.Dataset, - table2: models.DatasetTable, - mocker, -): - glue_mock = mocker.patch( - "dataall.aws.handlers.glue.Glue.table_exists", - return_value=True, - ) - - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.remote_session", - return_value=boto3.Session(), - ) - - lf_mock = mocker.patch( - "dataall.aws.handlers.lakeformation.LakeFormation.batch_revoke_permissions", - return_value=True, - ) - - processor_same_account.revoke_table_resource_link_access( - table=table2, - principals=[f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_same_account.principalIAMRoleName}"] - ) - # Then - glue_mock.assert_called_once() - lf_mock.assert_called_once() - - # Reset mocks - glue_mock.reset_mock() - lf_mock.reset_mock() - - processor_cross_account.revoke_table_resource_link_access( - table=table2, - principals=[f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_cross_account.principalIAMRoleName}"], - ) - # Then - glue_mock.assert_called_once() - lf_mock.assert_called_once() - - -def test_revoke_source_table_access( - db, - processor_same_account: ProcessLFSameAccountShare, - processor_cross_account: ProcessLFCrossAccountShare, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, - source_environment: models.Environment, - target_environment: models.Environment, - dataset1: models.Dataset, - table2: models.DatasetTable, - mocker, -): - glue_mock = mocker.patch( - "dataall.aws.handlers.glue.Glue.table_exists", - return_value=True, - ) - - lf_mock = mocker.patch( - "dataall.aws.handlers.lakeformation.LakeFormation.revoke_source_table_access", - return_value=True, - ) - - processor_same_account.revoke_source_table_access( - table=table2, - principals=[f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_same_account.principalIAMRoleName}"] - ) - # Then - glue_mock.assert_called_once() - lf_mock.assert_called_once() - - # Reset mocks - glue_mock.reset_mock() - lf_mock.reset_mock() - - processor_cross_account.revoke_source_table_access( - table=table2, - 
principals=[f"arn:aws:iam::{target_environment.AwsAccountId}:role/{share_cross_account.principalIAMRoleName}"] - ) - # Then - glue_mock.assert_called_once() - lf_mock.assert_called_once() - - -def test_delete_resource_link_table( - db, - processor_same_account: ProcessLFSameAccountShare, - processor_cross_account: ProcessLFCrossAccountShare, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, - source_environment: models.Environment, - target_environment: models.Environment, - dataset1: models.Dataset, - table2: models.DatasetTable, - mocker, -): - glue_mock = mocker.patch( - "dataall.aws.handlers.glue.Glue.table_exists", - return_value=True, - ) - - glue_mock2 = mocker.patch( - "dataall.aws.handlers.glue.Glue.delete_table", - return_value=True, - ) - - - processor_same_account.delete_resource_link_table( - table=table2 - ) - # Then - glue_mock.assert_called_once() - glue_mock2.assert_called_once() - - # Reset mocks - glue_mock.reset_mock() - glue_mock2.reset_mock() - - processor_cross_account.delete_resource_link_table( - table=table2 - ) - # Then - glue_mock.assert_called_once() - glue_mock2.assert_called_once() - - -def test_delete_shared_database( - db, - processor_same_account: ProcessLFSameAccountShare, - processor_cross_account: ProcessLFCrossAccountShare, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, - source_environment: models.Environment, - target_environment: models.Environment, - dataset1: models.Dataset, - table1: models.DatasetTable, - mocker, -): - glue_mock = mocker.patch( - "dataall.aws.handlers.glue.Glue.delete_database", - return_value=True, - ) - - processor_same_account.delete_shared_database() - # Then - glue_mock.assert_called_once() - - # Reset mocks - glue_mock.reset_mock() - - processor_cross_account.delete_shared_database() - # Then - glue_mock.assert_called_once() - - -def test_revoke_external_account_access_on_source_account( - db, - processor_same_account: ProcessLFSameAccountShare, - processor_cross_account: ProcessLFCrossAccountShare, - share_same_account: models.ShareObject, - share_cross_account: models.ShareObject, - source_environment: models.Environment, - target_environment: models.Environment, - dataset1: models.Dataset, - table1: models.DatasetTable, - table2: models.DatasetTable, - mocker, -): - lf_mock = mocker.patch( - "dataall.aws.handlers.lakeformation.LakeFormation.batch_revoke_permissions", - return_value=True, - ) - - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.remote_session", - return_value=boto3.Session(), - ) - - processor_cross_account.revoke_external_account_access_on_source_account() - # Then - lf_mock.assert_called_once() - -def test_handle_share_failure( - db, - processor_same_account: ProcessLFSameAccountShare, - processor_cross_account: ProcessLFCrossAccountShare, - share_item_same_account: models.ShareObjectItem, - share_item_cross_account: models.ShareObjectItem, - table1: models.DatasetTable, - mocker, -): - - # Given - alarm_service_mock = mocker.patch.object(AlarmService, "trigger_table_sharing_failure_alarm") - error = Exception - - # When - processor_same_account.handle_share_failure(table1, share_item_same_account, error) - - # Then - alarm_service_mock.assert_called_once() - - # Reset mock - alarm_service_mock.reset_mock() - - # When - processor_cross_account.handle_share_failure(table1, share_item_cross_account, error) - - # Then - alarm_service_mock.assert_called_once() - -def test_handle_revoke_failure( - db, - processor_same_account: 
ProcessLFSameAccountShare, - processor_cross_account: ProcessLFCrossAccountShare, - revoke_item_same_account: models.ShareObjectItem, - revoke_item_cross_account: models.ShareObjectItem, - table1: models.DatasetTable, - mocker, -): - # Given - alarm_service_mock = mocker.patch.object(AlarmService, "trigger_revoke_table_sharing_failure_alarm") - error = Exception - - # When - processor_same_account.handle_revoke_failure(table1, revoke_item_same_account, error) - - # Then - alarm_service_mock.assert_called_once() - - # Reset mock - alarm_service_mock.reset_mock() - - # When - processor_cross_account.handle_revoke_failure(table1, revoke_item_cross_account, error) - - # Then - alarm_service_mock.assert_called_once() diff --git a/tests/tasks/test_policies.py b/tests/tasks/test_policies.py deleted file mode 100644 index d51cc2ac7..000000000 --- a/tests/tasks/test_policies.py +++ /dev/null @@ -1,153 +0,0 @@ -from dataall.api.constants import OrganisationUserRole -from dataall.tasks.bucket_policy_updater import BucketPoliciesUpdater -import pytest -import dataall - - -@pytest.fixture(scope='module', autouse=True) -def org(db): - with db.scoped_session() as session: - org = dataall.db.models.Organization( - label='org', - owner='alice', - tags=[], - description='desc', - SamlGroupName='admins', - userRoleInOrganization=OrganisationUserRole.Owner.value, - ) - session.add(org) - yield org - - -@pytest.fixture(scope='module', autouse=True) -def env(org, db): - with db.scoped_session() as session: - env = dataall.db.models.Environment( - organizationUri=org.organizationUri, - AwsAccountId='12345678901', - region='eu-west-1', - label='org', - owner='alice', - tags=[], - description='desc', - SamlGroupName='admins', - EnvironmentDefaultIAMRoleName='EnvRole', - EnvironmentDefaultIAMRoleArn='arn:aws::123456789012:role/EnvRole/GlueJobSessionRunner', - CDKRoleArn='arn:aws::123456789012:role/EnvRole', - userRoleInEnvironment='999', - ) - session.add(env) - yield env - - -@pytest.fixture(scope='module', autouse=True) -def sync_dataset(org, env, db): - with db.scoped_session() as session: - dataset = dataall.db.models.Dataset( - organizationUri=org.organizationUri, - environmentUri=env.environmentUri, - label='label', - owner='foo', - SamlAdminGroupName='foo', - businessOwnerDelegationEmails=['foo@amazon.com'], - businessOwnerEmail=['bar@amazon.com'], - name='name', - S3BucketName='S3BucketName', - GlueDatabaseName='GlueDatabaseName', - KmsAlias='kmsalias', - AwsAccountId='123456789012', - region='eu-west-1', - IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', - IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', - imported=True, - ) - session.add(dataset) - yield dataset - - -@pytest.fixture(scope='module', autouse=True) -def table(org, env, db, sync_dataset): - with db.scoped_session() as session: - table = dataall.db.models.DatasetTable( - datasetUri=sync_dataset.datasetUri, - AWSAccountId='12345678901', - S3Prefix='S3prefix', - label='label', - owner='foo', - name='name', - GlueTableName='table1', - S3BucketName='S3BucketName', - GlueDatabaseName='GlueDatabaseName', - region='eu-west-1', - ) - session.add(table) - yield table - - -def test_prefix_delta(): - s = 's3://insite-data-lake-core-alpha-eu-west-1/forecast/ship_plan/insite_version=0.1/insite_region_id=2/ship_plan.delta/_symlink_format_manifest/*' - delta_path = s.split('/_symlink_format_manifest')[0].split('/')[-1] - prefix = s.split(f'/{delta_path}')[0] - assert ( - prefix - == 
's3://insite-data-lake-core-alpha-eu-west-1/forecast/ship_plan/insite_version=0.1/insite_region_id=2' - ) - prefix = 'arn:aws:s3:::insite-data-lake-core-alpha-eu-west-1/forecast/ship_plan/insite_version=0.1/insite_region_id=2' - bucket = prefix.split('arn:aws:s3:::')[1].split('/')[0] - assert bucket == 'insite-data-lake-core-alpha-eu-west-1' - - -def test_group_prefixes_by_accountid(db, mocker): - statements = {} - updater = BucketPoliciesUpdater(db) - updater.group_prefixes_by_accountid('675534', 'prefix1', statements) - updater.group_prefixes_by_accountid('675534', 'prefix2', statements) - updater.group_prefixes_by_accountid('675534', 'prefix3', statements) - updater.group_prefixes_by_accountid('675534', 'prefix3', statements) - updater.group_prefixes_by_accountid('3455', 'prefix4', statements) - assert len(set(statements['675534'])) == 3 - policy = { - 'Version': '2012-10-17', - 'Statement': [ - { - 'Sid': f'OwnerAccount', - 'Effect': 'Allow', - 'Action': ['s3:*'], - 'Resource': [ - f'arn:aws:s3:::', - f'arn:aws:s3:::', - ], - 'Principal': {'AWS': f'arn:aws:iam::root'}, - }, - { - 'Sid': f'DH675534', - 'Effect': 'Allow', - 'Action': ['s3:*'], - 'Resource': [ - f'prefix3', - f'prefix2', - ], - 'Principal': {'AWS': '675534'}, - }, - ], - } - BucketPoliciesUpdater.update_policy(statements, policy) - assert policy - - -def test_handler(org, env, db, sync_dataset, mocker): - mocker.patch( - 'dataall.tasks.bucket_policy_updater.BucketPoliciesUpdater.init_s3_client', - return_value=True, - ) - mocker.patch( - 'dataall.tasks.bucket_policy_updater.BucketPoliciesUpdater.get_bucket_policy', - return_value={'Version': '2012-10-17', 'Statement': []}, - ) - mocker.patch( - 'dataall.tasks.bucket_policy_updater.BucketPoliciesUpdater.put_bucket_policy', - return_value={'status': 'SUCCEEDED'}, - ) - updater = BucketPoliciesUpdater(db) - assert len(updater.sync_imported_datasets_bucket_policies()) == 1 - assert updater.sync_imported_datasets_bucket_policies()[0]['status'] == 'SUCCEEDED' diff --git a/tests/tasks/test_s3_share_manager.py b/tests/tasks/test_s3_share_manager.py deleted file mode 100644 index 53c7f426b..000000000 --- a/tests/tasks/test_s3_share_manager.py +++ /dev/null @@ -1,1486 +0,0 @@ -import pytest -import json - -from typing import Callable - -from dataall.db import models - -from dataall.tasks.data_sharing.share_managers.s3_share_manager import S3ShareManager -from dataall.utils.alarm_service import AlarmService - - -SOURCE_ENV_ACCOUNT = "111111111111" -SOURCE_ENV_ROLE_NAME = "dataall-ProducerEnvironment-i6v1v1c2" - - -TARGET_ACCOUNT_ENV = "222222222222" -TARGET_ACCOUNT_ENV_ROLE_NAME = "dataall-ConsumersEnvironment-r71ucp4m" - - -@pytest.fixture(scope="module") -def org1(org: Callable) -> models.Organization: - org1 = org(label="org", owner="alice", SamlGroupName="admins") - yield org1 - - -@pytest.fixture(scope="module") -def source_environment(environment: Callable, org1: models.Organization, group: models.Group): - source_environment = environment( - organization=org1, - awsAccountId=SOURCE_ENV_ACCOUNT, - label="source_environment", - owner=group.owner, - samlGroupName=group.name, - environmentDefaultIAMRoleName=SOURCE_ENV_ROLE_NAME, - ) - yield source_environment - - -@pytest.fixture(scope="module") -def source_environment_group(environment_group: Callable, source_environment: models.Environment, group: models.Group): - source_environment_group = environment_group(source_environment, group) - yield source_environment_group - - -@pytest.fixture(scope="module") -def 
target_environment(environment: Callable, org1: models.Organization, group2: models.Group): - target_environment = environment( - organization=org1, - awsAccountId=TARGET_ACCOUNT_ENV, - label="target_environment", - owner=group2.owner, - samlGroupName=group2.name, - environmentDefaultIAMRoleName=TARGET_ACCOUNT_ENV_ROLE_NAME, - ) - yield target_environment - - -@pytest.fixture(scope="module") -def target_environment_group(environment_group: Callable, target_environment: models.Environment, group2: models.Group): - target_environment_group = environment_group(target_environment, group2) - yield target_environment_group - - -@pytest.fixture(scope="module") -def dataset1(dataset: Callable, org1: models.Organization, source_environment: models.Environment): - dataset1 = dataset(org1, source_environment, "dataset1") - yield dataset1 - - -@pytest.fixture(scope="module") -def location1(location: Callable, dataset1: models.Dataset) -> models.DatasetStorageLocation: - yield location(dataset1, "location1") - - -@pytest.fixture(scope="module") -def share1(share: Callable, dataset1: models.Dataset, - target_environment: models.Environment, - target_environment_group: models.EnvironmentGroup) -> models.ShareObject: - share1 = share(dataset1, target_environment, target_environment_group) - yield share1 - - -@pytest.fixture(scope="module") -def share_item_folder1(share_item_folder: Callable, share1: models.ShareObject, location1: models.DatasetStorageLocation): - share_item_folder1 = share_item_folder(share1, location1) - return share_item_folder1 - - -@pytest.fixture(scope="module") -def base_bucket_policy(): - bucket_policy = { - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Deny", - "Principal": {"AWS": "*"}, - "Action": "s3:*", - "Resource": ["arn:aws:s3:::dataall-iris-test-120922-4s47wv71", "arn:aws:s3:::dataall-iris-test-120922-4s47wv71/*"], - "Condition": {"Bool": {"aws:SecureTransport": "false"}}, - }, - { - "Effect": "Allow", - "Principal": {"AWS": "arn:aws:iam::111111111111:root"}, - "Action": "s3:*", - "Resource": "arn:aws:s3:::dataall-iris-test-120922-4s47wv71", - }, - ], - } - return bucket_policy - - -@pytest.fixture(scope="module") -def admin_ap_delegation_bucket_policy(): - bucket_policy = { - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Deny", - "Principal": {"AWS": "*"}, - "Action": "s3:*", - "Resource": ["arn:aws:s3:::dataall-iris-test-120922-4s47wv71", "arn:aws:s3:::dataall-iris-test-120922-4s47wv71/*"], - "Condition": {"Bool": {"aws:SecureTransport": "false"}}, - }, - { - "Effect": "Allow", - "Principal": {"AWS": "arn:aws:iam::111111111111:root"}, - "Action": "s3:*", - "Resource": "arn:aws:s3:::dataall-iris-test-120922-4s47wv71", - }, - { - "Sid": "AllowAllToAdmin", - "Effect": "Allow", - "Principal": "*", - "Action": "s3:*", - "Resource": ["arn:aws:s3:::bucket-name", "arn:aws:s3:::bucket-name/*"], - "Condition": {"StringLike": {"aws:userId": "11111"}}, - }, - ], - } - - return bucket_policy - - -@pytest.fixture(scope="module") -def target_dataset_access_control_policy(request): - - iam_policy = { - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Allow", - "Action": ["s3:*"], - "Resource": [ - f"arn:aws:s3:::{request.param[0]}", - f"arn:aws:s3:::{request.param[0]}/*", - f"arn:aws:s3:datasetregion:{request.param[1]}:accesspoint/{request.param[2]}", - f"arn:aws:s3:datasetregion:{request.param[1]}:accesspoint/{request.param[2]}/*", - ], - } - ], - } - - return iam_policy - - -def test_manage_bucket_policy_no_policy( - mocker, - 
source_environment_group, - target_environment_group, - dataset1, - db, - share1: models.ShareObject, - share_item_folder1, - location1, - source_environment: models.Environment, - target_environment: models.Environment, - base_bucket_policy, -): - - # Given - bucket_policy = base_bucket_policy - - mocker.patch( - "dataall.aws.handlers.s3.S3.get_bucket_policy", - return_value=json.dumps(bucket_policy), - ) - - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.get_delegation_role_arn", - return_value="arn:role", - ) - - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.get_role_ids", - return_value=[1, 2, 3], - ) - - s3_create_bucket_mock = mocker.patch( - "dataall.aws.handlers.s3.S3.create_bucket_policy", - return_value=None, - ) - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.manage_bucket_policy() - - created_bucket_policy = json.loads(s3_create_bucket_mock.call_args.args[3]) - - # Then - print(f"Bucket policy generated {created_bucket_policy}") - - sid_list = [statement.get("Sid") for statement in - created_bucket_policy["Statement"] if statement.get("Sid")] - - assert "AllowAllToAdmin" in sid_list - assert "DelegateAccessToAccessPoint" in sid_list - - -def test_manage_bucket_policy_existing_policy( - mocker, - source_environment_group, - target_environment_group, - dataset1, - db, - share1: models.ShareObject, - share_item_folder1, - location1, - source_environment: models.Environment, - target_environment: models.Environment, - admin_ap_delegation_bucket_policy, -): - - # Given - bucket_policy = admin_ap_delegation_bucket_policy - - mocker.patch( - "dataall.aws.handlers.s3.S3.get_bucket_policy", - return_value=json.dumps(bucket_policy), - ) - - s3_create_bucket_mock = mocker.patch( - "dataall.aws.handlers.s3.S3.create_bucket_policy", - return_value=None, - ) - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.manage_bucket_policy() - - # Then - s3_create_bucket_mock.assert_not_called() - - -@pytest.mark.parametrize("target_dataset_access_control_policy", - ([("bucketname", "aws_account_id", "access_point_name")]), - indirect=True) -def test_grant_target_role_access_policy_existing_policy_bucket_not_included( - mocker, - source_environment_group, - target_environment_group, - dataset1, - db, - share1: models.ShareObject, - share_item_folder1, - location1, - source_environment: models.Environment, - target_environment: models.Environment, - target_dataset_access_control_policy, -): - - # Given - iam_policy = target_dataset_access_control_policy - - mocker.patch( - "dataall.aws.handlers.iam.IAM.get_role_policy", - return_value=iam_policy, - ) - - iam_update_role_policy_mock = mocker.patch( - "dataall.aws.handlers.iam.IAM.update_role_policy", - return_value=None, - ) - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.grant_target_role_access_policy() - - # Then - iam_update_role_policy_mock.assert_called() - - # Iam function is called with str from object so we transform back to object - policy_object = 
json.loads(iam_update_role_policy_mock.call_args.args[3]) - - # Assert that bucket_name is inside the resource array of policy object - assert location1.S3BucketName in ",".join(policy_object["Statement"][0]["Resource"]) - - -@pytest.mark.parametrize("target_dataset_access_control_policy", ([("dataset1", SOURCE_ENV_ACCOUNT, "test")]), indirect=True) -def test_grant_target_role_access_policy_existing_policy_bucket_included( - mocker, - source_environment_group, - target_environment_group, - dataset1, - db, - share1: models.ShareObject, - share_item_folder1, - location1, - source_environment: models.Environment, - target_environment: models.Environment, - target_dataset_access_control_policy, -): - - # Given - iam_policy = target_dataset_access_control_policy - - mocker.patch( - "dataall.aws.handlers.iam.IAM.get_role_policy", - return_value=iam_policy, - ) - - iam_update_role_policy_mock = mocker.patch( - "dataall.aws.handlers.iam.IAM.update_role_policy", - return_value=None, - ) - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.grant_target_role_access_policy() - - # Then - iam_update_role_policy_mock.assert_not_called() - - -def test_grant_target_role_access_policy_test_no_policy( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - - # Given - mocker.patch( - "dataall.aws.handlers.iam.IAM.get_role_policy", - return_value=None, - ) - - iam_update_role_policy_mock = mocker.patch( - "dataall.aws.handlers.iam.IAM.update_role_policy", - return_value=None, - ) - - expected_policy = { - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Allow", - "Action": ["s3:*"], - "Resource": [ - f"arn:aws:s3:::{location1.S3BucketName}", - f"arn:aws:s3:::{location1.S3BucketName}/*", - f"arn:aws:s3:{dataset1.region}:{dataset1.AwsAccountId}:accesspoint/{share_item_folder1.S3AccessPointName}", - f"arn:aws:s3:{dataset1.region}:{dataset1.AwsAccountId}:accesspoint/{share_item_folder1.S3AccessPointName}/*", - ], - } - ], - } - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.grant_target_role_access_policy() - - # Then - iam_update_role_policy_mock.assert_called_with( - target_environment.AwsAccountId, share1.principalIAMRoleName, - "targetDatasetAccessControlPolicy", json.dumps(expected_policy) - ) - - -def test_update_dataset_bucket_key_policy_with_env_admin( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given - mocker.patch( - "dataall.aws.handlers.kms.KMS.get_key_id", - return_value=None, - ) - - existing_key_policy = { - "Version": "2012-10-17", - "Statement": [ - { - "Sid": f"{target_environment.SamlGroupName}", - 
"Effect": "Allow", - "Principal": {"AWS": "*"}, - "Action": "kms:Decrypt", - "Resource": "*", - "Condition": {"StringLike": {"aws:userId": f"{target_environment.SamlGroupName}:*"}}, - } - ], - } - - mocker.patch( - "dataall.aws.handlers.kms.KMS.get_key_policy", - return_value=json.dumps(existing_key_policy), - ) - - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.get_role_id", - return_value=target_environment.SamlGroupName, - ) - - kms_put_key_policy_mock = mocker.patch( - "dataall.aws.handlers.kms.KMS.put_key_policy", - return_value=None, - ) - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.update_dataset_bucket_key_policy() - - # Then - kms_put_key_policy_mock.assert_not_called() - - -def _generate_ap_policy_object( - access_point_arn: str, - env_admin_prefix_list: list, -): - new_ap_policy = { - "Version": "2012-10-17", - "Statement": [ - { - "Sid": "AllowAllToAdmin", - "Effect": "Allow", - "Principal": "*", - "Action": "s3:*", - "Resource": "access-point-arn", - "Condition": {"StringLike": {"aws:userId": ["dataset_admin_role_id:*", "source_env_admin_role_id:*", "source_account_pivot_role_id:*"]}}, - }, - ], - } - - for statement in env_admin_prefix_list: - first_half = { - "Sid": f"{statement[0]}0", - "Effect": "Allow", - "Principal": {"AWS": "*"}, - "Action": "s3:ListBucket", - "Resource": f"{access_point_arn}", - "Condition": {"StringLike": {"s3:prefix": [], "aws:userId": [f"{statement[0]}"]}}, - } - second_half = { - "Sid": f"{statement[0]}1", - "Effect": "Allow", - "Principal": {"AWS": "*"}, - "Action": "s3:GetObject", - "Resource": [], - "Condition": {"StringLike": {"aws:userId": [f"{statement[0]}:*"]}}, - } - prefix_list = [] - for prefix in statement[1]: - prefix_list.append(f"{prefix}/*") - second_half["Resource"].append(f"{access_point_arn}/object/{prefix}/*") - - if len(prefix_list) > 1: - first_half["Condition"]["StringLike"]["s3:prefix"] = prefix_list - else: - first_half["Condition"]["StringLike"]["s3:prefix"] = prefix_list[0] - - new_ap_policy["Statement"].append(first_half) - new_ap_policy["Statement"].append(second_half) - - return new_ap_policy - - -def test_update_dataset_bucket_key_policy_without_env_admin( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given - mocker.patch( - "dataall.aws.handlers.kms.KMS.get_key_id", - return_value="kms-key", - ) - - existing_key_policy = { - "Version": "2012-10-17", - "Statement": [ - { - "Sid": "different_env_admin_id", - "Effect": "Allow", - "Principal": {"AWS": "*"}, - "Action": "kms:Decrypt", - "Resource": "*", - "Condition": {"StringLike": {"aws:userId": "different_env_admin_id:*"}}, - } - ], - } - - mocker.patch( - "dataall.aws.handlers.kms.KMS.get_key_policy", - return_value=json.dumps(existing_key_policy), - ) - - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.get_role_id", - return_value=target_environment.SamlGroupName, - ) - - new_key_policy = { - "Sid": f"{target_environment.SamlGroupName}", - "Effect": "Allow", - "Principal": {"AWS": "*"}, - "Action": "kms:Decrypt", - "Resource": "*", - "Condition": 
{"StringLike": {"aws:userId": f"{target_environment.SamlGroupName}:*"}}, - } - - kms_put_key_policy_mock = mocker.patch( - "dataall.aws.handlers.kms.KMS.put_key_policy", - return_value=None, - ) - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.update_dataset_bucket_key_policy() - - existing_key_policy["Statement"].append(new_key_policy) - - expected_complete_key_policy = existing_key_policy - - # Then - kms_put_key_policy_mock.assert_called_with(source_environment.AwsAccountId, "eu-central-1", "kms-key", "default", json.dumps(expected_complete_key_policy)) - - -# NO existing Access point and ap policy -def test_manage_access_point_and_policy_1( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given - mocker.patch( - "dataall.aws.handlers.s3.S3.get_bucket_access_point_arn", - return_value=None, - ) - - s3_create_bucket_access_point_mock = mocker.patch( - "dataall.aws.handlers.s3.S3.create_bucket_access_point", - return_value="new-access-point-arn", - ) - - mocker.patch( - "dataall.aws.handlers.s3.S3.get_bucket_access_point_arn", - return_value="new-access-point-arn" - ) - - mocker.patch( - "dataall.aws.handlers.s3.S3.get_access_point_policy", - return_value=None, - ) - - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.get_role_id", - return_value=target_environment.SamlGroupName, - ) - - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.get_role_ids", - return_value=["dataset_admin_role_id:*", "source_env_admin_role_id:*" "source_account_pivot_role_id:*"], - ) - - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.get_delegation_role_arn", - return_value=None, - ) - - s3_attach_access_point_policy_mock = mocker.patch( - "dataall.aws.handlers.s3.S3.attach_access_point_policy", - return_value=None, - ) - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.manage_access_point_and_policy() - - # Then - s3_attach_access_point_policy_mock.assert_called() - policy = s3_attach_access_point_policy_mock.call_args.kwargs.get('policy') - new_ap_policy = json.loads(policy) - - # Asser that access point is in resource - assert new_ap_policy["Statement"][0]["Resource"] == s3_create_bucket_access_point_mock.return_value - - # Assert that listbucket and getobject permissions were added for target environment admin - assert "s3:ListBucket" in [ - statement["Action"] for statement in new_ap_policy["Statement"] if statement["Sid"].startswith(target_environment.SamlGroupName) - ] - assert "s3:GetObject" in [ - statement["Action"] for statement in new_ap_policy["Statement"] if statement["Sid"].startswith(target_environment.SamlGroupName) - ] - - # Assert AllowAllToAdmin "Sid" exists - assert len([statement for statement in new_ap_policy["Statement"] if statement["Sid"] == "AllowAllToAdmin"]) > 0 - - -# Existing Access point and ap policy -# target_env_admin is already in policy -# current folder is NOT yet in 
prefix_list -def test_manage_access_point_and_policy_2( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given - - # Existing access point - s3_get_bucket_access_point_arn_mock = mocker.patch( - "dataall.aws.handlers.s3.S3.get_bucket_access_point_arn", - return_value="existing-access-point-arn", - ) - - # target_env_admin is already in policy but current folder is NOT yet in prefix_list - existing_ap_policy = _generate_ap_policy_object(s3_get_bucket_access_point_arn_mock.return_value, [[target_environment.SamlGroupName, ["existing-prefix"]]]) - - # Existing access point policy - mocker.patch( - "dataall.aws.handlers.s3.S3.get_access_point_policy", - return_value=json.dumps(existing_ap_policy), - ) - - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.get_role_id", - return_value=target_environment.SamlGroupName, - ) - - s3_attach_access_point_policy_mock = mocker.patch( - "dataall.aws.handlers.s3.S3.attach_access_point_policy", - return_value=None, - ) - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.manage_access_point_and_policy() - - # Then - s3_attach_access_point_policy_mock.assert_called() - policy = s3_attach_access_point_policy_mock.call_args.kwargs.get('policy') - - # Assert S3 Prefix of share folder in prefix_list - new_ap_policy = json.loads(policy) - statements = {item["Sid"]: item for item in new_ap_policy["Statement"]} - prefix_list = statements[f"{target_environment.SamlGroupName}0"]["Condition"]["StringLike"]["s3:prefix"] - - assert f"{location1.S3Prefix}/*" in prefix_list - - # Assert s3 prefix is in resource_list - resource_list = statements[f"{target_environment.SamlGroupName}1"]["Resource"] - - assert f"{s3_get_bucket_access_point_arn_mock.return_value}/object/{location1.S3Prefix}/*" in resource_list - - -# Existing Access point and ap policy -# target_env_admin is NOT already in ap policy -# current folder is NOT yet in prefix_list -def test_manage_access_point_and_policy_3( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given - - # Existing access point - s3_get_bucket_access_point_arn_mock = mocker.patch( - "dataall.aws.handlers.s3.S3.get_bucket_access_point_arn", - return_value="existing-access-point-arn", - ) - - # New target env admin and prefix are not in existing ap policy - existing_ap_policy = _generate_ap_policy_object(s3_get_bucket_access_point_arn_mock.return_value, [["another-env-admin", ["existing-prefix"]]]) - - # Existing access point policy - mocker.patch( - "dataall.aws.handlers.s3.S3.get_access_point_policy", - return_value=json.dumps(existing_ap_policy), - ) - - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.get_role_id", - return_value=target_environment.SamlGroupName, - ) - - s3_attach_access_point_policy_mock = mocker.patch( - 
"dataall.aws.handlers.s3.S3.attach_access_point_policy", - return_value=None, - ) - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.manage_access_point_and_policy() - - # Then - s3_attach_access_point_policy_mock.assert_called() - - # Assert S3 Prefix of share folder in prefix_list - policy = s3_attach_access_point_policy_mock.call_args.kwargs.get('policy') - new_ap_policy = json.loads(policy) - statements = {item["Sid"]: item for item in new_ap_policy["Statement"]} - prefix_list = statements[f"{target_environment.SamlGroupName}0"]["Condition"]["StringLike"]["s3:prefix"] - - assert f"{location1.S3Prefix}/*" in prefix_list - - # Assert s3 prefix is in resource_list - resource_list = statements[f"{target_environment.SamlGroupName}1"]["Resource"] - - assert f"{s3_get_bucket_access_point_arn_mock.return_value}/object/{location1.S3Prefix}/*" in resource_list - - -def test_delete_access_point_policy_with_env_admin_one_prefix( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given - - # Existing access point - s3_get_bucket_access_point_arn_mock = mocker.patch( - "dataall.aws.handlers.s3.S3.get_bucket_access_point_arn", - return_value="existing-access-point-arn", - ) - - # New target env admin and prefix are already in existing ap policy - # Another admin is part of this policy - existing_ap_policy = _generate_ap_policy_object( - s3_get_bucket_access_point_arn_mock.return_value, - [[target_environment.SamlGroupName, [location1.S3Prefix]], ["another-env-admin", [location1.S3Prefix]]], - ) - - mocker.patch( - "dataall.aws.handlers.s3.S3.get_access_point_policy", - return_value=json.dumps(existing_ap_policy), - ) - - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.get_role_id", - return_value=target_environment.SamlGroupName, - ) - - s3_attach_access_point_policy_mock = mocker.patch( - "dataall.aws.handlers.s3.S3.attach_access_point_policy", - return_value=None, - ) - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.delete_access_point_policy() - - # Then - s3_attach_access_point_policy_mock.assert_called() - - # Assert statements for share have been removed - new_ap_policy = json.loads(s3_attach_access_point_policy_mock.call_args.args[3]) - deleted_statements = {item["Sid"]: item for item in new_ap_policy["Statement"] if item["Sid"].startswith(f"{target_environment.SamlGroupName}")} - - assert len(deleted_statements) == 0 - - # Assert other statements are remaining - remaining_statements = {item["Sid"]: item for item in new_ap_policy["Statement"] if not item["Sid"].startswith(f"{target_environment.SamlGroupName}")} - - assert len(remaining_statements) > 0 - - -def test_delete_access_point_policy_with_env_admin_multiple_prefix( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - 
share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given - - s3_get_bucket_access_point_arn_mock = mocker.patch( - "dataall.aws.handlers.s3.S3.get_bucket_access_point_arn", - return_value="existing-access-point-arn", - ) - - existing_ap_policy = _generate_ap_policy_object( - s3_get_bucket_access_point_arn_mock.return_value, - [[target_environment.SamlGroupName, [location1.S3Prefix, "another-prefix"]], ["another-env-admin", [location1.S3Prefix]]], - ) - - mocker.patch( - "dataall.aws.handlers.s3.S3.get_access_point_policy", - return_value=json.dumps(existing_ap_policy), - ) - - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.get_role_id", - return_value=target_environment.SamlGroupName, - ) - - s3_attach_access_point_policy_mock = mocker.patch( - "dataall.aws.handlers.s3.S3.attach_access_point_policy", - return_value=None, - ) - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.delete_access_point_policy() - - # Then - s3_attach_access_point_policy_mock.assert_called() - - # Assert statements for share have been removed - new_ap_policy = json.loads(s3_attach_access_point_policy_mock.call_args.args[3]) - statements = {item["Sid"]: item for item in new_ap_policy["Statement"]} - - remaining_prefix_list = statements[f"{target_environment.SamlGroupName}0"]["Condition"]["StringLike"]["s3:prefix"] - - assert location1.S3Prefix not in remaining_prefix_list - assert "another-prefix/*" in remaining_prefix_list - - -def test_dont_delete_access_point_with_policy( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given - existing_ap_policy = _generate_ap_policy_object("access-point-arn", [[target_environment.SamlGroupName, ["existing-prefix"]]]) - - s3_delete_bucket_access_point_mock = mocker.patch( - "dataall.aws.handlers.s3.S3.get_access_point_policy", - return_value=json.dumps(existing_ap_policy), - ) - - s3_delete_bucket_access_point_mock = mocker.patch( - "dataall.aws.handlers.s3.S3.delete_bucket_access_point", - return_value=None, - ) - - # When - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - is_deleted = manager.delete_access_point(share1, dataset1) - - # Then - assert not is_deleted - assert not s3_delete_bucket_access_point_mock.called - - -def test_delete_access_point_without_policy( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given ap policy that only includes AllowAllToAdminStatement - existing_ap_policy = _generate_ap_policy_object("access-point-arn", []) - - 
s3_delete_bucket_access_point_mock = mocker.patch( - "dataall.aws.handlers.s3.S3.get_access_point_policy", - return_value=json.dumps(existing_ap_policy), - ) - - s3_delete_bucket_access_point_mock = mocker.patch( - "dataall.aws.handlers.s3.S3.delete_bucket_access_point", - return_value=None, - ) - - # When - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - is_deleted = manager.delete_access_point(share1, dataset1) - - # Then - assert is_deleted - assert s3_delete_bucket_access_point_mock.called - - -def test_delete_target_role_access_policy_no_remaining_statement( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given ap policy that only includes AllowAllToAdminStatement - existing_target_role_policy = { - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Allow", - "Action": ["s3:*"], - "Resource": [ - f"arn:aws:s3:::{location1.S3BucketName}", - f"arn:aws:s3:::{location1.S3BucketName}/*", - f"arn:aws:s3:{dataset1.region}:{dataset1.AwsAccountId}:accesspoint/{S3ShareManager.build_access_point_name(share1)}", - f"arn:aws:s3:{dataset1.region}:{dataset1.AwsAccountId}:accesspoint/{S3ShareManager.build_access_point_name(share1)}/*", - ], - } - ], - } - - mocker.patch( - "dataall.aws.handlers.iam.IAM.get_role_policy", - return_value=existing_target_role_policy, - ) - - iam_delete_role_policy_mock = mocker.patch( - "dataall.aws.handlers.iam.IAM.delete_role_policy", - return_value=None, - ) - - iam_update_role_policy_mock = mocker.patch( - "dataall.aws.handlers.iam.IAM.update_role_policy", - return_value=None, - ) - - # When - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.delete_target_role_access_policy(share1, dataset1, target_environment) - - # Then - iam_delete_role_policy_mock.assert_called() - iam_update_role_policy_mock.assert_not_called() - - -def test_delete_target_role_access_policy_with_remaining_statement( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given - # target role policy that has a bucket unrelated to the current bucket to be deleted - existing_target_role_policy = { - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Allow", - "Action": ["s3:*"], - "Resource": [ - "arn:aws:s3:::UNRELATED_BUCKET_ARN", - f"arn:aws:s3:::{location1.S3BucketName}", - f"arn:aws:s3:::{location1.S3BucketName}/*", - f"arn:aws:s3:{dataset1.region}:{dataset1.AwsAccountId}:accesspoint/{S3ShareManager.build_access_point_name(share1)}", - f"arn:aws:s3:{dataset1.region}:{dataset1.AwsAccountId}:accesspoint/{S3ShareManager.build_access_point_name(share1)}/*", - ], - } - ], - } - - 
expected_remaining_target_role_policy = { - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Allow", - "Action": ["s3:*"], - "Resource": ["arn:aws:s3:::UNRELATED_BUCKET_ARN"], - } - ], - } - - mocker.patch( - "dataall.aws.handlers.iam.IAM.get_role_policy", - return_value=existing_target_role_policy, - ) - - iam_delete_role_policy_mock = mocker.patch( - "dataall.aws.handlers.iam.IAM.delete_role_policy", - return_value=None, - ) - - iam_update_role_policy_mock = mocker.patch( - "dataall.aws.handlers.iam.IAM.update_role_policy", - return_value=None, - ) - - # When - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.delete_target_role_access_policy(share1, dataset1, target_environment) - - # Then - iam_delete_role_policy_mock.assert_not_called() - - iam_update_role_policy_mock.assert_called_with( - target_environment.AwsAccountId, - share1.principalIAMRoleName, - "targetDatasetAccessControlPolicy", - json.dumps(expected_remaining_target_role_policy), - ) - - -# The kms key policy includes the target env admin to be removed aswell as one additional target env -# admin, that should remain -def test_delete_dataset_bucket_key_policy_existing_policy_with_additional_target_env( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given - kms_get_key_mock = mocker.patch( - "dataall.aws.handlers.kms.KMS.get_key_id", - return_value="1", - ) - - # Includes target env admin to be removed and another, that should remain - existing_key_policy = { - "Version": "2012-10-17", - "Statement": [ - { - "Sid": f"{target_environment.SamlGroupName}", - "Effect": "Allow", - "Principal": {"AWS": "*"}, - "Action": "kms:Decrypt", - "Resource": "*", - "Condition": {"StringLike": {"aws:userId": f"{target_environment.SamlGroupName}:*"}}, - }, - { - "Sid": "REMAINING_TARGET_ENV_ADMIN_ID", - "Effect": "Allow", - "Principal": {"AWS": "*"}, - "Action": "kms:Decrypt", - "Resource": "*", - "Condition": {"StringLike": {"aws:userId": "REMAINING_TARGET_ENV_ADMIN_ID:*"}}, - }, - ], - } - - remaining_policy = { - "Version": "2012-10-17", - "Statement": [ - { - "Sid": "REMAINING_TARGET_ENV_ADMIN_ID", - "Effect": "Allow", - "Principal": {"AWS": "*"}, - "Action": "kms:Decrypt", - "Resource": "*", - "Condition": {"StringLike": {"aws:userId": "REMAINING_TARGET_ENV_ADMIN_ID:*"}}, - } - ], - } - - mocker.patch( - "dataall.aws.handlers.kms.KMS.get_key_policy", - return_value=json.dumps(existing_key_policy), - ) - - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.get_role_id", - return_value=target_environment.SamlGroupName, - ) - - kms_put_key_policy_mock = mocker.patch( - "dataall.aws.handlers.kms.KMS.put_key_policy", - return_value=None, - ) - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.delete_dataset_bucket_key_policy(share1, dataset1, target_environment) - - # Then - kms_put_key_policy_mock.assert_called() - 
kms_put_key_policy_mock.assert_called_with(source_environment.AwsAccountId, 'eu-central-1', kms_get_key_mock.return_value, "default", json.dumps(remaining_policy)) - - -# The kms key policy only includes the target env admin -def test_delete_dataset_bucket_key_policy_existing_policy_with_no_additional_target_env( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given - kms_get_key_mock = mocker.patch( - "dataall.aws.handlers.kms.KMS.get_key_id", - return_value="1", - ) - - # Includes target env admin to be removed and another, that should remain - existing_key_policy = { - "Version": "2012-10-17", - "Statement": [ - { - "Sid": f"{target_environment.SamlGroupName}", - "Effect": "Allow", - "Principal": {"AWS": "*"}, - "Action": "kms:Decrypt", - "Resource": "*", - "Condition": {"StringLike": {"aws:userId": f"{target_environment.SamlGroupName}:*"}}, - } - ], - } - - remaining_policy = { - "Version": "2012-10-17", - "Statement": [], - } - - mocker.patch( - "dataall.aws.handlers.kms.KMS.get_key_policy", - return_value=json.dumps(existing_key_policy), - ) - - mocker.patch( - "dataall.aws.handlers.sts.SessionHelper.get_role_id", - return_value=target_environment.SamlGroupName, - ) - - kms_put_key_policy_mock = mocker.patch( - "dataall.aws.handlers.kms.KMS.put_key_policy", - return_value=None, - ) - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - # When - manager.delete_dataset_bucket_key_policy(share1, dataset1, target_environment) - - # Then - kms_put_key_policy_mock.assert_called() - kms_put_key_policy_mock.assert_called_with(source_environment.AwsAccountId, 'eu-central-1', kms_get_key_mock.return_value, "default", json.dumps(remaining_policy)) - - -def test_handle_share_failure( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given - alarm_service_mock = mocker.patch.object(AlarmService, "trigger_folder_sharing_failure_alarm") - - with db.scoped_session() as session: - manager = S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - error = Exception - # When - manager.handle_share_failure(error) - - # Then - alarm_service_mock.assert_called() - - -def test_handle_revoke_failure( - mocker, - source_environment_group: models.EnvironmentGroup, - target_environment_group: models.EnvironmentGroup, - dataset1: models.Dataset, - db, - share1: models.ShareObject, - share_item_folder1: models.ShareObjectItem, - location1: models.DatasetStorageLocation, - source_environment: models.Environment, - target_environment: models.Environment, -): - # Given - alarm_service_mock = mocker.patch.object(AlarmService, "trigger_revoke_folder_sharing_failure_alarm") - - with db.scoped_session() as session: - manager = 
S3ShareManager( - session, - dataset1, - share1, - location1, - source_environment, - target_environment, - source_environment_group, - target_environment_group, - ) - - error = Exception - # When - manager.handle_revoke_failure(error) - - # Then - alarm_service_mock.assert_called() diff --git a/tests/tasks/test_stacks_updater.py b/tests/tasks/test_stacks_updater.py deleted file mode 100644 index 1bc63c3c3..000000000 --- a/tests/tasks/test_stacks_updater.py +++ /dev/null @@ -1,75 +0,0 @@ -import pytest -import dataall -from dataall.api.constants import OrganisationUserRole - - -@pytest.fixture(scope='module', autouse=True) -def org(db): - with db.scoped_session() as session: - org = dataall.db.models.Organization( - label='org', - owner='alice', - tags=[], - description='desc', - SamlGroupName='admins', - userRoleInOrganization=OrganisationUserRole.Owner.value, - ) - session.add(org) - yield org - - -@pytest.fixture(scope='module', autouse=True) -def env(org, db): - with db.scoped_session() as session: - env = dataall.db.models.Environment( - organizationUri=org.organizationUri, - AwsAccountId='12345678901', - region='eu-west-1', - label='org', - owner='alice', - tags=[], - description='desc', - SamlGroupName='admins', - EnvironmentDefaultIAMRoleName='EnvRole', - EnvironmentDefaultIAMRoleArn='arn:aws::123456789012:role/EnvRole/GlueJobSessionRunner', - CDKRoleArn='arn:aws::123456789012:role/EnvRole', - userRoleInEnvironment='999', - ) - session.add(env) - yield env - - -@pytest.fixture(scope='module', autouse=True) -def sync_dataset(org, env, db): - with db.scoped_session() as session: - dataset = dataall.db.models.Dataset( - organizationUri=org.organizationUri, - environmentUri=env.environmentUri, - label='label', - owner='foo', - SamlAdminGroupName='foo', - businessOwnerDelegationEmails=['foo@amazon.com'], - businessOwnerEmail=['bar@amazon.com'], - name='name', - S3BucketName='S3BucketName', - GlueDatabaseName='GlueDatabaseName', - KmsAlias='kmsalias', - AwsAccountId='123456789012', - region='eu-west-1', - IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', - IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', - ) - session.add(dataset) - yield dataset - - -def test_stacks_update(db, org, env, sync_dataset, mocker): - mocker.patch( - 'dataall.tasks.stacks_updater.update_stack', - return_value=True, - ) - envs, datasets = dataall.tasks.stacks_updater.update_stacks( - engine=db, envname='local' - ) - assert len(envs) == 1 - assert len(datasets) == 1 diff --git a/tests/tasks/test_subscriptions.py b/tests/tasks/test_subscriptions.py deleted file mode 100644 index 25cd6178a..000000000 --- a/tests/tasks/test_subscriptions.py +++ /dev/null @@ -1,152 +0,0 @@ -import pytest - -import dataall -from dataall.api.constants import OrganisationUserRole - - -@pytest.fixture(scope='module') -def org(db): - with db.scoped_session() as session: - org = dataall.db.models.Organization( - label='org', - owner='alice', - tags=[], - description='desc', - SamlGroupName='admins', - userRoleInOrganization=OrganisationUserRole.Owner.value, - ) - session.add(org) - yield org - - -@pytest.fixture(scope='module') -def env(org, db): - with db.scoped_session() as session: - env = dataall.db.models.Environment( - organizationUri=org.organizationUri, - AwsAccountId='12345678901', - region='eu-west-1', - label='org', - owner='alice', - tags=[], - description='desc', - SamlGroupName='admins', - EnvironmentDefaultIAMRoleName='EnvRole', - 
EnvironmentDefaultIAMRoleArn='arn:aws::123456789012:role/EnvRole/GlueJobSessionRunner', - CDKRoleArn='arn:aws::123456789012:role/EnvRole', - userRoleInEnvironment='999', - ) - session.add(env) - yield env - - -@pytest.fixture(scope='module') -def otherenv(org, db): - with db.scoped_session() as session: - env = dataall.db.models.Environment( - organizationUri=org.organizationUri, - AwsAccountId='987654321', - region='eu-west-1', - label='org', - owner='bob', - tags=[], - description='desc', - SamlGroupName='admins', - EnvironmentDefaultIAMRoleName='EnvRole', - EnvironmentDefaultIAMRoleArn='arn:aws::123456789012:role/EnvRole/GlueJobSessionRunner', - CDKRoleArn='arn:aws::123456789012:role/EnvRole', - userRoleInEnvironment='999', - ) - session.add(env) - yield env - - -@pytest.fixture(scope='module') -def dataset(org, env, db): - with db.scoped_session() as session: - dataset = dataall.db.models.Dataset( - organizationUri=org.organizationUri, - environmentUri=env.environmentUri, - label='label', - owner='alice', - SamlAdminGroupName='foo', - businessOwnerDelegationEmails=['foo@amazon.com'], - businessOwnerEmail=['bar@amazon.com'], - name='name', - S3BucketName='S3BucketName', - GlueDatabaseName='GlueDatabaseName', - KmsAlias='kmsalias', - AwsAccountId='123456789012', - region='eu-west-1', - IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', - IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', - ) - session.add(dataset) - yield dataset - - -@pytest.fixture(scope='module') -def share( - dataset: dataall.db.models.Dataset, - db: dataall.db.Engine, - otherenv: dataall.db.models.Environment, -): - with db.scoped_session() as session: - - table = dataall.db.models.DatasetTable( - label='foo', - name='foo', - owner='alice', - description='test table', - tags=['a', 'b'], - datasetUri=dataset.datasetUri, - tableUri='foo', - S3Prefix='s3://dataset/testtable/csv/', - GlueDatabaseName=dataset.GlueDatabaseName, - GlueTableName='foo', - S3BucketName=dataset.S3BucketName, - AWSAccountId=dataset.AwsAccountId, - region=dataset.region, - ) - session.add(table) - share = dataall.db.models.ShareObject( - datasetUri=dataset.datasetUri, - environmentUri=otherenv.environmentUri, - owner='bob', - principalId='group2', - principalType=dataall.api.constants.PrincipalType.Environment.value, - status=dataall.api.constants.ShareObjectStatus.Approved.value, - ) - session.add(share) - session.commit() - share_item = dataall.db.models.ShareObjectItem( - shareUri=share.shareUri, - owner='alice', - itemUri=table.tableUri, - itemType=dataall.api.constants.ShareableType.Table.value, - itemName=table.GlueTableName, - GlueDatabaseName=table.GlueDatabaseName, - GlueTableName=table.GlueTableName, - status=dataall.api.constants.ShareItemStatus.Share_Approved.value, - ) - session.add(share_item) - - -def test_subscriptions(org, env, otherenv, db, dataset, share, mocker): - mocker.patch( - 'dataall.tasks.subscriptions.subscription_service.SubscriptionService.sns_call', - return_value=True, - ) - subscriber = dataall.tasks.subscriptions.subscription_service.SubscriptionService() - messages = [ - { - 'prefix': 's3://dataset/testtable/csv/', - 'accountid': '123456789012', - 'region': 'eu-west-1', - } - ] - envs = subscriber.get_environments(db) - assert envs - queues = subscriber.get_queues(envs) - assert queues - assert subscriber.notify_consumers(db, messages) diff --git a/tests/tasks/test_tables_sync.py b/tests/tasks/test_tables_sync.py deleted file mode 100644 index 812dda1bd..000000000 --- 
a/tests/tasks/test_tables_sync.py +++ /dev/null @@ -1,166 +0,0 @@ -import pytest -import dataall -from dataall.api.constants import OrganisationUserRole - - -@pytest.fixture(scope='module', autouse=True) -def org(db): - with db.scoped_session() as session: - org = dataall.db.models.Organization( - label='org', - owner='alice', - tags=[], - description='desc', - SamlGroupName='admins', - userRoleInOrganization=OrganisationUserRole.Owner.value, - ) - session.add(org) - yield org - - -@pytest.fixture(scope='module', autouse=True) -def env(org, db): - with db.scoped_session() as session: - env = dataall.db.models.Environment( - organizationUri=org.organizationUri, - AwsAccountId='12345678901', - region='eu-west-1', - label='org', - owner='alice', - tags=[], - description='desc', - SamlGroupName='admins', - EnvironmentDefaultIAMRoleName='EnvRole', - EnvironmentDefaultIAMRoleArn='arn:aws::123456789012:role/EnvRole/GlueJobSessionRunner', - CDKRoleArn='arn:aws::123456789012:role/EnvRole', - userRoleInEnvironment='999', - ) - session.add(env) - session.commit() - yield env - - -@pytest.fixture(scope='module', autouse=True) -def sync_dataset(org, env, db): - with db.scoped_session() as session: - dataset = dataall.db.models.Dataset( - organizationUri=org.organizationUri, - environmentUri=env.environmentUri, - label='label', - owner='foo', - SamlAdminGroupName='foo', - businessOwnerDelegationEmails=['foo@amazon.com'], - businessOwnerEmail=['bar@amazon.com'], - name='name', - S3BucketName='S3BucketName', - GlueDatabaseName='GlueDatabaseName', - KmsAlias='kmsalias', - AwsAccountId='123456789012', - region='eu-west-1', - IAMDatasetAdminUserArn=f'arn:aws:iam::123456789012:user/dataset', - IAMDatasetAdminRoleArn=f'arn:aws:iam::123456789012:role/dataset', - ) - session.add(dataset) - session.commit() - env_group = dataall.db.models.EnvironmentGroup( - environmentUri=env.environmentUri, - groupUri=dataset.SamlAdminGroupName, - environmentIAMRoleArn=env.EnvironmentDefaultIAMRoleArn, - environmentIAMRoleName=env.EnvironmentDefaultIAMRoleName, - environmentAthenaWorkGroup='workgroup', - ) - session.add(env_group) - yield dataset - - -@pytest.fixture(scope='module', autouse=True) -def table(org, env, db, sync_dataset): - with db.scoped_session() as session: - table = dataall.db.models.DatasetTable( - datasetUri=sync_dataset.datasetUri, - AWSAccountId='12345678901', - S3Prefix='S3prefix', - label='label', - owner='foo', - name='name', - GlueTableName='table1', - S3BucketName='S3BucketName', - GlueDatabaseName='GlueDatabaseName', - region='eu-west-1', - ) - session.add(table) - yield table - - -def _test_tables_sync(db, org, env, sync_dataset, table, mocker): - mocker.patch( - 'dataall.aws.handlers.glue.Glue.list_glue_database_tables', - return_value=[ - { - 'Name': 'new_table', - 'DatabaseName': sync_dataset.GlueDatabaseName, - 'StorageDescriptor': { - 'Columns': [ - { - 'Name': 'col1', - 'Type': 'string', - 'Comment': 'comment_col', - 'Parameters': {'colp1': 'p1'}, - }, - ], - 'Location': f's3://{sync_dataset.S3BucketName}/table1', - 'Parameters': {'p1': 'p1'}, - }, - 'PartitionKeys': [ - { - 'Name': 'partition1', - 'Type': 'string', - 'Comment': 'comment_partition', - 'Parameters': {'partition_1': 'p1'}, - }, - ], - }, - { - 'Name': 'table1', - 'DatabaseName': sync_dataset.GlueDatabaseName, - 'StorageDescriptor': { - 'Columns': [ - { - 'Name': 'col1', - 'Type': 'string', - 'Comment': 'comment_col', - 'Parameters': {'colp1': 'p1'}, - }, - ], - 'Location': f's3://{sync_dataset.S3BucketName}/table1', - 
'Parameters': {'p1': 'p1'}, - }, - 'PartitionKeys': [ - { - 'Name': 'partition1', - 'Type': 'string', - 'Comment': 'comment_partition', - 'Parameters': {'partition_1': 'p1'}, - }, - ], - }, - ], - ) - mocker.patch( - 'dataall.tasks.tables_syncer.is_assumable_pivot_role', return_value=True - ) - mocker.patch( - 'dataall.aws.handlers.glue.Glue.grant_principals_all_table_permissions', - return_value=True, - ) - - processed_tables = dataall.tasks.tables_syncer.sync_tables(engine=db) - assert len(processed_tables) == 2 - with db.scoped_session() as session: - saved_table: dataall.db.models.DatasetTable = ( - session.query(dataall.db.models.DatasetTable) - .filter(dataall.db.models.DatasetTable.GlueTableName == 'table1') - .first() - ) - assert saved_table - assert saved_table.GlueTableName == 'table1' diff --git a/tests/utils/clients/graphql.py b/tests/utils/clients/graphql.py deleted file mode 100644 index 8117e437e..000000000 --- a/tests/utils/clients/graphql.py +++ /dev/null @@ -1,90 +0,0 @@ -import typing -import json -import pytest -from ariadne import graphql_sync -from ariadne.constants import PLAYGROUND_HTML -from ariadne.asgi import GraphQL -from flask import Flask, request, jsonify, Response -from dotted.collection import DottedCollection -import dataall - - -class ClientWrapper: - def __init__(self, cli): - self.client = cli - - def query( - self, - query: str, - username: str = 'test', - groups: typing.List[str] = ['-'], - **variables, - ): - response: Response = self.client.post( - '/graphql', - json={'query': f""" {query} """, 'variables': variables}, - headers={'groups': json.dumps(groups), 'username': username}, - ) - - return DottedCollection.factory(response.get_json()) - - -@pytest.fixture(scope='module', autouse=True) -def app(db): - app = Flask('tests') - schema = dataall.api.get_executable_schema() - - @app.route('/', methods=['OPTIONS']) - def opt(): - # On GET request serve GraphQL Playground - # You don't need to provide Playground if you don't want to - # but keep on mind this will not prohibit clients from - # exploring your API using desktop GraphQL Playground app. - return '
Hello
', 200 - - @app.route('/graphql', methods=['GET']) - def graphql_playgroud(): - # On GET request serve GraphQL Playground - # You don't need to provide Playground if you don't want to - # but keep on mind this will not prohibit clients from - # exploring your API using desktop GraphQL Playground app. - return PLAYGROUND_HTML, 200 - - @app.route('/graphql', methods=['POST']) - def graphql_server(): - # GraphQL queries are always sent as POST - # Note: Passing the request to the context is optional. - # In Flask, the current request is always accessible as flask.request - data = request.get_json() - - username = request.headers.get('Username', 'anonym') - groups = json.loads(request.headers.get('Groups', '[]')) - success, result = graphql_sync( - schema, - data, - context_value={ - 'schema': None, - 'engine': db, - 'username': username, - 'groups': groups, - }, - debug=app.debug, - ) - - status_code = 200 if success else 400 - return jsonify(result), status_code - - yield app - - -@pytest.fixture(scope='module') -def client(app) -> ClientWrapper: - with app.test_client() as client: - yield ClientWrapper(client) - - -def deprecated(fn): - def wrapper(*args, **kwargs): - print(fn.__name__, 'is deprecated') - - return wrapper diff --git a/tests/utils/clients/rest.py b/tests/utils/clients/rest.py deleted file mode 100644 index 125e38f74..000000000 --- a/tests/utils/clients/rest.py +++ /dev/null @@ -1,24 +0,0 @@ -import pytest -from fastapi import FastAPI -from fastapi.testclient import TestClient -import os - -os.environ['envname'] = 'pytest' -from src.cdkproxymain import app -import dataall - -ENVNAME = os.environ.get('envname', 'pytest') - - -@pytest.fixture(scope='module') -def cdkclient(): - yield TestClient(app) - - -@pytest.fixture(scope='module') -def db() -> dataall.db.Engine: - engine = dataall.db.get_engine(envname=ENVNAME) - dataall.db.create_schema_and_tables(engine, envname=ENVNAME) - yield engine - engine.session().close() - engine.engine.dispose() diff --git a/tests/utils/factories/__init__.py b/tests/utils/factories/__init__.py deleted file mode 100644 index ff6dabec2..000000000 --- a/tests/utils/factories/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .org import org -from .env import env -from .dataset import dataset
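The ClientWrapper removed above captures the whole pattern the deleted GraphQL test utilities relied on: a Flask app exposing /graphql, ariadne's graphql_sync, and username/groups request headers feeding the resolver context. Below is a minimal sketch of that same pattern adapted to the new package layout used elsewhere in this diff; it is not the repository's actual replacement fixture. The import paths (dataall.base.api, dataall.base.loader), the need to call load_modules before building the schema, and the presence of a db engine fixture are assumptions.

import json
import typing

import pytest
from ariadne import graphql_sync
from flask import Flask, jsonify, request

from dataall.base.api import get_executable_schema      # assumed new location of the schema factory
from dataall.base.loader import load_modules, ImportMode  # assumed required to register module types


class ClientWrapper:
    # Thin helper that posts GraphQL queries with the username/groups
    # headers the test server reads into the resolver context.
    def __init__(self, cli):
        self.client = cli

    def query(self, query: str, username: str = 'test',
              groups: typing.Sequence[str] = ('-',), **variables):
        response = self.client.post(
            '/graphql',
            json={'query': query, 'variables': variables},
            headers={'groups': json.dumps(list(groups)), 'username': username},
        )
        return response.get_json()


@pytest.fixture(scope='module')
def client(db):  # `db` is assumed to be a database engine fixture, as in the deleted tests
    # Assumption: module GraphQL types must be loaded before the executable schema is built.
    load_modules(modes={ImportMode.API})
    schema = get_executable_schema()
    app = Flask('tests')

    @app.route('/graphql', methods=['POST'])
    def graphql_server():
        # GraphQL queries are always sent as POST; headers carry the caller identity.
        data = request.get_json()
        success, result = graphql_sync(
            schema,
            data,
            context_value={
                'schema': None,
                'engine': db,
                'username': request.headers.get('Username', 'anonym'),
                'groups': json.loads(request.headers.get('Groups', '[]')),
            },
        )
        return jsonify(result), 200 if success else 400

    with app.test_client() as cli:
        yield ClientWrapper(cli)

Usage mirrors the deleted wrapper: client.query('query { ... }', username='alice', groups=['admins']) returns the parsed JSON response (this sketch drops the DottedCollection wrapper the old helper applied).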