From 1b5ba8fc891b23dc0eef9b4369857fd8d9a2abc2 Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Wed, 25 Sep 2024 12:49:24 -0700 Subject: [PATCH 01/16] Skeleton notifications --- .../modules/notifications/conftest.py | 13 +++ .../modules/notifications/queries.py | 100 ++++++++++++++++++ .../notifications/test_noitifications.py | 31 ++++++ 3 files changed, 144 insertions(+) create mode 100644 tests_new/integration_tests/modules/notifications/conftest.py create mode 100644 tests_new/integration_tests/modules/notifications/queries.py create mode 100644 tests_new/integration_tests/modules/notifications/test_noitifications.py diff --git a/tests_new/integration_tests/modules/notifications/conftest.py b/tests_new/integration_tests/modules/notifications/conftest.py new file mode 100644 index 000000000..6e3e3951a --- /dev/null +++ b/tests_new/integration_tests/modules/notifications/conftest.py @@ -0,0 +1,13 @@ +# import pytest +# from integration_tests.core.vpc.queries import create_network, delete_network + + +# @pytest.fixture(scope='function') +# def notification1(client1): +# notification = None +# try: +# notification = create_notification(...) +# yield notification +# finally: +# if notification: +# delete_notification(client1, uri==notification.notificationUri) diff --git a/tests_new/integration_tests/modules/notifications/queries.py b/tests_new/integration_tests/modules/notifications/queries.py new file mode 100644 index 000000000..e9d3f96d2 --- /dev/null +++ b/tests_new/integration_tests/modules/notifications/queries.py @@ -0,0 +1,100 @@ +# TODO: This file will be replaced by using the SDK directly + + +def mark_notification_read(client, uri): + query = { + 'operationName': 'markNotificationAsRead', + 'variables': {'notificationUri': uri}, + 'query': """ + mutation markNotificationAsRead($notificationUri: String!) 
{ + markNotificationAsRead(notificationUri: $notificationUri) + } + """, + } + response = client.query(query=query) + return response.data.markNotificationAsRead + + +def delete_notification(client, uri): + query = { + 'operationName': 'deleteNotification', + 'variables': {'notificationUri': uri}, + 'query': """ + mutation deleteNotification($notificationUri: String!) { + deleteNotification(notificationUri: $notificationUri) + } + """, + } + response = client.query(query=query) + return response.data.deleteNotification + + +## TODO +def list_notifications(client): + query = { + 'operationName': 'listNotifications', + 'variables': {'filter': {'term': ''}}, + 'query': """ + query listNotifications($filter: NotificationFilter) { + listNotifications(filter: $filter) { + count + page + pages + hasNext + hasPrevious + nodes { + notificationUri + message + type + is_read + target_uri + } + } + } + """, + } + response = client.query(query=query) + return response.data.listNotifications + + +def count_unread_notificiations(client): + query = { + 'operationName': 'countUnreadNotifications', + 'variables': {}, + 'query': """ + query countUnreadNotifications { + countUnreadNotifications + } + """, + } + response = client.query(query=query) + return response.data.countUnreadNotifications + + +## TODO +def count_read_notificiations(client): + query = { + 'operationName': 'countReadNotifications', + 'variables': {}, + 'query': """ + query countReadNotifications { + countReadNotifications + } + """, + } + response = client.query(query=query) + return response.data.countReadNotifications + + +def count_deleted_notificiations(client): + query = { + 'operationName': 'countDeletedNotifications', + 'variables': {}, + 'query': """ + query countDeletedNotifications { + countDeletedNotifications + } + """, + } + response = client.query(query=query) + return response.data.countUnreadNotifications diff --git a/tests_new/integration_tests/modules/notifications/test_noitifications.py 
b/tests_new/integration_tests/modules/notifications/test_noitifications.py new file mode 100644 index 000000000..b945093c5 --- /dev/null +++ b/tests_new/integration_tests/modules/notifications/test_noitifications.py @@ -0,0 +1,31 @@ +from assertpy import assert_that + +from integration_tests.errors import GqlError +from integration_tests.modules.notifications.queries import list_notifications, count_deleted_notificiations, count_read_notificiations, count_unread_notificiations, mark_notification_read + + +# TODO +def test_create_notification(client1, notification): + # assert_that + pass + + +# TODO +def test_list_notification(client1, notification): + pass + + +# TODO +def test_count_unread_notification(client1, notification): + pass + + +# TODO +def test_read_notification(client1, notification): + pass + + +# TODO +def test_delete_notificaiont_notification(client1, notification): + pass + From 72075df1ecd4e3185828001ae1fa976f1f2fb359 Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Wed, 25 Sep 2024 12:50:02 -0700 Subject: [PATCH 02/16] Skeletion notifications --- .../modules/notifications/test_noitifications.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tests_new/integration_tests/modules/notifications/test_noitifications.py b/tests_new/integration_tests/modules/notifications/test_noitifications.py index b945093c5..5b3fbfe63 100644 --- a/tests_new/integration_tests/modules/notifications/test_noitifications.py +++ b/tests_new/integration_tests/modules/notifications/test_noitifications.py @@ -1,7 +1,13 @@ from assertpy import assert_that from integration_tests.errors import GqlError -from integration_tests.modules.notifications.queries import list_notifications, count_deleted_notificiations, count_read_notificiations, count_unread_notificiations, mark_notification_read +from integration_tests.modules.notifications.queries import ( + list_notifications, + count_deleted_notificiations, + count_read_notificiations, + 
count_unread_notificiations, + mark_notification_read, +) # TODO @@ -28,4 +34,3 @@ def test_read_notification(client1, notification): # TODO def test_delete_notificaiont_notification(client1, notification): pass - From 39364d4a8ee6b03b5888a1a8c53d7d895a4f21c9 Mon Sep 17 00:00:00 2001 From: Sofia Sazonova Date: Fri, 27 Sep 2024 13:31:33 +0100 Subject: [PATCH 03/16] remove tooling --- tests_new/integration_tests/aws_clients/iam.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/tests_new/integration_tests/aws_clients/iam.py b/tests_new/integration_tests/aws_clients/iam.py index 8a3d6b361..99b94980d 100644 --- a/tests_new/integration_tests/aws_clients/iam.py +++ b/tests_new/integration_tests/aws_clients/iam.py @@ -21,14 +21,6 @@ def get_role(self, role_name): log.info(f'Error occurred: {e}') return None - @staticmethod - def get_tooling_account_id(): - session = boto3.Session() - param_client = session.client('ssm', os.environ.get('AWS_REGION', 'us-east-1')) - parameter_path = f"/dataall/{os.environ.get('ENVNAME', 'dev')}/toolingAccount" - toolingAccount = param_client.get_parameter(Name=parameter_path)['Parameter']['Value'] - return toolingAccount - def create_role(self, account_id, role_name, test_role_name): policy_doc = { 'Version': '2012-10-17', @@ -38,7 +30,6 @@ def create_role(self, account_id, role_name, test_role_name): 'Principal': { 'AWS': [ f'arn:aws:iam::{account_id}:root', - f'arn:aws:iam::{IAMClient.get_tooling_account_id()}:root', f'arn:aws:sts::{account_id}:assumed-role/{test_role_name}/{test_role_name}', ] }, From 18ab1e0b136938f1761986774f1893a94ae71b99 Mon Sep 17 00:00:00 2001 From: Sofia Sazonova Date: Fri, 27 Sep 2024 14:34:32 +0100 Subject: [PATCH 04/16] remove iam role after test --- backend/dataall/core/environment/cdk/environment_stack.py | 2 +- tests_new/integration_tests/aws_clients/iam.py | 7 +++++++ tests_new/integration_tests/modules/share_base/conftest.py | 1 + .../modules/share_base/test_new_crossacc_s3_share.py | 4 ++++ 4 
files changed, 13 insertions(+), 1 deletion(-) diff --git a/backend/dataall/core/environment/cdk/environment_stack.py b/backend/dataall/core/environment/cdk/environment_stack.py index b9935c9c9..26bad2360 100644 --- a/backend/dataall/core/environment/cdk/environment_stack.py +++ b/backend/dataall/core/environment/cdk/environment_stack.py @@ -657,7 +657,7 @@ def create_integration_tests_role(self): self.test_role.add_to_policy( iam.PolicyStatement( - actions=['iam:GetRole', 'iam:CreateRole', 'iam:PutRolePolicy'], + actions=['iam:GetRole', 'iam:CreateRole', 'iam:DeleteRole', 'iam:PutRolePolicy'], effect=iam.Effect.ALLOW, resources=[f'arn:aws:iam::{self.account}:role/dataall-test-*'], ), diff --git a/tests_new/integration_tests/aws_clients/iam.py b/tests_new/integration_tests/aws_clients/iam.py index 99b94980d..58a1ea918 100644 --- a/tests_new/integration_tests/aws_clients/iam.py +++ b/tests_new/integration_tests/aws_clients/iam.py @@ -21,6 +21,13 @@ def get_role(self, role_name): log.info(f'Error occurred: {e}') return None + def delete_role(self, role_name): + try: + self._client.delete_role(RoleName=role_name) + except Exception as e: + log.error(e) + raise e + def create_role(self, account_id, role_name, test_role_name): policy_doc = { 'Version': '2012-10-17', diff --git a/tests_new/integration_tests/modules/share_base/conftest.py b/tests_new/integration_tests/modules/share_base/conftest.py index 3abc1bd9c..28a34cc90 100644 --- a/tests_new/integration_tests/modules/share_base/conftest.py +++ b/tests_new/integration_tests/modules/share_base/conftest.py @@ -51,6 +51,7 @@ def consumption_role_1(client5, group5, session_cross_acc_env_1, session_cross_a ) yield consumption_role remove_consumption_role(client5, session_cross_acc_env_1.environmentUri, consumption_role.consumptionRoleUri) + iam_client.delete_role(role['Role']['RoleName']) @pytest.fixture(scope='session') diff --git a/tests_new/integration_tests/modules/share_base/test_new_crossacc_s3_share.py 
b/tests_new/integration_tests/modules/share_base/test_new_crossacc_s3_share.py index 035d2e254..819357488 100644 --- a/tests_new/integration_tests/modules/share_base/test_new_crossacc_s3_share.py +++ b/tests_new/integration_tests/modules/share_base/test_new_crossacc_s3_share.py @@ -277,3 +277,7 @@ def test_revoke_succeeded(client1, share_params_main): assert_that(updated_share.status).is_equal_to('Processed') assert_that(items).extracting('status').contains_only('Revoke_Succeeded') assert_that(items).extracting('itemType').contains(*ALL_S3_SHARABLE_TYPES_NAMES) + + +def test_1(consumption_role_1): + print(consumption_role_1.IAMRoleArn) From f4aec98fe06875a02b1a2ccc4150fb41474d8bb1 Mon Sep 17 00:00:00 2001 From: Sofia Sazonova Date: Mon, 30 Sep 2024 11:28:06 +0100 Subject: [PATCH 05/16] PR changes --- .../core/environment/cdk/environment_stack.py | 8 +++++++- tests_new/integration_tests/aws_clients/iam.py | 17 ++++++++++------- .../modules/share_base/conftest.py | 2 +- 3 files changed, 18 insertions(+), 9 deletions(-) diff --git a/backend/dataall/core/environment/cdk/environment_stack.py b/backend/dataall/core/environment/cdk/environment_stack.py index 26bad2360..451921ace 100644 --- a/backend/dataall/core/environment/cdk/environment_stack.py +++ b/backend/dataall/core/environment/cdk/environment_stack.py @@ -657,7 +657,13 @@ def create_integration_tests_role(self): self.test_role.add_to_policy( iam.PolicyStatement( - actions=['iam:GetRole', 'iam:CreateRole', 'iam:DeleteRole', 'iam:PutRolePolicy'], + actions=[ + 'iam:GetRole', + 'iam:CreateRole', + 'iam:DeleteRole', + 'iam:PutRolePolicy', + 'iam:DeleteRolePolicy', + ], effect=iam.Effect.ALLOW, resources=[f'arn:aws:iam::{self.account}:role/dataall-test-*'], ), diff --git a/tests_new/integration_tests/aws_clients/iam.py b/tests_new/integration_tests/aws_clients/iam.py index 58a1ea918..eb0e1b885 100644 --- a/tests_new/integration_tests/aws_clients/iam.py +++ b/tests_new/integration_tests/aws_clients/iam.py @@ -8,6 +8,8 
@@ class IAMClient: + CONSUMPTION_POLICY_NAME = 'ConsumptionPolicy' + def __init__(self, session=boto3.Session(), region=os.environ.get('AWS_REGION', 'us-east-1')): self._client = session.client('iam', region_name=region) self._resource = session.resource('iam', region_name=region) @@ -56,12 +58,6 @@ def create_role(self, account_id, role_name, test_role_name): log.error(e) raise e - def create_role_if_not_exists(self, account_id, role_name, test_role_name): - role = self.get_role(role_name) - if role is None: - role = self.create_role(account_id, role_name, test_role_name) - return role - def get_consumption_role(self, account_id, role_name, test_role_name): role = self.get_role(role_name) if role is None: @@ -69,10 +65,17 @@ def get_consumption_role(self, account_id, role_name, test_role_name): self.put_consumption_role_policy(role_name) return role + def delete_policy(self, role_name, policy_name): + self._client.delete_role_policy(RoleName=role_name, PolicyName=policy_name) + + def delete_consumption_role(self, role_name): + self.delete_policy(role_name, self.CONSUMPTION_POLICY_NAME) + self.delete_role(role_name) + def put_consumption_role_policy(self, role_name): self._client.put_role_policy( RoleName=role_name, - PolicyName='ConsumptionPolicy', + PolicyName=self.CONSUMPTION_POLICY_NAME, PolicyDocument="""{ "Version": "2012-10-17", "Statement": [ diff --git a/tests_new/integration_tests/modules/share_base/conftest.py b/tests_new/integration_tests/modules/share_base/conftest.py index 28a34cc90..a44ebe722 100644 --- a/tests_new/integration_tests/modules/share_base/conftest.py +++ b/tests_new/integration_tests/modules/share_base/conftest.py @@ -51,7 +51,7 @@ def consumption_role_1(client5, group5, session_cross_acc_env_1, session_cross_a ) yield consumption_role remove_consumption_role(client5, session_cross_acc_env_1.environmentUri, consumption_role.consumptionRoleUri) - iam_client.delete_role(role['Role']['RoleName']) + 
iam_client.delete_consumption_role(role['Role']['RoleName']) @pytest.fixture(scope='session') From 8ba7f1a18fdd6302ce1756d0a1312dfd3c2ba9ea Mon Sep 17 00:00:00 2001 From: Sofia Sazonova Date: Mon, 30 Sep 2024 11:28:45 +0100 Subject: [PATCH 06/16] remove unused test --- .../modules/share_base/test_new_crossacc_s3_share.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests_new/integration_tests/modules/share_base/test_new_crossacc_s3_share.py b/tests_new/integration_tests/modules/share_base/test_new_crossacc_s3_share.py index 819357488..035d2e254 100644 --- a/tests_new/integration_tests/modules/share_base/test_new_crossacc_s3_share.py +++ b/tests_new/integration_tests/modules/share_base/test_new_crossacc_s3_share.py @@ -277,7 +277,3 @@ def test_revoke_succeeded(client1, share_params_main): assert_that(updated_share.status).is_equal_to('Processed') assert_that(items).extracting('status').contains_only('Revoke_Succeeded') assert_that(items).extracting('itemType').contains(*ALL_S3_SHARABLE_TYPES_NAMES) - - -def test_1(consumption_role_1): - print(consumption_role_1.IAMRoleArn) From 36d2dd07df7dee54ee361020568c0e567005b8e9 Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Mon, 30 Sep 2024 12:50:37 -0400 Subject: [PATCH 07/16] add notification tests --- tests_new/integration_tests/conftest.py | 1 + .../modules/notifications/conftest.py | 13 -- .../modules/notifications/queries.py | 8 +- .../notifications/test_noitifications.py | 36 ---- .../notifications/test_notifications.py | 58 +++++++ .../modules/share_base/conftest.py | 160 ----------------- .../modules/share_base/global_conftest.py | 161 ++++++++++++++++++ 7 files changed, 223 insertions(+), 214 deletions(-) delete mode 100644 tests_new/integration_tests/modules/notifications/conftest.py delete mode 100644 tests_new/integration_tests/modules/notifications/test_noitifications.py create mode 100644 tests_new/integration_tests/modules/notifications/test_notifications.py create mode 100644 
tests_new/integration_tests/modules/share_base/global_conftest.py diff --git a/tests_new/integration_tests/conftest.py b/tests_new/integration_tests/conftest.py index 89464e029..3f45037e8 100644 --- a/tests_new/integration_tests/conftest.py +++ b/tests_new/integration_tests/conftest.py @@ -16,6 +16,7 @@ 'integration_tests.core.organizations.global_conftest', 'integration_tests.core.environment.global_conftest', 'integration_tests.modules.s3_datasets.global_conftest', + 'integration_tests.modules.share_base.global_conftest', ] diff --git a/tests_new/integration_tests/modules/notifications/conftest.py b/tests_new/integration_tests/modules/notifications/conftest.py deleted file mode 100644 index 6e3e3951a..000000000 --- a/tests_new/integration_tests/modules/notifications/conftest.py +++ /dev/null @@ -1,13 +0,0 @@ -# import pytest -# from integration_tests.core.vpc.queries import create_network, delete_network - - -# @pytest.fixture(scope='function') -# def notification1(client1): -# notification = None -# try: -# notification = create_notification(...) 
-# yield notification -# finally: -# if notification: -# delete_notification(client1, uri==notification.notificationUri) diff --git a/tests_new/integration_tests/modules/notifications/queries.py b/tests_new/integration_tests/modules/notifications/queries.py index e9d3f96d2..a38405f3a 100644 --- a/tests_new/integration_tests/modules/notifications/queries.py +++ b/tests_new/integration_tests/modules/notifications/queries.py @@ -29,11 +29,10 @@ def delete_notification(client, uri): return response.data.deleteNotification -## TODO -def list_notifications(client): +def list_notifications(client, filter={}): query = { 'operationName': 'listNotifications', - 'variables': {'filter': {'term': ''}}, + 'variables': {'filter': filter}, 'query': """ query listNotifications($filter: NotificationFilter) { listNotifications(filter: $filter) { @@ -71,7 +70,6 @@ def count_unread_notificiations(client): return response.data.countUnreadNotifications -## TODO def count_read_notificiations(client): query = { 'operationName': 'countReadNotifications', @@ -97,4 +95,4 @@ def count_deleted_notificiations(client): """, } response = client.query(query=query) - return response.data.countUnreadNotifications + return response.data.countDeletedNotifications diff --git a/tests_new/integration_tests/modules/notifications/test_noitifications.py b/tests_new/integration_tests/modules/notifications/test_noitifications.py deleted file mode 100644 index 5b3fbfe63..000000000 --- a/tests_new/integration_tests/modules/notifications/test_noitifications.py +++ /dev/null @@ -1,36 +0,0 @@ -from assertpy import assert_that - -from integration_tests.errors import GqlError -from integration_tests.modules.notifications.queries import ( - list_notifications, - count_deleted_notificiations, - count_read_notificiations, - count_unread_notificiations, - mark_notification_read, -) - - -# TODO -def test_create_notification(client1, notification): - # assert_that - pass - - -# TODO -def test_list_notification(client1, 
notification): - pass - - -# TODO -def test_count_unread_notification(client1, notification): - pass - - -# TODO -def test_read_notification(client1, notification): - pass - - -# TODO -def test_delete_notificaiont_notification(client1, notification): - pass diff --git a/tests_new/integration_tests/modules/notifications/test_notifications.py b/tests_new/integration_tests/modules/notifications/test_notifications.py new file mode 100644 index 000000000..c9c1caefc --- /dev/null +++ b/tests_new/integration_tests/modules/notifications/test_notifications.py @@ -0,0 +1,58 @@ +from assertpy import assert_that + +from integration_tests.errors import GqlError +from integration_tests.modules.notifications.queries import ( + list_notifications, + count_deleted_notificiations, + count_read_notificiations, + count_unread_notificiations, + mark_notification_read, + delete_notification, +) + + +def test_list_notification(client1, session_share_1): + response = list_notifications(client1) + assert_that(response).is_not_none() + assert_that(response.nodes).is_not_empty() + assert_that(response.nodes[0].notificationUri).is_not_none() + + assert_that(list_notifications(client1, filter={'read': True})).is_not_none() + assert_that(list_notifications(client1, filter={'unread': True})).is_not_none() + assert_that(list_notifications(client1, filter={'archived': True})).is_not_none() + + +def test_count_unread_notification(client1, session_share_1): + assert_that(count_unread_notificiations(client1)).is_greater_than_or_equal_to(0) + + +def test_count_read_notification(client1, session_share_1): + assert_that(count_read_notificiations(client1)).is_greater_than_or_equal_to(0) + + +def test_read_notification_invalid(client1, session_share_1): + assert_that(mark_notification_read).raises(GqlError).when_called_with(client1, '').contains( + 'RequiredParameter', 'URI' + ) + + +def test_read_notification(client1, session_share_1): + count_unread = count_unread_notificiations(client1) + count_read = 
count_read_notificiations(client1) + + response = list_notifications(client1) + mark_notification_read(client1, response.nodes[0].notificationUri) + + assert_that(count_unread_notificiations(client1)).is_equal_to(count_unread - 1) + assert_that(count_read_notificiations(client1)).is_equal_to(count_read + 1) + + +def test_delete_notification_invalid(client1, session_share_1): + assert_that(delete_notification).raises(GqlError).when_called_with(client1, '').contains('RequiredParameter', 'URI') + + +def test_delete_notification(client1, session_share_1): + count_deleted = count_deleted_notificiations(client1) + response = list_notifications(client1, {'unread': True, 'read': True}) + delete_notification(client1, response.nodes[0].notificationUri) + assert_that(count_deleted_notificiations(client1)).is_equal_to(count_deleted + 1) diff --git a/tests_new/integration_tests/modules/share_base/conftest.py b/tests_new/integration_tests/modules/share_base/conftest.py index 3abc1bd9c..4fce9d514 100644 --- a/tests_new/integration_tests/modules/share_base/conftest.py +++ b/tests_new/integration_tests/modules/share_base/conftest.py @@ -1,165 +1,5 @@ import pytest -from tests_new.integration_tests.aws_clients.iam import IAMClient -from tests_new.integration_tests.core.environment.queries import add_consumption_role, remove_consumption_role -from tests_new.integration_tests.modules.share_base.queries import ( - create_share_object, - delete_share_object, - get_share_object, - revoke_share_items, -) -from tests_new.integration_tests.modules.share_base.utils import check_share_ready - -test_cons_role_name = 'dataall-test-ShareTestConsumptionRole' - - -def revoke_all_possible(client, shareUri): - share = get_share_object(client, shareUri, {'isShared': True}) - statuses_can_delete = [ - 'PendingApproval', - 'Revoke_Succeeded', - 'Share_Failed', - 'Share_Rejected', - ] - - shareItemUris = [item.shareItemUri for item in share['items'].nodes if item.status not in statuses_can_delete] - if 
shareItemUris: - revoke_share_items(client, shareUri, shareItemUris) - - -def clean_up_share(client, shareUri): - check_share_ready(client, shareUri) - revoke_all_possible(client, shareUri) - check_share_ready(client, shareUri) - delete_share_object(client, shareUri) - - -@pytest.fixture(scope='session') -def consumption_role_1(client5, group5, session_cross_acc_env_1, session_cross_acc_env_1_aws_client): - iam_client = IAMClient(session=session_cross_acc_env_1_aws_client, region=session_cross_acc_env_1['region']) - role = iam_client.get_consumption_role( - session_cross_acc_env_1.AwsAccountId, - test_cons_role_name, - f'dataall-integration-tests-role-{session_cross_acc_env_1.region}', - ) - consumption_role = add_consumption_role( - client5, - session_cross_acc_env_1.environmentUri, - group5, - 'ShareTestConsumptionRole', - role['Role']['Arn'], - ) - yield consumption_role - remove_consumption_role(client5, session_cross_acc_env_1.environmentUri, consumption_role.consumptionRoleUri) - - -@pytest.fixture(scope='session') -def session_share_1( - client5, - client1, - session_cross_acc_env_1, - session_s3_dataset1, - session_s3_dataset1_tables, - session_s3_dataset1_folders, - group5, -): - share1 = create_share_object( - client=client5, - dataset_or_item_params={'datasetUri': session_s3_dataset1.datasetUri}, - environmentUri=session_cross_acc_env_1.environmentUri, - groupUri=group5, - principalId=group5, - principalType='Group', - requestPurpose='test create share object', - attachMissingPolicies=True, - permissions=['Read'], - ) - share1 = get_share_object(client5, share1.shareUri) - yield share1 - clean_up_share(client5, share1.shareUri) - - -@pytest.fixture(scope='session') -def session_share_2( - client5, - client1, - session_cross_acc_env_1, - session_imported_sse_s3_dataset1, - session_imported_sse_s3_dataset1_tables, - session_imported_sse_s3_dataset1_folders, - group5, -): - share2 = create_share_object( - client=client5, - 
dataset_or_item_params={'datasetUri': session_imported_sse_s3_dataset1.datasetUri}, - environmentUri=session_cross_acc_env_1.environmentUri, - groupUri=group5, - principalId=group5, - principalType='Group', - requestPurpose='test create share object', - attachMissingPolicies=True, - permissions=['Read'], - ) - share2 = get_share_object(client5, share2.shareUri) - yield share2 - - clean_up_share(client5, share2.shareUri) - - -@pytest.fixture(scope='session') -def session_share_consrole_1( - client5, - client1, - session_cross_acc_env_1, - session_s3_dataset1, - session_s3_dataset1_tables, - session_s3_dataset1_folders, - group5, - consumption_role_1, -): - share1cr = create_share_object( - client=client5, - dataset_or_item_params={'datasetUri': session_s3_dataset1.datasetUri}, - environmentUri=session_cross_acc_env_1.environmentUri, - groupUri=group5, - principalId=consumption_role_1.consumptionRoleUri, - principalType='ConsumptionRole', - requestPurpose='test create share object', - attachMissingPolicies=True, - permissions=['Read'], - ) - share1cr = get_share_object(client5, share1cr.shareUri) - yield share1cr - clean_up_share(client5, share1cr.shareUri) - - -@pytest.fixture(scope='session') -def session_share_consrole_2( - client5, - client1, - session_cross_acc_env_1, - session_imported_sse_s3_dataset1, - session_imported_sse_s3_dataset1_tables, - session_imported_sse_s3_dataset1_folders, - group5, - consumption_role_1, -): - share2cr = create_share_object( - client=client5, - dataset_or_item_params={'datasetUri': session_imported_sse_s3_dataset1.datasetUri}, - environmentUri=session_cross_acc_env_1.environmentUri, - groupUri=group5, - principalId=consumption_role_1.consumptionRoleUri, - principalType='ConsumptionRole', - requestPurpose='test create share object', - attachMissingPolicies=True, - permissions=['Read'], - ) - share2cr = get_share_object(client5, share2cr.shareUri) - yield share2cr - - clean_up_share(client5, share2cr.shareUri) - 
@pytest.fixture(params=['Group', 'ConsumptionRole']) def principal1(request, group5, consumption_role_1): diff --git a/tests_new/integration_tests/modules/share_base/global_conftest.py b/tests_new/integration_tests/modules/share_base/global_conftest.py new file mode 100644 index 000000000..b37d6c9e0 --- /dev/null +++ b/tests_new/integration_tests/modules/share_base/global_conftest.py @@ -0,0 +1,161 @@ +import pytest + +from tests_new.integration_tests.aws_clients.iam import IAMClient +from tests_new.integration_tests.core.environment.queries import add_consumption_role, remove_consumption_role +from tests_new.integration_tests.modules.share_base.queries import ( + create_share_object, + delete_share_object, + get_share_object, + revoke_share_items, +) +from tests_new.integration_tests.modules.share_base.utils import check_share_ready + +test_cons_role_name = 'dataall-test-ShareTestConsumptionRole' + + +def revoke_all_possible(client, shareUri): + share = get_share_object(client, shareUri, {'isShared': True}) + statuses_can_delete = [ + 'PendingApproval', + 'Revoke_Succeeded', + 'Share_Failed', + 'Share_Rejected', + ] + + shareItemUris = [item.shareItemUri for item in share['items'].nodes if item.status not in statuses_can_delete] + if shareItemUris: + revoke_share_items(client, shareUri, shareItemUris) + + +def clean_up_share(client, shareUri): + check_share_ready(client, shareUri) + revoke_all_possible(client, shareUri) + check_share_ready(client, shareUri) + delete_share_object(client, shareUri) + + +@pytest.fixture(scope='session') +def consumption_role_1(client5, group5, session_cross_acc_env_1, session_cross_acc_env_1_aws_client): + iam_client = IAMClient(session=session_cross_acc_env_1_aws_client, region=session_cross_acc_env_1['region']) + role = iam_client.get_consumption_role( + session_cross_acc_env_1.AwsAccountId, + test_cons_role_name, + f'dataall-integration-tests-role-{session_cross_acc_env_1.region}', + ) + consumption_role = add_consumption_role( + 
client5, + session_cross_acc_env_1.environmentUri, + group5, + 'ShareTestConsumptionRole', + role['Role']['Arn'], + ) + yield consumption_role + remove_consumption_role(client5, session_cross_acc_env_1.environmentUri, consumption_role.consumptionRoleUri) + + +@pytest.fixture(scope='session') +def session_share_1( + client5, + client1, + session_cross_acc_env_1, + session_s3_dataset1, + session_s3_dataset1_tables, + session_s3_dataset1_folders, + group5, +): + share1 = create_share_object( + client=client5, + dataset_or_item_params={'datasetUri': session_s3_dataset1.datasetUri}, + environmentUri=session_cross_acc_env_1.environmentUri, + groupUri=group5, + principalId=group5, + principalType='Group', + requestPurpose='test create share object', + attachMissingPolicies=True, + permissions=['Read'], + ) + share1 = get_share_object(client5, share1.shareUri) + yield share1 + clean_up_share(client5, share1.shareUri) + + +@pytest.fixture(scope='session') +def session_share_2( + client5, + client1, + session_cross_acc_env_1, + session_imported_sse_s3_dataset1, + session_imported_sse_s3_dataset1_tables, + session_imported_sse_s3_dataset1_folders, + group5, +): + share2 = create_share_object( + client=client5, + dataset_or_item_params={'datasetUri': session_imported_sse_s3_dataset1.datasetUri}, + environmentUri=session_cross_acc_env_1.environmentUri, + groupUri=group5, + principalId=group5, + principalType='Group', + requestPurpose='test create share object', + attachMissingPolicies=True, + permissions=['Read'], + ) + share2 = get_share_object(client5, share2.shareUri) + yield share2 + + clean_up_share(client5, share2.shareUri) + + +@pytest.fixture(scope='session') +def session_share_consrole_1( + client5, + client1, + session_cross_acc_env_1, + session_s3_dataset1, + session_s3_dataset1_tables, + session_s3_dataset1_folders, + group5, + consumption_role_1, +): + share1cr = create_share_object( + client=client5, + dataset_or_item_params={'datasetUri': 
session_s3_dataset1.datasetUri}, + environmentUri=session_cross_acc_env_1.environmentUri, + groupUri=group5, + principalId=consumption_role_1.consumptionRoleUri, + principalType='ConsumptionRole', + requestPurpose='test create share object', + attachMissingPolicies=True, + permissions=['Read'], + ) + share1cr = get_share_object(client5, share1cr.shareUri) + yield share1cr + clean_up_share(client5, share1cr.shareUri) + + +@pytest.fixture(scope='session') +def session_share_consrole_2( + client5, + client1, + session_cross_acc_env_1, + session_imported_sse_s3_dataset1, + session_imported_sse_s3_dataset1_tables, + session_imported_sse_s3_dataset1_folders, + group5, + consumption_role_1, +): + share2cr = create_share_object( + client=client5, + dataset_or_item_params={'datasetUri': session_imported_sse_s3_dataset1.datasetUri}, + environmentUri=session_cross_acc_env_1.environmentUri, + groupUri=group5, + principalId=consumption_role_1.consumptionRoleUri, + principalType='ConsumptionRole', + requestPurpose='test create share object', + attachMissingPolicies=True, + permissions=['Read'], + ) + share2cr = get_share_object(client5, share2cr.shareUri) + yield share2cr + + clean_up_share(client5, share2cr.shareUri) From 298e244707d6f312bb839e7d47d5321c24f5de34 Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Tue, 1 Oct 2024 09:54:06 -0400 Subject: [PATCH 08/16] Remove countRead, countUnread, and delete Notification Ops --- .../modules/notifications/api/mutations.py | 7 --- .../modules/notifications/api/resolvers.py | 20 --------- .../db/notification_repositories.py | 29 ------------ .../Notification/archiveNotification.js | 12 ----- .../Notification/countDeletedNotifications.js | 10 ----- .../Notification/countReadNotifications.js | 10 ----- .../modules/notifications/queries.py | 44 +------------------ .../notifications/test_notifications.py | 37 ++++------------ 8 files changed, 9 insertions(+), 160 deletions(-) delete mode 100644 
frontend/src/services/graphql/Notification/archiveNotification.js delete mode 100644 frontend/src/services/graphql/Notification/countDeletedNotifications.js delete mode 100644 frontend/src/services/graphql/Notification/countReadNotifications.js diff --git a/backend/dataall/modules/notifications/api/mutations.py b/backend/dataall/modules/notifications/api/mutations.py index a38936ae6..0228099b3 100644 --- a/backend/dataall/modules/notifications/api/mutations.py +++ b/backend/dataall/modules/notifications/api/mutations.py @@ -10,10 +10,3 @@ type=gql.Boolean, resolver=mark_as_read, ) - -deleteNotification = gql.MutationField( - name='deleteNotification', - args=[gql.Argument(name='notificationUri', type=gql.String)], - type=gql.Boolean, - resolver=delete, -) diff --git a/backend/dataall/modules/notifications/api/resolvers.py b/backend/dataall/modules/notifications/api/resolvers.py index 783a4eefe..40989a4a2 100644 --- a/backend/dataall/modules/notifications/api/resolvers.py +++ b/backend/dataall/modules/notifications/api/resolvers.py @@ -48,23 +48,3 @@ def count_unread_notifications(context: Context, source): return NotificationRepository.count_unread_notifications( session=session, username=get_context().username, groups=get_context().groups ) - - -def count_deleted_notifications(context: Context, source): - with _session() as session: - return NotificationRepository.count_deleted_notifications( - session=session, username=get_context().username, groups=get_context().groups - ) - - -def count_read_notifications(context: Context, source): - with _session() as session: - return NotificationRepository.count_read_notifications( - session=session, username=get_context().username, groups=get_context().groups - ) - - -def delete(context: Context, source, notificationUri): - _required_uri(notificationUri) - with _session() as session: - return NotificationRepository.delete_notification(session=session, notificationUri=notificationUri) diff --git 
a/backend/dataall/modules/notifications/db/notification_repositories.py b/backend/dataall/modules/notifications/db/notification_repositories.py index 34ff7d1c6..e60c01729 100644 --- a/backend/dataall/modules/notifications/db/notification_repositories.py +++ b/backend/dataall/modules/notifications/db/notification_repositories.py @@ -66,38 +66,9 @@ def count_unread_notifications(session, username, groups): ) return int(count) - @staticmethod - def count_read_notifications(session, username, groups): - count = ( - session.query(func.count(models.Notification.notificationUri)) - .filter(or_(models.Notification.recipient == username, models.Notification.recipient.in_(groups))) - .filter(models.Notification.is_read == True) - .filter(models.Notification.deleted.is_(None)) - .scalar() - ) - return int(count) - - @staticmethod - def count_deleted_notifications(session, username, groups): - count = ( - session.query(func.count(models.Notification.notificationUri)) - .filter(or_(models.Notification.recipient == username, models.Notification.recipient.in_(groups))) - .filter(models.Notification.deleted.isnot(None)) - .scalar() - ) - return int(count) - @staticmethod def read_notification(session, notificationUri): notification = session.query(models.Notification).get(notificationUri) notification.is_read = True session.commit() return True - - @staticmethod - def delete_notification(session, notificationUri): - notification = session.query(models.Notification).get(notificationUri) - if notification: - notification.deleted = datetime.now() - session.commit() - return True diff --git a/frontend/src/services/graphql/Notification/archiveNotification.js b/frontend/src/services/graphql/Notification/archiveNotification.js deleted file mode 100644 index e6b719055..000000000 --- a/frontend/src/services/graphql/Notification/archiveNotification.js +++ /dev/null @@ -1,12 +0,0 @@ -import { gql } from 'apollo-boost'; - -export const archiveNotification = ({ notificationUri }) => ({ - 
variables: { - notificationUri - }, - mutation: gql` - mutation deleteNotification($notificationUri: String!) { - deleteNotification(notificationUri: $notificationUri) - } - ` -}); diff --git a/frontend/src/services/graphql/Notification/countDeletedNotifications.js b/frontend/src/services/graphql/Notification/countDeletedNotifications.js deleted file mode 100644 index 42ea4f4e9..000000000 --- a/frontend/src/services/graphql/Notification/countDeletedNotifications.js +++ /dev/null @@ -1,10 +0,0 @@ -import { gql } from 'apollo-boost'; - -export const countDeletedNotifications = () => ({ - variables: {}, - query: gql` - query countDeletedNotifications { - countDeletedNotifications - } - ` -}); diff --git a/frontend/src/services/graphql/Notification/countReadNotifications.js b/frontend/src/services/graphql/Notification/countReadNotifications.js deleted file mode 100644 index dcd855237..000000000 --- a/frontend/src/services/graphql/Notification/countReadNotifications.js +++ /dev/null @@ -1,10 +0,0 @@ -import { gql } from 'apollo-boost'; - -export const countReadNotifications = () => ({ - variables: {}, - query: gql` - query countReadNotifications { - countReadNotifications - } - ` -}); diff --git a/tests_new/integration_tests/modules/notifications/queries.py b/tests_new/integration_tests/modules/notifications/queries.py index a38405f3a..81259e648 100644 --- a/tests_new/integration_tests/modules/notifications/queries.py +++ b/tests_new/integration_tests/modules/notifications/queries.py @@ -15,20 +15,6 @@ def mark_notification_read(client, uri): return response.data.markNotificationAsRead -def delete_notification(client, uri): - query = { - 'operationName': 'deleteNotification', - 'variables': {'notificationUri': uri}, - 'query': """ - mutation deleteNotification($notificationUri: String!) 
{ - deleteNotification(notificationUri: $notificationUri) - } - """, - } - response = client.query(query=query) - return response.data.deleteNotification - - def list_notifications(client, filter={}): query = { 'operationName': 'listNotifications', @@ -56,7 +42,7 @@ def list_notifications(client, filter={}): return response.data.listNotifications -def count_unread_notificiations(client): +def count_unread_notifications(client): query = { 'operationName': 'countUnreadNotifications', 'variables': {}, @@ -68,31 +54,3 @@ def count_unread_notificiations(client): } response = client.query(query=query) return response.data.countUnreadNotifications - - -def count_read_notificiations(client): - query = { - 'operationName': 'countReadNotifications', - 'variables': {}, - 'query': """ - query countReadNotifications { - countReadNotifications - } - """, - } - response = client.query(query=query) - return response.data.countReadNotifications - - -def count_deleted_notificiations(client): - query = { - 'operationName': 'countDeletedNotifications', - 'variables': {}, - 'query': """ - query countDeletedNotifications { - countDeletedNotifications - } - """, - } - response = client.query(query=query) - return response.data.countDeletedNotifications diff --git a/tests_new/integration_tests/modules/notifications/test_notifications.py b/tests_new/integration_tests/modules/notifications/test_notifications.py index 995af56e0..d8d55875a 100644 --- a/tests_new/integration_tests/modules/notifications/test_notifications.py +++ b/tests_new/integration_tests/modules/notifications/test_notifications.py @@ -1,30 +1,22 @@ -import pytest from assertpy import assert_that from integration_tests.errors import GqlError from integration_tests.modules.notifications.queries import ( list_notifications, - count_deleted_notificiations, - count_read_notificiations, - count_unread_notificiations, + count_unread_notifications, mark_notification_read, - delete_notification, ) def 
test_list_notification(client1): - assert_that(list_notifications(client1)).is_not_none() - assert_that(list_notifications(client1, filter={'read': True})).is_not_none() - assert_that(list_notifications(client1, filter={'unread': True})).is_not_none() - assert_that(list_notifications(client1, filter={'archived': True})).is_not_none() + assert_that(list_notifications(client1)).contains_key('page', 'pages', 'nodes', 'count') + assert_that(list_notifications(client1, filter={'read': True})).contains_key('page', 'pages', 'nodes', 'count') + assert_that(list_notifications(client1, filter={'unread': True})).contains_key('page', 'pages', 'nodes', 'count') + assert_that(list_notifications(client1, filter={'archived': True})).contains_key('page', 'pages', 'nodes', 'count') def test_count_unread_notification(client1): - assert_that(count_unread_notificiations(client1)).is_greater_than_or_equal_to(0) - - -def test_count_read_notification(client1): - assert_that(count_read_notificiations(client1)).is_greater_than_or_equal_to(0) + assert_that(count_unread_notifications(client1)).is_greater_than_or_equal_to(0) def test_read_notification_invalid(client1): @@ -34,22 +26,9 @@ def test_read_notification_invalid(client1): def test_read_notification(client1, session_share_1_notifications): - count_unread = count_unread_notificiations(client1) - count_read = count_read_notificiations(client1) + count_unread = count_unread_notifications(client1) response = list_notifications(client1) mark_notification_read(client1, response.nodes[0].notificationUri) - assert_that(count_unread_notificiations(client1)).is_equal_to(count_unread - 1) - assert_that(count_read_notificiations(client1)).is_equal_to(count_read + 1) - - -def test_delete_notification_invalid(client1): - assert_that(delete_notification).raises(GqlError).when_called_with(client1, '').contains('RequiredParameter', 'URI') - - -def test_delete_notification(client1, session_share_1_notifications): - count_deleted = 
count_deleted_notificiations(client1) - response = list_notifications(client1, {'read': True}) - delete_notification(client1, response.nodes[0].notificationUri) - assert_that(count_deleted_notificiations(client1)).is_equal_to(count_deleted + 1) + assert_that(count_unread_notifications(client1)).is_equal_to(count_unread - 1) From 00633e390604ff67feb2b99042ac07de25d8879c Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Tue, 1 Oct 2024 11:48:30 -0400 Subject: [PATCH 09/16] Missed cleanup files --- .../dataall/modules/notifications/api/queries.py | 15 --------------- .../src/services/graphql/Notification/index.js | 3 --- 2 files changed, 18 deletions(-) diff --git a/backend/dataall/modules/notifications/api/queries.py b/backend/dataall/modules/notifications/api/queries.py index 95ba57aaa..ef89ec79c 100644 --- a/backend/dataall/modules/notifications/api/queries.py +++ b/backend/dataall/modules/notifications/api/queries.py @@ -1,7 +1,5 @@ from dataall.base.api import gql from .resolvers import ( - count_deleted_notifications, - count_read_notifications, count_unread_notifications, list_my_notifications, ) @@ -22,16 +20,3 @@ resolver=count_unread_notifications, ) -# Not used in frontend -countReadNotifications = gql.QueryField( - name='countReadNotifications', - type=gql.Integer, - resolver=count_read_notifications, -) - -# Not used in frontend -countDeletedNotifications = gql.QueryField( - name='countDeletedNotifications', - type=gql.Integer, - resolver=count_deleted_notifications, -) diff --git a/frontend/src/services/graphql/Notification/index.js b/frontend/src/services/graphql/Notification/index.js index b050f4df8..4deab1497 100644 --- a/frontend/src/services/graphql/Notification/index.js +++ b/frontend/src/services/graphql/Notification/index.js @@ -1,6 +1,3 @@ -export * from './archiveNotification'; -export * from './countDeletedNotifications'; -export * from './countReadNotifications'; export * from './countUnreadNotifications'; export * from './listNotifications'; 
export * from './markAsRead'; From 6b1f55667777421ff5e612f5ae1f65f96645c9ba Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Wed, 2 Oct 2024 09:57:37 -0400 Subject: [PATCH 10/16] ruff --- tests_new/integration_tests/modules/share_base/conftest.py | 1 + .../integration_tests/modules/share_base/global_conftest.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/tests_new/integration_tests/modules/share_base/conftest.py b/tests_new/integration_tests/modules/share_base/conftest.py index 871876700..b9a14160b 100644 --- a/tests_new/integration_tests/modules/share_base/conftest.py +++ b/tests_new/integration_tests/modules/share_base/conftest.py @@ -8,6 +8,7 @@ def principal1(request, group5, session_consumption_role_1): else: yield session_consumption_role_1.consumptionRoleUri, request.param + # --------------SESSION PARAM FIXTURES---------------------------- diff --git a/tests_new/integration_tests/modules/share_base/global_conftest.py b/tests_new/integration_tests/modules/share_base/global_conftest.py index 3b54d32a2..968fdc464 100644 --- a/tests_new/integration_tests/modules/share_base/global_conftest.py +++ b/tests_new/integration_tests/modules/share_base/global_conftest.py @@ -184,6 +184,7 @@ def session_share_consrole_2( clean_up_share(client5, share2cr.shareUri) + # --------------PERSISTENT FIXTURES---------------------------- @@ -271,4 +272,4 @@ def persistent_role_share_1( submit_share_object(client5, share1.shareUri) approve_share_object(client1, share1.shareUri) check_share_ready(client5, share1.shareUri) - yield get_share_object(client5, share1.shareUri) \ No newline at end of file + yield get_share_object(client5, share1.shareUri) From f9c3e1bd1c4c4e5792ff00a4ef12109b7591f5f3 Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Wed, 2 Oct 2024 09:59:28 -0400 Subject: [PATCH 11/16] clean up delete notification --- backend/dataall/modules/notifications/api/mutations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git
a/backend/dataall/modules/notifications/api/mutations.py b/backend/dataall/modules/notifications/api/mutations.py index 0228099b3..991f559f3 100644 --- a/backend/dataall/modules/notifications/api/mutations.py +++ b/backend/dataall/modules/notifications/api/mutations.py @@ -1,5 +1,5 @@ from dataall.base.api import gql -from .resolvers import delete, mark_as_read +from .resolvers import mark_as_read markNotificationAsRead = gql.MutationField( From 741031b7eb89092edace4a135a5c886d1dee4254 Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Mon, 28 Oct 2024 15:46:51 -0400 Subject: [PATCH 12/16] Fix notification tests since latest shares test changes --- .../db/notification_repositories.py | 2 +- tests_new/integration_tests/conftest.py | 2 +- .../modules/notifications/conftest.py | 22 -- .../notifications/test_notifications.py | 5 +- .../shares/s3_datasets_shares/conftest.py | 238 ------------------ .../s3_datasets_shares/global_conftest.py | 53 +++- .../test_new_crossacc_s3_share.py | 2 +- 7 files changed, 56 insertions(+), 268 deletions(-) delete mode 100644 tests_new/integration_tests/modules/notifications/conftest.py delete mode 100644 tests_new/integration_tests/modules/shares/s3_datasets_shares/conftest.py diff --git a/backend/dataall/modules/notifications/db/notification_repositories.py b/backend/dataall/modules/notifications/db/notification_repositories.py index 1366b82f9..d8306b503 100644 --- a/backend/dataall/modules/notifications/db/notification_repositories.py +++ b/backend/dataall/modules/notifications/db/notification_repositories.py @@ -90,7 +90,7 @@ def count_unread_notifications(session, username, groups): ) return int(count) - @staticmethod + @staticmethod @NotificationAccess.is_recipient def read_notification(session, notificationUri): notification = session.query(models.Notification).get(notificationUri) diff --git a/tests_new/integration_tests/conftest.py b/tests_new/integration_tests/conftest.py index 543f70911..3ffa46b26 100644 --- 
a/tests_new/integration_tests/conftest.py +++ b/tests_new/integration_tests/conftest.py @@ -16,8 +16,8 @@ 'integration_tests.core.organizations.global_conftest', 'integration_tests.core.environment.global_conftest', 'integration_tests.modules.s3_datasets.global_conftest', - 'integration_tests.modules.share_base.global_conftest', 'integration_tests.modules.redshift_datasets.global_conftest', + 'integration_tests.modules.shares.s3_datasets_shares.global_conftest', ] diff --git a/tests_new/integration_tests/modules/notifications/conftest.py b/tests_new/integration_tests/modules/notifications/conftest.py deleted file mode 100644 index 33f7009a4..000000000 --- a/tests_new/integration_tests/modules/notifications/conftest.py +++ /dev/null @@ -1,22 +0,0 @@ -import pytest - -from tests_new.integration_tests.modules.share_base.queries import ( - submit_share_object, - add_share_item, - get_share_object, - remove_shared_item, -) - - -@pytest.fixture(scope='module') -def session_share_1_notifications(client5, session_share_1): - share_item_uri = None - try: - updated_share = get_share_object(client5, session_share_1.shareUri) - item_to_add = updated_share['items'].nodes[0] - share_item_uri = add_share_item(client5, session_share_1.shareUri, item_to_add.itemUri, item_to_add.itemType) - submit_share_object(client5, session_share_1.shareUri) - yield session_share_1 - finally: - if share_item_uri: - remove_shared_item(client5, share_item_uri) diff --git a/tests_new/integration_tests/modules/notifications/test_notifications.py b/tests_new/integration_tests/modules/notifications/test_notifications.py index d8d55875a..8f95d4fe9 100644 --- a/tests_new/integration_tests/modules/notifications/test_notifications.py +++ b/tests_new/integration_tests/modules/notifications/test_notifications.py @@ -25,10 +25,11 @@ def test_read_notification_invalid(client1): ) -def test_read_notification(client1, session_share_1_notifications): +def test_read_notification(client1, persistent_group_share_1): 
count_unread = count_unread_notifications(client1) response = list_notifications(client1) mark_notification_read(client1, response.nodes[0].notificationUri) - assert_that(count_unread_notifications(client1)).is_equal_to(count_unread - 1) + # To Handle Case if the Notification Marked as Read was Already Read Earlier + assert_that(count_unread_notifications(client1)).is_less_than_or_equal_to(count_unread) diff --git a/tests_new/integration_tests/modules/shares/s3_datasets_shares/conftest.py b/tests_new/integration_tests/modules/shares/s3_datasets_shares/conftest.py deleted file mode 100644 index 64fd6adfa..000000000 --- a/tests_new/integration_tests/modules/shares/s3_datasets_shares/conftest.py +++ /dev/null @@ -1,238 +0,0 @@ -import pytest - -from tests_new.integration_tests.aws_clients.iam import IAMClient -from tests_new.integration_tests.core.environment.queries import ( - add_consumption_role, - remove_consumption_role, - list_environment_consumption_roles, -) -from tests_new.integration_tests.modules.shares.queries import ( - create_share_object, - delete_share_object, - get_share_object, - revoke_share_items, - submit_share_object, - approve_share_object, - add_share_item, -) -from tests_new.integration_tests.modules.shares.utils import check_share_ready - -test_session_cons_role_name = 'dataall-test-ShareTestConsumptionRole' -test_persistent_cons_role_name = 'dataall-test-PersistentConsumptionRole' - - -def revoke_all_possible(client, shareUri): - share = get_share_object(client, shareUri, {'isShared': True}) - statuses_can_delete = [ - 'PendingApproval', - 'Revoke_Succeeded', - 'Share_Failed', - 'Share_Rejected', - ] - - shareItemUris = [item.shareItemUri for item in share['items'].nodes if item.status not in statuses_can_delete] - if shareItemUris: - revoke_share_items(client, shareUri, shareItemUris) - - -def clean_up_share(client, shareUri): - check_share_ready(client, shareUri) - revoke_all_possible(client, shareUri) - check_share_ready(client, shareUri) 
- delete_share_object(client, shareUri) - - -def create_consumption_role(client, group, environment, environment_client, iam_role_name, cons_role_name): - iam_client = IAMClient(session=environment_client, region=environment['region']) - role = iam_client.get_consumption_role( - environment.AwsAccountId, - iam_role_name, - f'dataall-integration-tests-role-{environment.region}', - ) - return add_consumption_role( - client, - environment.environmentUri, - group, - cons_role_name, - role['Role']['Arn'], - ) - - -# --------------SESSION PARAM FIXTURES---------------------------- - - -@pytest.fixture(scope='session') -def session_consumption_role_1(client5, group5, session_cross_acc_env_1, session_cross_acc_env_1_aws_client): - consumption_role = create_consumption_role( - client5, - group5, - session_cross_acc_env_1, - session_cross_acc_env_1_aws_client, - test_session_cons_role_name, - 'SessionConsRole1', - ) - yield consumption_role - remove_consumption_role(client5, session_cross_acc_env_1.environmentUri, consumption_role.consumptionRoleUri) - iam_client = IAMClient(session=session_cross_acc_env_1_aws_client, region=session_cross_acc_env_1['region']) - iam_client.delete_consumption_role(test_session_cons_role_name) - - -@pytest.fixture(scope='session') -def session_share_1( - client5, - client1, - session_cross_acc_env_1, - session_s3_dataset1, - session_s3_dataset1_tables, - session_s3_dataset1_folders, - group5, -): - share1 = create_share_object( - client=client5, - dataset_or_item_params={'datasetUri': session_s3_dataset1.datasetUri}, - environmentUri=session_cross_acc_env_1.environmentUri, - groupUri=group5, - principalId=group5, - principalType='Group', - requestPurpose='test create share object', - attachMissingPolicies=True, - permissions=['Read'], - ) - share1 = get_share_object(client5, share1.shareUri) - yield share1 - clean_up_share(client5, share1.shareUri) - - -@pytest.fixture(scope='session') -def session_share_2( - client5, - client1, - 
session_cross_acc_env_1, - session_imported_sse_s3_dataset1, - session_imported_sse_s3_dataset1_tables, - session_imported_sse_s3_dataset1_folders, - group5, -): - share2 = create_share_object( - client=client5, - dataset_or_item_params={'datasetUri': session_imported_sse_s3_dataset1.datasetUri}, - environmentUri=session_cross_acc_env_1.environmentUri, - groupUri=group5, - principalId=group5, - principalType='Group', - requestPurpose='test create share object', - attachMissingPolicies=True, - permissions=['Read'], - ) - share2 = get_share_object(client5, share2.shareUri) - yield share2 - - clean_up_share(client5, share2.shareUri) - - -@pytest.fixture(scope='session') -def session_share_consrole_1( - client5, - client1, - session_cross_acc_env_1, - session_s3_dataset1, - session_s3_dataset1_tables, - session_s3_dataset1_folders, - group5, - session_consumption_role_1, -): - share1cr = create_share_object( - client=client5, - dataset_or_item_params={'datasetUri': session_s3_dataset1.datasetUri}, - environmentUri=session_cross_acc_env_1.environmentUri, - groupUri=group5, - principalId=session_consumption_role_1.consumptionRoleUri, - principalType='ConsumptionRole', - requestPurpose='test create share object', - attachMissingPolicies=True, - permissions=['Read'], - ) - share1cr = get_share_object(client5, share1cr.shareUri) - yield share1cr - clean_up_share(client5, share1cr.shareUri) - - -@pytest.fixture(scope='session') -def session_share_consrole_2( - client5, - client1, - session_cross_acc_env_1, - session_imported_sse_s3_dataset1, - session_imported_sse_s3_dataset1_tables, - session_imported_sse_s3_dataset1_folders, - group5, - session_consumption_role_1, -): - share2cr = create_share_object( - client=client5, - dataset_or_item_params={'datasetUri': session_imported_sse_s3_dataset1.datasetUri}, - environmentUri=session_cross_acc_env_1.environmentUri, - groupUri=group5, - principalId=session_consumption_role_1.consumptionRoleUri, - principalType='ConsumptionRole', 
- requestPurpose='test create share object', - attachMissingPolicies=True, - permissions=['Read'], - ) - share2cr = get_share_object(client5, share2cr.shareUri) - yield share2cr - - clean_up_share(client5, share2cr.shareUri) - - -@pytest.fixture(params=['Group', 'ConsumptionRole']) -def principal1(request, group5, session_consumption_role_1): - if request.param == 'Group': - yield group5, request.param - else: - yield session_consumption_role_1.consumptionRoleUri, request.param - - -# --------------SESSION PARAM FIXTURES---------------------------- - - -@pytest.fixture(params=['Group', 'ConsumptionRole']) -def share_params_main(request, session_share_1, session_share_consrole_1, session_s3_dataset1): - if request.param == 'Group': - yield session_share_1, session_s3_dataset1 - else: - yield session_share_consrole_1, session_s3_dataset1 - - -@pytest.fixture(params=[(False, 'Group'), (True, 'Group'), (False, 'ConsumptionRole'), (True, 'ConsumptionRole')]) -def share_params_all( - request, - session_share_1, - session_share_consrole_1, - session_s3_dataset1, - session_share_2, - session_share_consrole_2, - session_imported_sse_s3_dataset1, -): - autoapproval, principal_type = request.param - if autoapproval: - if principal_type == 'Group': - yield session_share_2, session_imported_sse_s3_dataset1 - else: - yield session_share_consrole_2, session_imported_sse_s3_dataset1 - else: - if principal_type == 'Group': - yield session_share_1, session_s3_dataset1 - else: - yield session_share_consrole_1, session_s3_dataset1 - - -# --------------PERSISTENT FIXTURES---------------------------- - - -@pytest.fixture(params=['Group', 'ConsumptionRole']) -def persistent_share_params_main(request, persistent_role_share_1, persistent_group_share_1): - if request.param == 'Group': - yield persistent_group_share_1 - else: - yield persistent_role_share_1 diff --git a/tests_new/integration_tests/modules/shares/s3_datasets_shares/global_conftest.py 
b/tests_new/integration_tests/modules/shares/s3_datasets_shares/global_conftest.py index 968fdc464..f195a545f 100644 --- a/tests_new/integration_tests/modules/shares/s3_datasets_shares/global_conftest.py +++ b/tests_new/integration_tests/modules/shares/s3_datasets_shares/global_conftest.py @@ -6,7 +6,7 @@ remove_consumption_role, list_environment_consumption_roles, ) -from tests_new.integration_tests.modules.share_base.queries import ( +from tests_new.integration_tests.modules.shares.queries import ( create_share_object, delete_share_object, get_share_object, @@ -15,7 +15,7 @@ approve_share_object, add_share_item, ) -from tests_new.integration_tests.modules.share_base.utils import check_share_ready +from tests_new.integration_tests.modules.shares.utils import check_share_ready test_session_cons_role_name = 'dataall-test-ShareTestConsumptionRole' test_persistent_cons_role_name = 'dataall-test-PersistentConsumptionRole' @@ -74,7 +74,7 @@ def session_consumption_role_1(client5, group5, session_cross_acc_env_1, session yield consumption_role remove_consumption_role(client5, session_cross_acc_env_1.environmentUri, consumption_role.consumptionRoleUri) iam_client = IAMClient(session=session_cross_acc_env_1_aws_client, region=session_cross_acc_env_1['region']) - iam_client.delete_consumption_role(consumption_role['Role']['RoleName']) + iam_client.delete_consumption_role(test_session_cons_role_name) @pytest.fixture(scope='session') @@ -185,6 +185,45 @@ def session_share_consrole_2( clean_up_share(client5, share2cr.shareUri) +@pytest.fixture(params=['Group', 'ConsumptionRole']) +def principal1(request, group5, session_consumption_role_1): + if request.param == 'Group': + yield group5, request.param + else: + yield session_consumption_role_1.consumptionRoleUri, request.param + + +@pytest.fixture(params=['Group', 'ConsumptionRole']) +def share_params_main(request, session_share_1, session_share_consrole_1, session_s3_dataset1): + if request.param == 'Group': + yield 
session_share_1, session_s3_dataset1 + else: + yield session_share_consrole_1, session_s3_dataset1 + + +@pytest.fixture(params=[(False, 'Group'), (True, 'Group'), (False, 'ConsumptionRole'), (True, 'ConsumptionRole')]) +def share_params_all( + request, + session_share_1, + session_share_consrole_1, + session_s3_dataset1, + session_share_2, + session_share_consrole_2, + session_imported_sse_s3_dataset1, +): + autoapproval, principal_type = request.param + if autoapproval: + if principal_type == 'Group': + yield session_share_2, session_imported_sse_s3_dataset1 + else: + yield session_share_consrole_2, session_imported_sse_s3_dataset1 + else: + if principal_type == 'Group': + yield session_share_1, session_s3_dataset1 + else: + yield session_share_consrole_1, session_s3_dataset1 + + # --------------PERSISTENT FIXTURES---------------------------- @@ -273,3 +312,11 @@ def persistent_role_share_1( approve_share_object(client1, share1.shareUri) check_share_ready(client5, share1.shareUri) yield get_share_object(client5, share1.shareUri) + + +@pytest.fixture(params=['Group', 'ConsumptionRole']) +def persistent_share_params_main(request, persistent_role_share_1, persistent_group_share_1): + if request.param == 'Group': + yield persistent_group_share_1 + else: + yield persistent_role_share_1 diff --git a/tests_new/integration_tests/modules/shares/s3_datasets_shares/test_new_crossacc_s3_share.py b/tests_new/integration_tests/modules/shares/s3_datasets_shares/test_new_crossacc_s3_share.py index bae4e0ad9..9caea6fc2 100644 --- a/tests_new/integration_tests/modules/shares/s3_datasets_shares/test_new_crossacc_s3_share.py +++ b/tests_new/integration_tests/modules/shares/s3_datasets_shares/test_new_crossacc_s3_share.py @@ -1,7 +1,7 @@ import pytest from assertpy import assert_that -from integration_tests.modules.shares.s3_datasets_shares.conftest import clean_up_share +from dataall.tests_new.integration_tests.modules.shares.s3_datasets_shares.global_conftest import clean_up_share 
from tests_new.integration_tests.modules.shares.queries import ( create_share_object, submit_share_object, From e22537e77cdf650a86cdf633dd4a998c2116d233 Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Mon, 28 Oct 2024 16:08:34 -0400 Subject: [PATCH 13/16] Add notification service and split logic --- .../modules/notifications/api/resolvers.py | 23 ++---- .../db/notification_repositories.py | 29 +------- .../services/notification_service.py | 70 +++++++++++++++++++ 3 files changed, 75 insertions(+), 47 deletions(-) create mode 100644 backend/dataall/modules/notifications/services/notification_service.py diff --git a/backend/dataall/modules/notifications/api/resolvers.py b/backend/dataall/modules/notifications/api/resolvers.py index 40989a4a2..5dbb2af97 100644 --- a/backend/dataall/modules/notifications/api/resolvers.py +++ b/backend/dataall/modules/notifications/api/resolvers.py @@ -1,19 +1,11 @@ import logging +from dataall.backend.dataall.modules.notifications.services.notification_service import NotificationService from dataall.base.api.context import Context -from dataall.base.context import get_context from dataall.base.db import exceptions -from dataall.modules.notifications.db.notification_repositories import NotificationRepository log = logging.getLogger(__name__) -# For simplicity there is no additional layer for the business logic of notifications as it happens with other more -# complex modules. In the resolvers we check the input and we perform the db calls directly. 
- - -def _session(): - return get_context().db_engine.scoped_session() - def _required_uri(uri): if not uri: @@ -27,10 +19,7 @@ def list_my_notifications( ): if not filter: filter = {} - with _session() as session: - return NotificationRepository.paginated_notifications( - session=session, username=get_context().username, groups=get_context().groups, filter=filter - ) + return NotificationService.list_my_notifications(filter=filter) def mark_as_read( @@ -39,12 +28,8 @@ def mark_as_read( notificationUri: str = None, ): _required_uri(notificationUri) - with _session() as session: - return NotificationRepository.read_notification(session=session, notificationUri=notificationUri) + return NotificationService.mark_as_read(notificationUri=notificationUri) def count_unread_notifications(context: Context, source): - with _session() as session: - return NotificationRepository.count_unread_notifications( - session=session, username=get_context().username, groups=get_context().groups - ) + return NotificationService.count_unread_notifications() diff --git a/backend/dataall/modules/notifications/db/notification_repositories.py b/backend/dataall/modules/notifications/db/notification_repositories.py index d8306b503..c18393c67 100644 --- a/backend/dataall/modules/notifications/db/notification_repositories.py +++ b/backend/dataall/modules/notifications/db/notification_repositories.py @@ -1,33 +1,7 @@ -from datetime import datetime - from sqlalchemy import func, and_, or_ from dataall.modules.notifications.db import notification_models as models -from dataall.base.db import paginate, exceptions -from dataall.base.context import get_context -from functools import wraps - - -class NotificationAccess: - @staticmethod - def is_recipient(f): - @wraps(f) - def wrapper(*args, **kwds): - uri = kwds.get('notificationUri') - if not uri: - raise KeyError(f"{f.__name__} doesn't have parameter uri.") - context = get_context() - with context.db_engine.scoped_session() as session: - notification 
= session.query(models.Notification).get(uri) - if notification and (notification.recipient in context.groups + [context.username]): - return f(*args, **kwds) - else: - raise exceptions.UnauthorizedOperation( - action='UPDATE NOTIFICATION', - message=f'User {context.username} is not the recipient user/group of the notification {uri}', - ) - - return wrapper +from dataall.base.db import paginate class NotificationRepository: @@ -91,7 +65,6 @@ def count_unread_notifications(session, username, groups): return int(count) @staticmethod - @NotificationAccess.is_recipient def read_notification(session, notificationUri): notification = session.query(models.Notification).get(notificationUri) notification.is_read = True diff --git a/backend/dataall/modules/notifications/services/notification_service.py b/backend/dataall/modules/notifications/services/notification_service.py new file mode 100644 index 000000000..61dbc8cde --- /dev/null +++ b/backend/dataall/modules/notifications/services/notification_service.py @@ -0,0 +1,70 @@ +""" +A service layer for Notifications +""" + +import logging +from dataall.base.db import exceptions + +from dataall.base.context import get_context +from dataall.modules.notifications.db import notification_models as models +from functools import wraps + +from dataall.modules.notifications.db.notification_repositories import NotificationRepository + +logger = logging.getLogger(__name__) + + +def _session(): + return get_context().db_engine.scoped_session() + + +class NotificationAccess: + @staticmethod + def is_recipient(f): + @wraps(f) + def wrapper(*args, **kwds): + uri = kwds.get('notificationUri') + if not uri: + raise KeyError(f"{f.__name__} doesn't have parameter uri.") + context = get_context() + with context.db_engine.scoped_session() as session: + notification = session.query(models.Notification).get(uri) + if notification and (notification.recipient in context.groups + [context.username]): + return f(*args, **kwds) + else: + raise 
exceptions.UnauthorizedOperation( + action='UPDATE NOTIFICATION', + message=f'User {context.username} is not the recipient user/group of the notification {uri}', + ) + + return wrapper + + +class NotificationService: + """ + Encapsulate the logic of interactions with notifications. + """ + + @staticmethod + def list_my_notifications(filter: dict = {}): + """List existed user notifications. Filters only required notifications by the filter param""" + context = get_context() + + with context.db_engine.scoped_session() as session: + return NotificationRepository.paginated_notifications( + session=session, username=context.username, groups=context.groups, filter=filter + ) + + @staticmethod + @NotificationAccess.is_recipient + def mark_as_read(notificationUri: str): + with get_context().db_engine.scoped_session() as session: + return NotificationRepository.read_notification(session=session, notificationUri=notificationUri) + + @staticmethod + def count_unread_notifications(): + context = get_context() + with context.db_engine.scoped_session() as session: + return NotificationRepository.count_unread_notifications( + session=session, username=context.username, groups=context.groups + ) From 4bb2c354d14f82e26e881b69b40be11a9ecacfd0 Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Mon, 28 Oct 2024 16:20:07 -0400 Subject: [PATCH 14/16] Add notification service and split logic --- backend/dataall/modules/notifications/api/resolvers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/dataall/modules/notifications/api/resolvers.py b/backend/dataall/modules/notifications/api/resolvers.py index 5dbb2af97..b2b1154c4 100644 --- a/backend/dataall/modules/notifications/api/resolvers.py +++ b/backend/dataall/modules/notifications/api/resolvers.py @@ -1,6 +1,6 @@ import logging -from dataall.backend.dataall.modules.notifications.services.notification_service import NotificationService +from dataall.modules.notifications.services.notification_service import 
NotificationService from dataall.base.api.context import Context from dataall.base.db import exceptions From 1a814c311bccb78d3886b5f4692ca7e433d9f7b7 Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Thu, 21 Nov 2024 09:52:25 -0500 Subject: [PATCH 15/16] add a test unauth notification --- .../modules/notifications/test_notifications.py | 11 +++++++++++ .../integration_tests/modules/shares/queries.py | 15 --------------- 2 files changed, 11 insertions(+), 15 deletions(-) diff --git a/tests_new/integration_tests/modules/notifications/test_notifications.py b/tests_new/integration_tests/modules/notifications/test_notifications.py index 8f95d4fe9..ef9ba3aa9 100644 --- a/tests_new/integration_tests/modules/notifications/test_notifications.py +++ b/tests_new/integration_tests/modules/notifications/test_notifications.py @@ -25,6 +25,17 @@ def test_read_notification_invalid(client1): ) +def test_read_notification_unauth(client1, client2, persistent_group_share_1): + count_unread = count_unread_notifications(client1) + + response = list_notifications(client1) + assert_that(mark_notification_read).raises(GqlError).when_called_with( + client1, response.nodes[0].notificationUri + ).contains('UnauthorizedOperation', 'UPDATE NOTIFICATION') + + assert_that(count_unread_notifications(client1)).is_equal_to(count_unread) + + def test_read_notification(client1, persistent_group_share_1): count_unread = count_unread_notifications(client1) diff --git a/tests_new/integration_tests/modules/shares/queries.py b/tests_new/integration_tests/modules/shares/queries.py index 9f787247d..591a59d27 100644 --- a/tests_new/integration_tests/modules/shares/queries.py +++ b/tests_new/integration_tests/modules/shares/queries.py @@ -255,21 +255,6 @@ def revoke_share_items(client, shareUri: str, shareItemUris: List[str]): return response.data.revokeItemsShareObject -def remove_shared_item(client, shareItemUri): - query = { - 'operationName': 'RemoveSharedItem', - 'variables': {'shareItemUri': shareItemUri}, 
- 'query': """ - mutation RemoveSharedItem($shareItemUri: String!) { - removeSharedItem(shareItemUri: $shareItemUri) - } - """, - } - - response = client.query(query=query) - return response.data.removeSharedItem - - def get_s3_consumption_data(client, shareUri: str): query = { 'operationName': 'getS3ConsumptionData', From 39da41ca4431934718bf862f99d9ed961ee16c70 Mon Sep 17 00:00:00 2001 From: Noah Paige Date: Thu, 21 Nov 2024 10:34:56 -0500 Subject: [PATCH 16/16] fix --- .../modules/notifications/test_notifications.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests_new/integration_tests/modules/notifications/test_notifications.py b/tests_new/integration_tests/modules/notifications/test_notifications.py index ef9ba3aa9..e76bb6842 100644 --- a/tests_new/integration_tests/modules/notifications/test_notifications.py +++ b/tests_new/integration_tests/modules/notifications/test_notifications.py @@ -30,7 +30,7 @@ def test_read_notification_unauth(client1, client2, persistent_group_share_1): response = list_notifications(client1) assert_that(mark_notification_read).raises(GqlError).when_called_with( - client1, response.nodes[0].notificationUri + client2, response.nodes[0].notificationUri ).contains('UnauthorizedOperation', 'UPDATE NOTIFICATION') assert_that(count_unread_notifications(client1)).is_equal_to(count_unread)