Skip to content

Commit

Permalink
revert persistent environment
Browse files Browse the repository at this point in the history
  • Loading branch information
dlpzx committed Jul 8, 2024
1 parent 1e9732b commit 430f5b8
Showing 1 changed file with 30 additions and 31 deletions.
61 changes: 30 additions & 31 deletions tests_new/integration_tests/modules/s3_datasets/global_conftest.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import logging
import re
import pytest
import boto3
from integration_tests.client import GqlError
Expand Down Expand Up @@ -32,7 +31,7 @@ def create_s3_dataset(client, owner, group, org_uri, env_uri, tags=[], autoAppro
autoApprovalEnabled=autoApprovalEnabled,
confidentiality=confidentiality,
)
check_stack_ready(client, env_uri, dataset.stack.stackUri)
check_stack_ready(client, env_uri, dataset.stack.stackUri, env_uri, 'dataset')
return get_dataset(client, dataset.datasetUri)


Expand Down Expand Up @@ -63,12 +62,12 @@ def import_s3_dataset(
autoApprovalEnabled=autoApprovalEnabled,
confidentiality=confidentiality,
)
check_stack_ready(client, env_uri, dataset.stack.stackUri)
check_stack_ready(client, env_uri, dataset.stack.stackUri, env_uri, 'dataset')
return get_dataset(client, dataset.datasetUri)


def delete_s3_dataset(client, env_uri, dataset):
check_stack_ready(client, env_uri, dataset.stack.stackUri)
check_stack_ready(client, env_uri, dataset.stack.stackUri, env_uri, 'dataset')
try:
return delete_dataset(client, dataset.datasetUri)
except GqlError:
Expand All @@ -83,33 +82,33 @@ def delete_s3_dataset(client, env_uri, dataset):


@pytest.fixture(scope='session')
def session_s3_dataset1(client1, group1, org1, persistent_env1, session_id, testdata):
def session_s3_dataset1(client1, group1, org1, session_env1, session_id, testdata):
ds = None
try:
ds = create_s3_dataset(
client1,
owner='someone',
group=group1,
org_uri=org1['organizationUri'],
env_uri=persistent_env1['environmentUri'],
env_uri=session_env1['environmentUri'],
tags=[session_id],
)
yield ds
finally:
if ds:
delete_s3_dataset(client1, persistent_env1['environmentUri'], ds)
delete_s3_dataset(client1, session_env1['environmentUri'], ds)


@pytest.fixture(scope='session')
def session_s3_dataset2_with_table(client1, group1, org1, persistent_env1, session_id, testdata):
def session_s3_dataset2_with_table(client1, group1, org1, session_env1, session_id, testdata):
ds = None
try:
ds = create_s3_dataset(
client1,
owner='someone',
group=group1,
org_uri=org1['organizationUri'],
env_uri=persistent_env1['environmentUri'],
env_uri=session_env1['environmentUri'],
tags=[session_id],
)
creds = generate_dataset_access_token(client1, ds.datasetUri)
Expand All @@ -126,18 +125,18 @@ def session_s3_dataset2_with_table(client1, group1, org1, persistent_env1, sessi
yield ds, response.get('nodes', [])[0]
finally:
if ds:
delete_s3_dataset(client1, persistent_env1['environmentUri'], ds)
delete_s3_dataset(client1, session_env1['environmentUri'], ds)


@pytest.fixture(scope='session')
def session_imported_sse_s3_dataset1(
client1, group1, org1, persistent_env1, session_id, testdata, persistent_env1_aws_client
client1, group1, org1, session_env1, session_id, testdata, session_env1_aws_client
):
ds = None
bucket = None
bucket_name = f'sessionimportedsses3{session_id}'
try:
bucket = S3Client(session=persistent_env1_aws_client, region=persistent_env1['region']).create_bucket(
bucket = S3Client(session=session_env1_aws_client, region=session_env1['region']).create_bucket(
bucket_name=bucket_name, kms_key_id=None
)

Expand All @@ -146,21 +145,21 @@ def session_imported_sse_s3_dataset1(
owner='someone',
group=group1,
org_uri=org1['organizationUri'],
env_uri=persistent_env1['environmentUri'],
env_uri=session_env1['environmentUri'],
tags=[session_id],
bucket=bucket,
)
yield ds
finally:
if ds:
delete_s3_dataset(client1, persistent_env1['environmentUri'], ds)
delete_s3_dataset(client1, session_env1['environmentUri'], ds)
if bucket:
S3Client(session=persistent_env1_aws_client, region=persistent_env1['region']).delete_bucket(bucket)
S3Client(session=session_env1_aws_client, region=session_env1['region']).delete_bucket(bucket)


@pytest.fixture(scope='session')
def session_imported_kms_s3_dataset1(
client1, group1, org1, persistent_env1, session_id, testdata, persistent_env1_aws_client
client1, group1, org1, session_env1, session_id, testdata, session_env1_aws_client
):
ds = None
bucket = None
Expand All @@ -169,22 +168,22 @@ def session_imported_kms_s3_dataset1(
resource_name = f'sessionimportedkms{session_id}'
try:
kms_key_id, kms_alias = KMSClient(
session=persistent_env1_aws_client,
account_id=persistent_env1['AwsAccountId'],
region=persistent_env1['region'],
session=session_env1_aws_client,
account_id=session_env1['AwsAccountId'],
region=session_env1['region'],
).create_key_with_alias(resource_name)
bucket = S3Client(session=persistent_env1_aws_client, region=persistent_env1['region']).create_bucket(
bucket = S3Client(session=session_env1_aws_client, region=session_env1['region']).create_bucket(
bucket_name=resource_name, kms_key_id=kms_key_id
)
database = GlueClient(session=persistent_env1_aws_client, region=persistent_env1['region']).create_database(
database = GlueClient(session=session_env1_aws_client, region=session_env1['region']).create_database(
database_name=resource_name, bucket=resource_name
)
ds = import_s3_dataset(
client1,
owner='someone',
group=group1,
org_uri=org1['organizationUri'],
env_uri=persistent_env1['environmentUri'],
env_uri=session_env1['environmentUri'],
tags=[session_id],
bucket=bucket,
kms_alias=kms_alias,
Expand All @@ -193,17 +192,17 @@ def session_imported_kms_s3_dataset1(
yield ds
finally:
if ds:
delete_s3_dataset(client1, persistent_env1['environmentUri'], ds)
delete_s3_dataset(client1, session_env1['environmentUri'], ds)
if bucket:
S3Client(session=persistent_env1_aws_client, region=persistent_env1['region']).delete_bucket(bucket)
S3Client(session=session_env1_aws_client, region=session_env1['region']).delete_bucket(bucket)
if kms_alias:
KMSClient(
session=persistent_env1_aws_client,
account_id=persistent_env1['AwsAccountId'],
region=persistent_env1['region'],
session=session_env1_aws_client,
account_id=session_env1['AwsAccountId'],
region=session_env1['region'],
).delete_key_by_alias(kms_alias)
if database:
GlueClient(session=persistent_env1_aws_client, region=persistent_env1['region']).delete_database(database)
GlueClient(session=session_env1_aws_client, region=session_env1['region']).delete_database(database)


"""
Expand All @@ -213,21 +212,21 @@ def session_imported_kms_s3_dataset1(


@pytest.fixture(scope='function')
def temp_s3_dataset1(client1, group1, org1, persistent_env1, session_id, testdata):
def temp_s3_dataset1(client1, group1, org1, session_env1, session_id, testdata):
ds = None
try:
ds = create_s3_dataset(
client1,
owner='someone',
group=group1,
org_uri=org1['organizationUri'],
env_uri=persistent_env1['environmentUri'],
env_uri=session_env1['environmentUri'],
tags=[session_id],
)
yield ds
finally:
if ds:
delete_s3_dataset(client1, persistent_env1['environmentUri'], ds)
delete_s3_dataset(client1, session_env1['environmentUri'], ds)


"""
Expand Down

0 comments on commit 430f5b8

Please sign in to comment.