[Bugfix] - Changes in logic to delete share db #1706

Merged 11 commits on Feb 4, 2025
2 changes: 1 addition & 1 deletion backend/api_handler.py
@@ -60,7 +60,7 @@ def adapted(obj, info, **kwargs):

executable_schema = get_executable_schema()
end = perf_counter()
-print(f'Lambda Context ' f'Initialization took: {end - start:.3f} sec')
+print(f'Lambda Context Initialization took: {end - start:.3f} sec')


def handler(event, context):
2 changes: 1 addition & 1 deletion backend/cdkproxymain.py
@@ -18,7 +18,7 @@
logger = logging.getLogger('cdksass')

ENVNAME = os.getenv('envname', 'local')
-logger.warning(f"Application started for envname= `{ENVNAME}` DH_DOCKER_VERSION:{os.environ.get('DH_DOCKER_VERSION')}")
+logger.warning(f'Application started for envname= `{ENVNAME}` DH_DOCKER_VERSION:{os.environ.get("DH_DOCKER_VERSION")}')


def connect():
4 changes: 3 additions & 1 deletion backend/dataall/base/api/gql/graphql_directive.py
@@ -18,7 +18,9 @@ def gql(self, with_directives=True):
if not len(self.args.keys()):
return f'@{self.name}'
else:
-return f"@{self.name}({','.join([k+':'+DirectiveArgs.to_string(self.args[k]) for k in self.args.keys()])})"
+return (
+    f'@{self.name}({",".join([k + ":" + DirectiveArgs.to_string(self.args[k]) for k in self.args.keys()])})'
+)


if __name__ == '__main__':
2 changes: 1 addition & 1 deletion backend/dataall/base/api/gql/graphql_input.py
@@ -15,5 +15,5 @@ def gql(self):
description_str = f'"""{self.description}"""{n}' if self.description else ''

# args = f"{', '.join([arg.name+':'+ arg.type.gql() for arg in self.arguments])}"
-args = f"{', '.join([arg.gql() for arg in self.arguments])}"
+args = f'{", ".join([arg.gql() for arg in self.arguments])}'
return description_str + n.join(textwrap.wrap(f'input {self.name}{{{n} {args} }}'))
2 changes: 1 addition & 1 deletion backend/dataall/base/api/gql/graphql_union_type.py
@@ -25,7 +25,7 @@ def __init__(self, name, types=[], type_registry=None, resolver=lambda *_, **__:

def gql(self, *args, **kwargs):
types = self.type_registry.types() if self.type_registry else self.types
-return f"union {self.name} = {'|'.join([get_named_type(t).name for t in types])}"
+return f'union {self.name} = {"|".join([get_named_type(t).name for t in types])}'


if __name__ == '__main__':
2 changes: 1 addition & 1 deletion backend/dataall/base/api/gql/schema.py
@@ -90,7 +90,7 @@ def gql(self, with_directives=True):
if len(self.unions):
unions = f"""{n.join([u.gql() for u in self.unions])}{n}"""

-types = f"""{n} {n.join([n+t.gql(with_directives=with_directives)+n for t in self.types])}"""
+types = f"""{n} {n.join([n + t.gql(with_directives=with_directives) + n for t in self.types])}"""
return f"""{enums}{input_types}{unions}{types}"""

def visit(self, visitors=[]):
2 changes: 1 addition & 1 deletion backend/dataall/base/aws/parameter_store.py
@@ -18,7 +18,7 @@ def __init__(self):
@staticmethod
def client(AwsAccountId=None, region=None, role=None):
if AwsAccountId:
-log.info(f"SSM Parameter remote session with role:{role if role else 'PivotRole'}")
+log.info(f'SSM Parameter remote session with role:{role if role else "PivotRole"}')
session = SessionHelper.remote_session(accountid=AwsAccountId, region=region, role=role)
else:
log.info('SSM Parameter session in central account')
4 changes: 2 additions & 2 deletions backend/dataall/base/aws/quicksight.py
@@ -117,14 +117,14 @@ def check_quicksight_enterprise_subscription(AwsAccountId, region=None):
else:
if response['AccountInfo']['Edition'] not in ['ENTERPRISE', 'ENTERPRISE_AND_Q']:
raise Exception(
-f"Quicksight Subscription found in Account: {AwsAccountId} of incorrect type: {response['AccountInfo']['Edition']}"
+f'Quicksight Subscription found in Account: {AwsAccountId} of incorrect type: {response["AccountInfo"]["Edition"]}'
)
else:
if response['AccountInfo']['AccountSubscriptionStatus'] == 'ACCOUNT_CREATED':
return True
else:
raise Exception(
-f"Quicksight Subscription found in Account: {AwsAccountId} not active. Status = {response['AccountInfo']['AccountSubscriptionStatus']}"
+f'Quicksight Subscription found in Account: {AwsAccountId} not active. Status = {response["AccountInfo"]["AccountSubscriptionStatus"]}'
)

except client.exceptions.ResourceNotFoundException:
8 changes: 4 additions & 4 deletions backend/dataall/base/cdkproxy/cdk_cli_wrapper.py
@@ -135,7 +135,7 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s
CommandSanitizer(input_args)

cmd = [
-'' '. ~/.nvm/nvm.sh &&',
+'. ~/.nvm/nvm.sh &&',
'cdk',
'deploy --all',
'--require-approval',
@@ -161,7 +161,7 @@ def deploy_cdk_stack(engine: Engine, stackid: str, app_path: str = None, path: s
f'"{sys.executable} {app_path}"',
'--verbose',
]
-logger.info(f"Running command : \n {' '.join(cmd)}")
+logger.info(f'Running command : \n {" ".join(cmd)}')

# This command is too complex to be executed as a list of commands. We need to run it with shell=True
# However, the input arguments have to be sanitized with the CommandSanitizer
@@ -217,7 +217,7 @@ def describe_stack(stack, engine: Engine = None, stackid: str = None):

def cdk_installed():
cmd1 = ['.', '~/.nvm/nvm.sh']
-logger.info(f"Running command {' '.join(cmd1)}")
+logger.info(f'Running command {" ".join(cmd1)}')
subprocess.run(
cmd1,
text=True,
@@ -229,7 +229,7 @@ def cdk_installed():
)

cmd2 = ['cdk', '--version']
-logger.info(f"Running command {' '.join(cmd2)}")
+logger.info(f'Running command {" ".join(cmd2)}')
subprocess.run(
cmd2,
text=True,
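
The comment in this file's second hunk notes the deploy command is too complex for a subprocess arg list and must run with shell=True, which is only safe because the inputs pass CommandSanitizer first. A minimal sketch of that sanitize-then-shell pattern, with an assumed allow-list regex and hypothetical helper names rather than data.all's actual implementation:

import re
import subprocess


def sanitize_args(args):
    # Hypothetical allow-list: reject anything that could smuggle in shell syntax
    for arg in args:
        if not re.fullmatch(r'[A-Za-z0-9_./:@-]+', str(arg)):
            raise ValueError(f'Unsafe argument: {arg}')


def run_deploy(stack_name, app_path):
    sanitize_args([stack_name, app_path])  # validate before interpolating into the shell string
    cmd = f'. ~/.nvm/nvm.sh && cdk deploy {stack_name} --require-approval never --app "{app_path}"'
    # shell=True is tolerable here only because every interpolated value was sanitized above
    subprocess.run(cmd, shell=True, text=True, check=True)
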
2 changes: 1 addition & 1 deletion backend/dataall/base/db/dbconfig.py
@@ -16,7 +16,7 @@ def __init__(self, user: str, pwd: str, host: str, db: str, schema: str):
for param in (user, db, schema):
if len(param) > _POSTGRES_MAX_LEN:
raise ValueError(
-f"PostgreSQL doesn't allow values more than 63 characters" f' parameters {user}, {db}, {schema}'
+f"PostgreSQL doesn't allow values more than 63 characters parameters {user}, {db}, {schema}"
)

if len(host) > _MAX_HOST_LENGTH:
6 changes: 2 additions & 4 deletions backend/dataall/base/loader.py
@@ -238,8 +238,7 @@ def _check_loading_correct(in_config: Set[str], modes: Set[ImportMode]):
for module in _all_modules():
if module.is_supported(modes) and module not in expected_load:
raise ImportError(
-f'ModuleInterface has not been initialized for module {module.name()}. '
-'Declare the module in depends_on'
+f'ModuleInterface has not been initialized for module {module.name()}. Declare the module in depends_on'
)

# 4) Checks all references for modules (when ModuleInterfaces don't exist or not supported)
@@ -260,8 +259,7 @@ def _describe_loading(in_config: Set[str], inactive: Set[str]):
log.debug(f'The {name} module was loaded')
if name in inactive:
log.info(
-f'There is a module that depends on {module.name()}. '
-"The module has been loaded despite it's inactive."
+f"There is a module that depends on {module.name()}. The module has been loaded despite it's inactive."
)
elif name not in in_config:
log.info(
2 changes: 1 addition & 1 deletion backend/dataall/base/utils/alarm_service.py
@@ -36,7 +36,7 @@ def trigger_stack_deployment_failure_alarm(self, stack: Stack):
- State Change: OK -> ALARM
- Reason for State Change: Stack Deployment Failure
- Timestamp: {datetime.now()}
-- CW Log Group: {f"/dataall/{self.envname}/cdkproxy/{stack.EcsTaskArn.split('/')[-1]}"}
+- CW Log Group: {f'/dataall/{self.envname}/cdkproxy/{stack.EcsTaskArn.split("/")[-1]}'}
"""
return self.publish_message_to_alarms_topic(subject, message)

4 changes: 2 additions & 2 deletions backend/dataall/base/utils/api_handler_utils.py
@@ -39,7 +39,7 @@ def redact_creds(event):
def get_cognito_groups(claims):
if not claims:
raise ValueError(
-'Received empty claims. ' 'Please verify authorizer configuration',
+'Received empty claims. Please verify authorizer configuration',
claims,
)
groups = list()
@@ -134,7 +134,7 @@ def check_reauth(query, auth_time, username):
log.info(f'ReAuth Required for User {username} on Operation {query.get("operationName", "")}, Error: {e}')
return send_unauthorized_response(
operation=query.get('operationName', 'operation'),
-message=f"ReAuth Required To Perform This Action {query.get('operationName', '')}",
+message=f'ReAuth Required To Perform This Action {query.get("operationName", "")}',
extension={'code': 'REAUTH'},
)

2 changes: 1 addition & 1 deletion backend/dataall/base/utils/iam_policy_utils.py
@@ -156,5 +156,5 @@ def _policy_splitter(
resulting_statement = statement_builder(split=split, subset=subset)
split += 1
resulting_statements.append(resulting_statement)
-logger.info(f'Statement divided into {split+1} smaller statements')
+logger.info(f'Statement divided into {split + 1} smaller statements')
return resulting_statements
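
The change above only re-spaces {split + 1}; for context, _policy_splitter exists because IAM caps policy document size, so one oversized statement is divided across several smaller ones. A hedged sketch of that idea, with an assumed chunk size and statement shape rather than the actual splitter:

import logging

logger = logging.getLogger(__name__)


def split_statement(resources, chunk_size=50):
    # Spread a long resource list across several smaller statements
    statements = []
    for split, start in enumerate(range(0, len(resources), chunk_size)):
        subset = resources[start:start + chunk_size]
        statements.append({'Sid': f'Split{split}', 'Effect': 'Allow', 'Resource': subset})
    logger.info(f'Statement divided into {len(statements)} smaller statements')
    return statements
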
2 changes: 1 addition & 1 deletion backend/dataall/base/utils/naming_convention.py
@@ -55,7 +55,7 @@ def build_compliant_name(self) -> str:
separator = NamingConventionPattern[self.service].value['separator']
max_length = NamingConventionPattern[self.service].value['max_length']
suffix = f'-{self.target_uri}' if len(self.target_uri) else ''
-return f"{slugify(self.resource_prefix + '-' + self.target_label[:(max_length - len(self.resource_prefix + self.target_uri))] + suffix, regex_pattern=fr'{regex}', separator=separator, lowercase=True)}"
+return f'{slugify(self.resource_prefix + "-" + self.target_label[: (max_length - len(self.resource_prefix + self.target_uri))] + suffix, regex_pattern=rf"{regex}", separator=separator, lowercase=True)}'

def validate_name(self):
regex = NamingConventionPattern[self.service].value['regex']
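
The reformatted return in build_compliant_name packs truncation and slugification into one line; unpacked, the same logic reads as below (regex, separator and max_length are the method's own locals, and slugify is the call already used above):

def build_compliant_name(self) -> str:
    # Truncate the label so prefix + label + uri suffix fit within max_length
    suffix = f'-{self.target_uri}' if len(self.target_uri) else ''
    budget = max_length - len(self.resource_prefix + self.target_uri)
    label = self.target_label[:budget]
    return slugify(
        self.resource_prefix + '-' + label + suffix,
        regex_pattern=rf'{regex}',
        separator=separator,
        lowercase=True,
    )
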
@@ -127,8 +127,8 @@ def generate_policies(self) -> [aws_iam.ManagedPolicy]:
policies.append(
aws_iam.ManagedPolicy(
self.stack,
-f'{self.id}-{index+1}',
-managed_policy_name=f'{self.id}-{index+1}',
+f'{self.id}-{index + 1}',
+managed_policy_name=f'{self.id}-{index + 1}',
statements=chunk,
)
)
4 changes: 2 additions & 2 deletions backend/dataall/core/environment/cdk/environment_stack.py
@@ -124,7 +124,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs):

pivot_role_as_part_of_environment_stack = ParameterStoreManager.get_parameter_value(
region=os.getenv('AWS_REGION', 'eu-west-1'),
-parameter_path=f"/dataall/{os.getenv('envname', 'local')}/pivotRole/enablePivotRoleAutoCreate",
+parameter_path=f'/dataall/{os.getenv("envname", "local")}/pivotRole/enablePivotRoleAutoCreate',
)
self.create_pivot_role = True if pivot_role_as_part_of_environment_stack == 'True' else False
self.engine = self.get_engine()
@@ -582,7 +582,7 @@ def create_topic(self, construct_id, central_account, environment, kms_key):
def create_integration_tests_role(self):
toolingAccount = ParameterStoreManager.get_parameter_value(
region=os.getenv('AWS_REGION', 'eu-west-1'),
-parameter_path=f"/dataall/{os.getenv('envname', 'local')}/toolingAccount",
+parameter_path=f'/dataall/{os.getenv("envname", "local")}/toolingAccount',
)
self.test_role = iam.Role(
self,
@@ -105,8 +105,8 @@ def validate_account_region(data, session):
if environment:
raise exceptions.InvalidInput(
'AwsAccount/region',
-f"{data.get('AwsAccountId')}/{data.get('region')}",
-f"unique. An environment for {data.get('AwsAccountId')}/{data.get('region')} already exists",
+f'{data.get("AwsAccountId")}/{data.get("region")}',
+f'unique. An environment for {data.get("AwsAccountId")}/{data.get("region")} already exists',
)

@staticmethod
@@ -163,7 +163,7 @@ def validate_permissions(session, uri, g_permissions, group):
def get_pivot_role_as_part_of_environment():
ssm_param = ParameterStoreManager.get_parameter_value(
region=os.getenv('AWS_REGION', 'eu-west-1'),
-parameter_path=f"/dataall/{os.getenv('envname', 'local')}/pivotRole/enablePivotRoleAutoCreate",
+parameter_path=f'/dataall/{os.getenv("envname", "local")}/pivotRole/enablePivotRoleAutoCreate',
)
return ssm_param == 'True'

@@ -238,7 +238,7 @@ def create_environment(uri, data=None):
isOrganizationDefaultEnvironment=False,
EnvironmentDefaultIAMRoleName=data.get('EnvironmentDefaultIAMRoleArn', 'unknown').split('/')[-1],
EnvironmentDefaultIAMRoleArn=data.get('EnvironmentDefaultIAMRoleArn', 'unknown'),
-CDKRoleArn=f"arn:aws:iam::{data.get('AwsAccountId')}:role/{cdk_role_name}",
+CDKRoleArn=f'arn:aws:iam::{data.get("AwsAccountId")}:role/{cdk_role_name}',
resourcePrefix=data.get('resourcePrefix'),
)

@@ -48,7 +48,7 @@ def query_user_organizations(session, username, groups, filter) -> Query:
models.Organization.label.ilike('%' + filter.get('term') + '%'),
models.Organization.description.ilike('%' + filter.get('term') + '%'),
models.Organization.tags.contains(
-f"{{{NamingConventionService(pattern=NamingConventionPattern.DEFAULT_SEARCH, target_label=filter.get('term')).sanitize()}}}"
+f'{{{NamingConventionService(pattern=NamingConventionPattern.DEFAULT_SEARCH, target_label=filter.get("term")).sanitize()}}}'
),
)
)
2 changes: 1 addition & 1 deletion backend/dataall/core/stacks/api/resolvers.py
@@ -77,7 +77,7 @@ def resolve_task_id(context, source: Stack, **kwargs):
def get_stack_logs(context: Context, source, targetUri: str = None, targetType: str = None):
query = StackService.get_stack_logs(target_uri=targetUri, target_type=targetType)
envname = os.getenv('envname', 'local')
-log_group_name = f"/{Parameter().get_parameter(env=envname, path='resourcePrefix')}/{envname}/ecs/cdkproxy"
+log_group_name = f'/{Parameter().get_parameter(env=envname, path="resourcePrefix")}/{envname}/ecs/cdkproxy'
log_query_period_days = config.get_property('core.log_query_period_days', 1)

results = CloudWatch.run_query(
@@ -46,7 +46,7 @@ def get_quicksight_reader_url(cls, uri):
)

session_type = ParameterStoreManager.get_parameter_value(
-parameter_path=f"/dataall/{os.getenv('envname', 'local')}/quicksight/sharedDashboardsSessions"
+parameter_path=f'/dataall/{os.getenv("envname", "local")}/quicksight/sharedDashboardsSessions'
)

if session_type == 'reader':
@@ -123,7 +123,7 @@ def initialize_repo(self):
'git remote add origin ${REPO_URL}',
]

-logger.info(f"Running Commands: {'; '.join(cmd_init)}")
+logger.info(f'Running Commands: {"; ".join(cmd_init)}')

CommandSanitizer(args=[self.pipeline.repo, self.pipeline.SamlGroupName])

@@ -236,7 +236,7 @@ def git_push_repo(self):
'git push -u origin main',
]

-logger.info(f"Running Commands: {'; '.join(git_cmds)}")
+logger.info(f'Running Commands: {"; ".join(git_cmds)}')

# This command does not include any customer upstream input
# no sanitization is needed and shell=true does not impose a risk
@@ -258,7 +258,7 @@ def clean_up_repo(pipeline_dir):
code_dir_path = os.path.realpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'blueprints'))

cmd = ['rm', '-rf', f'./{pipeline_dir}']
-logger.info(f"Running command : \n {' '.join(cmd)}")
+logger.info(f'Running command : \n {" ".join(cmd)}')

process = subprocess.run(
cmd, text=True, shell=False, encoding='utf-8', capture_output=True, cwd=code_dir_path
@@ -531,7 +531,7 @@ def write_ddk_json_multienvironment(
def initialize_repo(pipeline, code_dir_path, env_vars):
cmd_init = [f'mkdir {pipeline.repo}', f'cp -R data_pipeline_blueprint/* {pipeline.repo}/']

-logger.info(f"Running Commands: {'; '.join(cmd_init)}")
+logger.info(f'Running Commands: {"; ".join(cmd_init)}')

CommandSanitizer(args=[pipeline.repo])

@@ -126,7 +126,7 @@ def create_pipeline_environment(
environmentLabel=environment.label,
pipelineUri=pipeline.DataPipelineUri,
pipelineLabel=pipeline.label,
-envPipelineUri=f"{pipeline.DataPipelineUri}{environment.environmentUri}{data['stage']}",
+envPipelineUri=f'{pipeline.DataPipelineUri}{environment.environmentUri}{data["stage"]}',
AwsAccountId=environment.AwsAccountId,
region=environment.region,
stage=data['stage'],
@@ -107,7 +107,7 @@ def get_maintenance_window_status():
# Check if ECS tasks are running
ecs_cluster_name = ParameterStoreManager.get_parameter_value(
region=os.getenv('AWS_REGION', 'eu-west-1'),
-parameter_path=f"/dataall/{os.getenv('envname', 'local')}/ecs/cluster/name",
+parameter_path=f'/dataall/{os.getenv("envname", "local")}/ecs/cluster/name',
)
if Ecs.is_task_running(cluster_name=ecs_cluster_name):
logger.info(f'Current maintenance window status - {maintenance_record.status}')
@@ -143,7 +143,7 @@ def _get_maintenance_window_mode(engine):
def _get_ecs_rules():
ecs_scheduled_rules = ParameterStoreManager.get_parameters_by_path(
region=os.getenv('AWS_REGION', 'eu-west-1'),
-parameter_path=f"/dataall/{os.getenv('envname', 'local')}/ecs/ecs_scheduled_tasks/rule",
+parameter_path=f'/dataall/{os.getenv("envname", "local")}/ecs/ecs_scheduled_tasks/rule',
)
logger.debug(ecs_scheduled_rules)
return [item['Value'] for item in ecs_scheduled_rules]
@@ -76,7 +76,7 @@ def count_resources(session, environment, group_uri):
@staticmethod
def create_sagemaker_studio_domain(session, username, environment, data):
domain = SagemakerStudioDomain(
-label=f"{data.get('label')}-domain",
+label=f'{data.get("label")}-domain',
owner=username,
description=data.get('description', 'No description provided'),
tags=data.get('tags', []),
@@ -37,19 +37,19 @@ def fetch_omics_workflows(engine):
workflows = ready_workflows # + private_workflows
log.info(f'Found workflows {str(workflows)} in environment {env.environmentUri}')
for workflow in workflows:
-log.info(f"Processing workflow name={workflow['name']}, id={workflow['id']}...")
+log.info(f'Processing workflow name={workflow["name"]}, id={workflow["id"]}...')
existing_workflow = OmicsRepository(session).get_workflow_by_id(workflow['id'])
if existing_workflow is not None:
log.info(
-f"Workflow name={workflow['name']}, id={workflow['id']} has already been registered in database. Updating information..."
+f'Workflow name={workflow["name"]}, id={workflow["id"]} has already been registered in database. Updating information...'
)
existing_workflow.name = workflow['name']
existing_workflow.label = workflow['name']
session.commit()

else:
log.info(
-f"Workflow name={workflow['name']} , id={workflow['id']} in environment {env.environmentUri} is new. Registering..."
+f'Workflow name={workflow["name"]} , id={workflow["id"]} in environment {env.environmentUri} is new. Registering...'
)
omicsWorkflow = OmicsWorkflow(
id=workflow['id'],