
Commit abfa63d: Consistent LOG_LEVEL for ECS and ECS tasks
dlpzx committed Oct 25, 2024
1 parent c3af2f5
Showing 13 changed files with 22 additions and 21 deletions.
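The commit standardizes how every ECS task process resolves its log level: instead of hard-coding the root logger to INFO, each task module now reads the LOG_LEVEL environment variable that the ECS container definition injects (see deploy/stacks/container.py below). A minimal sketch of the resulting pattern, using only the Python standard library:

    import logging
    import os
    import sys

    # The root logger keeps a stdout handler so ECS/CloudWatch captures output.
    root = logging.getLogger()
    if not root.hasHandlers():
        root.addHandler(logging.StreamHandler(sys.stdout))

    # The module logger's level is driven by the LOG_LEVEL env var set on the
    # ECS container; it falls back to INFO when the variable is absent.
    log = logging.getLogger(__name__)
    log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))

    log.info('visible at the default level')
    log.debug('visible only when LOG_LEVEL=DEBUG')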
@@ -13,10 +13,10 @@
 from dataall.base.utils import Parameter

 root = logging.getLogger()
-root.setLevel(logging.INFO)
 if not root.hasHandlers():
     root.addHandler(logging.StreamHandler(sys.stdout))
 log = logging.getLogger(__name__)
+log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))

 RETRIES = 30
 SLEEP_TIME = 30
2 changes: 1 addition & 1 deletion backend/dataall/core/stacks/tasks/cdkproxy.py
@@ -6,10 +6,10 @@
 from dataall.base.db import get_engine

 root = logging.getLogger()
-root.setLevel(logging.INFO)
 if not root.hasHandlers():
     root.addHandler(logging.StreamHandler(sys.stdout))
 logger = logging.getLogger(__name__)
+logger.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


 if __name__ == '__main__':
@@ -10,10 +10,10 @@
 from dataall.base.utils.alarm_service import AlarmService

 root = logging.getLogger()
-root.setLevel(logging.INFO)
 if not root.hasHandlers():
     root.addHandler(logging.StreamHandler(sys.stdout))
 log = logging.getLogger(__name__)
+log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


 class CatalogIndexerTask:
@@ -12,10 +12,10 @@


 root = logging.getLogger()
-root.setLevel(logging.INFO)
 if not root.hasHandlers():
     root.addHandler(logging.StreamHandler(sys.stdout))
 log = logging.getLogger(__name__)
+log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


 def fetch_omics_workflows(engine):
2 changes: 1 addition & 1 deletion backend/dataall/modules/s3_datasets/tasks/tables_syncer.py
@@ -17,10 +17,10 @@
 from dataall.modules.s3_datasets.services.dataset_alarm_service import DatasetAlarmService

 root = logging.getLogger()
-root.setLevel(logging.INFO)
 if not root.hasHandlers():
     root.addHandler(logging.StreamHandler(sys.stdout))
 log = logging.getLogger(__name__)
+log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


 def sync_tables(engine):
@@ -23,10 +23,10 @@
 from dataall.modules.shares_base.services.share_notification_service import DataSharingNotificationType

 root = logging.getLogger()
-root.setLevel(logging.INFO)
 if not root.hasHandlers():
     root.addHandler(logging.StreamHandler(sys.stdout))
 log = logging.getLogger(__name__)
+log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))

 # TODO: review this task usage and remove if not needed

@@ -7,10 +7,10 @@
 from botocore.exceptions import ClientError

 root = logging.getLogger()
-root.setLevel(logging.INFO)
 if not root.hasHandlers():
     root.addHandler(logging.StreamHandler(sys.stdout))
 log = logging.getLogger(__name__)
+log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))

 ENVNAME = os.getenv('envname', 'local')
 region = os.getenv('AWS_REGION', 'eu-west-1')
@@ -10,10 +10,10 @@


 root = logging.getLogger()
-root.setLevel(logging.INFO)
 if not root.hasHandlers():
     root.addHandler(logging.StreamHandler(sys.stdout))
 log = logging.getLogger(__name__)
+log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


 def persistent_email_reminders(engine):
@@ -13,10 +13,10 @@
 from dataall.modules.shares_base.services.sharing_service import SharingService

 root = logging.getLogger()
-root.setLevel(logging.INFO)
 if not root.hasHandlers():
     root.addHandler(logging.StreamHandler(sys.stdout))
 log = logging.getLogger(__name__)
+log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


 def share_expiration_checker(engine):
@@ -7,10 +7,10 @@
 from dataall.base.loader import load_modules, ImportMode

 root = logging.getLogger()
-root.setLevel(logging.INFO)
 if not root.hasHandlers():
     root.addHandler(logging.StreamHandler(sys.stdout))
 log = logging.getLogger(__name__)
+log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


 if __name__ == '__main__':
@@ -12,10 +12,10 @@
 from dataall.base.loader import load_modules, ImportMode

 root = logging.getLogger()
-root.setLevel(logging.INFO)
 if not root.hasHandlers():
     root.addHandler(logging.StreamHandler(sys.stdout))
 log = logging.getLogger(__name__)
+log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


 class EcsBulkShareRepplyService:
@@ -12,10 +12,10 @@
 from dataall.base.loader import load_modules, ImportMode

 root = logging.getLogger()
-root.setLevel(logging.INFO)
 if not root.hasHandlers():
     root.addHandler(logging.StreamHandler(sys.stdout))
 log = logging.getLogger(__name__)
+log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


 def verify_shares(engine):
19 changes: 10 additions & 9 deletions deploy/stacks/container.py
@@ -52,6 +52,7 @@ def __init__(
         self._ecr_repository = ecr_repository
         self._vpc = vpc
         self._prod_sizing = prod_sizing
+        self._log_level = 'INFO' if prod_sizing else 'DEBUG'

         (self.scheduled_tasks_sg, self.share_manager_sg) = self.create_ecs_security_groups(
             envname, resource_prefix, vpc, vpce_connection, s3_prefix_list, lambdas
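With this single switch, production-sized deployments default every ECS task to INFO while dev/test deployments get DEBUG verbosity, replacing the per-task 'INFO' and 'DEBUG' literals that the hunks below remove.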
@@ -216,7 +217,7 @@ def add_catalog_indexer_task(self):
             command=['python3.9', '-m', 'dataall.modules.catalog.tasks.catalog_indexer_task'],
             container_id=container_id,
             ecr_repository=self._ecr_repository,
-            environment=self._create_env('INFO'),
+            environment=self._create_env(),
             image_tag=self._cdkproxy_image_tag,
             log_group=self.create_log_group(self._envname, self._resource_prefix, log_group_name='catalog-indexer'),
             schedule_expression=Schedule.expression('rate(6 hours)'),

@@ -260,7 +261,7 @@ def add_share_management_task(self):
             f'ShareManagementTaskContainer{self._envname}',
             container_name='container',
             image=ecs.ContainerImage.from_ecr_repository(repository=self._ecr_repository, tag=self._cdkproxy_image_tag),
-            environment=self._create_env('DEBUG'),
+            environment=self._create_env(),
             command=['python3.9', '-m', 'dataall.modules.shares_base.tasks.share_manager_task'],
             logging=ecs.LogDriver.aws_logs(
                 stream_prefix='task',

@@ -291,7 +292,7 @@ def add_share_verifier_task(self):
             command=['python3.9', '-m', 'dataall.modules.shares_base.tasks.share_verifier_task'],
             container_id='container',
             ecr_repository=self._ecr_repository,
-            environment=self._create_env('INFO'),
+            environment=self._create_env(),
             image_tag=self._cdkproxy_image_tag,
             log_group=self.create_log_group(self._envname, self._resource_prefix, log_group_name='share-verifier'),
             schedule_expression=Schedule.expression('rate(7 days)'),

@@ -320,7 +321,7 @@ def add_share_reapplier_task(self):
             f'ShareReapplierTaskContainer{self._envname}',
             container_name='container',
             image=ecs.ContainerImage.from_ecr_repository(repository=self._ecr_repository, tag=self._cdkproxy_image_tag),
-            environment=self._create_env('INFO'),
+            environment=self._create_env(),
             command=['python3.9', '-m', 'dataall.modules.shares_base.tasks.share_reapplier_task'],
             logging=ecs.LogDriver.aws_logs(
                 stream_prefix='task',

@@ -382,7 +383,7 @@ def add_subscription_task(self):
             ],
             container_id='container',
             ecr_repository=self._ecr_repository,
-            environment=self._create_env('INFO'),
+            environment=self._create_env(),
             image_tag=self._cdkproxy_image_tag,
             log_group=self.create_log_group(self._envname, self._resource_prefix, log_group_name='subscriptions'),
             schedule_expression=Schedule.expression('rate(15 minutes)'),

@@ -402,7 +403,7 @@ def add_sync_dataset_table_task(self):
             command=['python3.9', '-m', 'dataall.modules.s3_datasets.tasks.tables_syncer'],
             container_id='container',
             ecr_repository=self._ecr_repository,
-            environment=self._create_env('INFO'),
+            environment=self._create_env(),
             image_tag=self._cdkproxy_image_tag,
             log_group=self.create_log_group(self._envname, self._resource_prefix, log_group_name='tables-syncer'),
             schedule_expression=Schedule.expression('rate(15 minutes)'),

@@ -422,7 +423,7 @@ def add_omics_fetch_workflows_task(self):
             command=['python3.9', '-m', 'dataall.modules.omics.tasks.omics_workflows_fetcher'],
             container_id='container',
             ecr_repository=self._ecr_repository,
-            environment=self._create_env('DEBUG'),
+            environment=self._create_env(),
             image_tag=self._cdkproxy_image_tag,
             log_group=self.create_log_group(
                 self._envname, self._resource_prefix, log_group_name='omics-workflows-fetcher'

@@ -822,10 +823,10 @@ def set_scheduled_task(
     def ecs_task_role(self) -> iam.Role:
         return self.task_role

-    def _create_env(self, log_lvl) -> Dict:
+    def _create_env(self) -> Dict:
         return {
             'AWS_REGION': self.region,
             'envname': self._envname,
-            'LOGLEVEL': log_lvl,
+            'LOG_LEVEL': self._log_level,
             'config_location': '/config.json',
         }
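After this change, _create_env() is the single source of truth for the task environment, and the level it injects is exactly what the task-side os.environ.get('LOG_LEVEL', 'INFO') picks up. A hypothetical standalone sketch of the round trip (the create_env function below stands in for the real method, and the dict update simulates ECS injecting the container environment; neither is the repo's actual code):

    import logging
    import os

    def create_env(prod_sizing: bool) -> dict:
        # Mirrors container.py: one log level for the whole deployment.
        log_level = 'INFO' if prod_sizing else 'DEBUG'
        return {'LOG_LEVEL': log_level}

    # Simulate ECS injecting the container environment into the task process.
    os.environ.update(create_env(prod_sizing=False))

    log = logging.getLogger('task')
    log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))
    print(log.getEffectiveLevel() == logging.DEBUG)  # True for non-prod sizing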
