Unify Logger Config for Tasks (#1709)

### Feature or Bugfix
- Refactoring

### Detail
- Unify logger config in the backend (focused on `/tasks`): task modules no longer configure the root logger themselves; a single `logging.basicConfig` call in `backend/dataall/__init__.py` does it (see the sketch below)
- Fix the `LOG_LEVEL` setting so it is read in one place and applied consistently to task loggers
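
A minimal sketch of the resulting pattern in a task module (module and function names below are illustrative): the task only requests a named logger, while the handler, format, and level come from the root configuration added in `backend/dataall/__init__.py`.

```python
# Illustrative task module after this change: no root-handler setup or
# setLevel calls here. The root logger is configured once in
# backend/dataall/__init__.py (on package import), which reads LOG_LEVEL
# from the environment.
import logging

log = logging.getLogger(__name__)


def run_task():
    # Inherits the root level (LOG_LEVEL, default INFO) and the stdout handler.
    log.info('task started')
```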

### Relates
- #1680
- #1662
### Security
Please answer the questions below briefly where applicable, or write `N/A`. Based on [OWASP 10](https://owasp.org/Top10/en/).

- Does this PR introduce or modify any input fields or queries - this includes fetching data from storage outside the application (e.g. a database, an S3 bucket)?
  - Is the input sanitized?
  - What precautions are you taking before deserializing the data you consume?
  - Is injection prevented by parametrizing queries?
  - Have you ensured no `eval` or similar functions are used?
- Does this PR introduce any functionality or component that requires authorization?
  - How have you ensured it respects the existing AuthN/AuthZ mechanisms?
  - Are you logging failed auth attempts?
- Are you using or adding any cryptographic features?
  - Do you use standard, proven implementations?
  - Are the used keys controlled by the customer? Where are they stored?
- Are you introducing any new policies/roles/users?
  - Have you used the least-privilege principle? How?


By submitting this pull request, I confirm that my contribution is made
under the terms of the Apache 2.0 license.
noah-paige authored Nov 26, 2024
1 parent 05bc85a commit d76d55c
Showing 13 changed files with 13 additions and 49 deletions.
11 changes: 11 additions & 0 deletions backend/dataall/__init__.py
@@ -1,2 +1,13 @@
from . import core, version
from .base import utils, db, api
import logging
import os
import sys

logging.basicConfig(
    level=os.environ.get('LOG_LEVEL', 'INFO'),
    handlers=[logging.StreamHandler(sys.stdout)],
    format='[%(levelname)s] %(message)s',
)
for name in ['boto3', 's3transfer', 'botocore', 'boto', 'urllib3']:
    logging.getLogger(name).setLevel(logging.ERROR)
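
A standalone sketch of the behaviour this establishes, reproducing the same `basicConfig` call rather than importing the package (the logger name below is just an example): module loggers inherit the root level derived from `LOG_LEVEL`, while the boto-family loggers stay pinned at `ERROR`.

```python
# Standalone sketch, assuming the same root configuration as above.
import logging
import os
import sys

logging.basicConfig(
    level=os.environ.get('LOG_LEVEL', 'INFO'),
    handlers=[logging.StreamHandler(sys.stdout)],
    format='[%(levelname)s] %(message)s',
)
for name in ['boto3', 's3transfer', 'botocore', 'boto', 'urllib3']:
    logging.getLogger(name).setLevel(logging.ERROR)

log = logging.getLogger('dataall.core.stacks.tasks.cdkproxy')
log.info('printed to stdout at the configured LOG_LEVEL')                 # emitted
logging.getLogger('botocore').info('hidden: botocore is pinned to ERROR')  # suppressed
```

Running the same snippet with `LOG_LEVEL=DEBUG` raises the verbosity of every `dataall.*` logger without touching the individual task modules.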
5 changes: 1 addition & 4 deletions backend/dataall/core/environment/tasks/env_stacks_updater.py
@@ -12,11 +12,8 @@
from dataall.base.db import get_engine
from dataall.base.utils import Parameter

root = logging.getLogger()
if not root.hasHandlers():
    root.addHandler(logging.StreamHandler(sys.stdout))
log = logging.getLogger(__name__)
log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


RETRIES = 30
SLEEP_TIME = 30
4 changes: 0 additions & 4 deletions backend/dataall/core/stacks/tasks/cdkproxy.py
@@ -5,11 +5,7 @@
from dataall.base.cdkproxy.cdk_cli_wrapper import deploy_cdk_stack
from dataall.base.db import get_engine

root = logging.getLogger()
if not root.hasHandlers():
    root.addHandler(logging.StreamHandler(sys.stdout))
logger = logging.getLogger(__name__)
logger.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


if __name__ == '__main__':
4 changes: 0 additions & 4 deletions backend/dataall/modules/catalog/tasks/catalog_indexer_task.py
@@ -9,11 +9,7 @@
from dataall.base.loader import load_modules, ImportMode
from dataall.base.utils.alarm_service import AlarmService

root = logging.getLogger()
if not root.hasHandlers():
    root.addHandler(logging.StreamHandler(sys.stdout))
log = logging.getLogger(__name__)
log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


class CatalogIndexerTask:
@@ -11,11 +11,7 @@
from dataall.modules.omics.db.omics_repository import OmicsRepository


root = logging.getLogger()
if not root.hasHandlers():
    root.addHandler(logging.StreamHandler(sys.stdout))
log = logging.getLogger(__name__)
log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


def fetch_omics_workflows(engine):
4 changes: 0 additions & 4 deletions backend/dataall/modules/s3_datasets/tasks/tables_syncer.py
@@ -16,11 +16,7 @@
from dataall.modules.s3_datasets.indexers.dataset_indexer import DatasetIndexer
from dataall.modules.s3_datasets.services.dataset_alarm_service import DatasetAlarmService

root = logging.getLogger()
if not root.hasHandlers():
    root.addHandler(logging.StreamHandler(sys.stdout))
log = logging.getLogger(__name__)
log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


def sync_tables(engine):
@@ -22,11 +22,7 @@
from dataall.modules.shares_base.db.share_object_models import ShareObject
from dataall.modules.shares_base.services.share_notification_service import DataSharingNotificationType

root = logging.getLogger()
if not root.hasHandlers():
    root.addHandler(logging.StreamHandler(sys.stdout))
log = logging.getLogger(__name__)
log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))

# TODO: review this task usage and remove if not needed

@@ -6,11 +6,8 @@
import boto3
from botocore.exceptions import ClientError

root = logging.getLogger()
if not root.hasHandlers():
    root.addHandler(logging.StreamHandler(sys.stdout))
log = logging.getLogger(__name__)
log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


ENVNAME = os.getenv('envname', 'local')
region = os.getenv('AWS_REGION', 'eu-west-1')
@@ -9,11 +9,7 @@
from dataall.modules.datasets_base.db.dataset_repositories import DatasetBaseRepository


root = logging.getLogger()
if not root.hasHandlers():
    root.addHandler(logging.StreamHandler(sys.stdout))
log = logging.getLogger(__name__)
log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


def persistent_email_reminders(engine):
@@ -12,11 +12,7 @@
from dataall.modules.shares_base.services.shares_enums import ShareObjectActions
from dataall.modules.shares_base.services.sharing_service import SharingService

root = logging.getLogger()
if not root.hasHandlers():
    root.addHandler(logging.StreamHandler(sys.stdout))
log = logging.getLogger(__name__)
log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


def share_expiration_checker(engine):
@@ -6,12 +6,7 @@
from dataall.base.db import get_engine
from dataall.base.loader import load_modules, ImportMode

root = logging.getLogger()
if not root.hasHandlers():
    root.addHandler(logging.StreamHandler(sys.stdout))
log = logging.getLogger(__name__)
log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


if __name__ == '__main__':
try:
@@ -11,11 +11,7 @@

from dataall.base.loader import load_modules, ImportMode

root = logging.getLogger()
if not root.hasHandlers():
    root.addHandler(logging.StreamHandler(sys.stdout))
log = logging.getLogger(__name__)
log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


class EcsBulkShareRepplyService:
@@ -11,11 +11,7 @@

from dataall.base.loader import load_modules, ImportMode

root = logging.getLogger()
if not root.hasHandlers():
    root.addHandler(logging.StreamHandler(sys.stdout))
log = logging.getLogger(__name__)
log.setLevel(os.environ.get('LOG_LEVEL', 'INFO'))


def verify_shares(engine):
