Remove redundant catch exceptions in Amazon Log Task Handlers #26442

Merged 1 commit on Sep 19, 2022
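Summary of the change: both Amazon log task handlers wrapped hook creation in a broad try/except that logged an error and returned None. The catch was redundant: the hooks live in the same apache-airflow-providers-amazon package as the handlers, so the guarded local import could not fail for the reason the message suggested ("make sure that apache-airflow[aws] is installed"), and returning None only deferred the failure to a more confusing point downstream. A minimal sketch (hypothetical class names, not from this PR) of why a swallowing cached_property is worse than letting the error propagate:

```python
from functools import cached_property  # airflow.compat.functools re-exports this


class SwallowingHandler:
    """Old pattern: catch everything, log, and cache None."""

    @cached_property
    def hook(self):
        try:
            raise RuntimeError("could not create hook")  # e.g. bad connection setup
        except Exception:
            return None  # the real error is hidden; None is now the cached value


class PropagatingHandler:
    """New pattern: let the failure surface with its full traceback."""

    @cached_property
    def hook(self):
        raise RuntimeError("could not create hook")


print(SwallowingHandler().hook)  # prints: None -- callers fail later, obscurely

try:
    PropagatingHandler().hook
except RuntimeError as exc:
    print(f"surfaced immediately: {exc}")
```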
18 changes: 4 additions & 14 deletions airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
@@ -23,6 +23,7 @@

 from airflow.compat.functools import cached_property
 from airflow.configuration import conf
+from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
 from airflow.utils.log.file_task_handler import FileTaskHandler
 from airflow.utils.log.logging_mixin import LoggingMixin

@@ -51,20 +52,9 @@ def __init__(self, base_log_folder: str, log_group_arn: str, filename_template:
     @cached_property
     def hook(self):
         """Returns AwsLogsHook."""
-        remote_conn_id = conf.get('logging', 'REMOTE_LOG_CONN_ID')
-        try:
-            from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
-
-            return AwsLogsHook(aws_conn_id=remote_conn_id, region_name=self.region_name)
-        except Exception as e:
-            self.log.error(
-                'Could not create an AwsLogsHook with connection id "%s". '
-                'Please make sure that apache-airflow[aws] is installed and '
-                'the Cloudwatch logs connection exists. Exception: "%s"',
-                remote_conn_id,
-                e,
-            )
-            return None
+        return AwsLogsHook(
+            aws_conn_id=conf.get('logging', 'REMOTE_LOG_CONN_ID'), region_name=self.region_name
+        )

     def _render_filename(self, ti, try_number):
         # Replace unsupported log group name characters
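For readers unfamiliar with the hook being constructed here: AwsLogsHook is the provider's wrapper around the CloudWatch Logs API, and the handler reads task logs back through its get_log_events generator. A minimal usage sketch, assuming a configured aws_default connection; the group and stream names are illustrative placeholders, not values the handler uses:

```python
from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook

hook = AwsLogsHook(aws_conn_id="aws_default", region_name="us-east-1")

# get_log_events yields CloudWatch event dicts (timestamp, message, ...).
for event in hook.get_log_events(
    log_group="airflow-task-logs",          # placeholder name
    log_stream_name="dag_id/task_id/1.log",  # placeholder name
):
    print(event["timestamp"], event["message"])
```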
18 changes: 4 additions & 14 deletions airflow/providers/amazon/aws/log/s3_task_handler.py
@@ -22,6 +22,7 @@

 from airflow.compat.functools import cached_property
 from airflow.configuration import conf
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.utils.log.file_task_handler import FileTaskHandler
 from airflow.utils.log.logging_mixin import LoggingMixin

@@ -44,20 +45,9 @@ def __init__(self, base_log_folder: str, s3_log_folder: str, filename_template:
     @cached_property
     def hook(self):
         """Returns S3Hook."""
-        remote_conn_id = conf.get('logging', 'REMOTE_LOG_CONN_ID')
-        try:
-            from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-
-            return S3Hook(remote_conn_id, transfer_config_args={"use_threads": False})
-        except Exception as e:
-            self.log.exception(
-                'Could not create an S3Hook with connection id "%s". '
-                'Please make sure that apache-airflow[aws] is installed and '
-                'the S3 connection exists. Exception : "%s"',
-                remote_conn_id,
-                e,
-            )
-            return None
+        return S3Hook(
+            aws_conn_id=conf.get('logging', 'REMOTE_LOG_CONN_ID'), transfer_config_args={"use_threads": False}
+        )

     def set_context(self, ti):
         super().set_context(ti)
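An aside on the one argument that survives the rewrite: transfer_config_args={"use_threads": False} is forwarded to boto3's TransferConfig (the test diff below asserts hook.transfer_config.use_threads is False). Disabling threads keeps uploads single-threaded, the safer choice in processes that may fork. A boto3-level sketch of the equivalent setting, with placeholder bucket and key names:

```python
import boto3
from boto3.s3.transfer import TransferConfig

# use_threads=False disables boto3's internal thread pool for transfers.
config = TransferConfig(use_threads=False)

s3 = boto3.client("s3")
# Placeholder names; Config routes the upload through the single-threaded path.
s3.upload_file("task.log", "example-bucket", "logs/task.log", Config=config)
```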
23 changes: 1 addition & 22 deletions tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py
@@ -20,7 +20,7 @@
 import time
 from datetime import datetime as dt
 from unittest import mock
-from unittest.mock import ANY, call
+from unittest.mock import call

 import pytest
 from watchtower import CloudWatchLogHandler
@@ -100,27 +100,6 @@ def setup(self, create_log_template, tmp_path_factory):
     def test_hook(self):
         assert isinstance(self.cloudwatch_task_handler.hook, AwsLogsHook)

-    @conf_vars({('logging', 'remote_log_conn_id'): 'aws_default'})
-    def test_hook_raises(self):
-        handler = CloudwatchTaskHandler(
-            self.local_log_location,
-            f"arn:aws:logs:{self.region_name}:11111111:log-group:{self.remote_log_group}",
-        )
-
-        with mock.patch.object(handler.log, 'error') as mock_error:
-            with mock.patch("airflow.providers.amazon.aws.hooks.logs.AwsLogsHook") as mock_hook:
-                mock_hook.side_effect = Exception('Failed to connect')
-                # Initialize the hook
-                handler.hook
-
-            mock_error.assert_called_once_with(
-                'Could not create an AwsLogsHook with connection id "%s". Please make '
-                'sure that apache-airflow[aws] is installed and the Cloudwatch '
-                'logs connection exists. Exception: "%s"',
-                'aws_default',
-                ANY,
-            )
-
     def test_handler(self):
         self.cloudwatch_task_handler.set_context(self.ti)
         assert isinstance(self.cloudwatch_task_handler.handler, CloudWatchLogHandler)
18 changes: 0 additions & 18 deletions tests/providers/amazon/aws/log/test_s3_task_handler.py
@@ -20,7 +20,6 @@
 import contextlib
 import os
 from unittest import mock
-from unittest.mock import ANY

 import pytest
 from botocore.exceptions import ClientError
@@ -97,23 +96,6 @@ def test_hook(self):
         assert isinstance(self.s3_task_handler.hook, S3Hook)
         assert self.s3_task_handler.hook.transfer_config.use_threads is False

-    @conf_vars({('logging', 'remote_log_conn_id'): 'aws_default'})
-    def test_hook_raises(self):
-        handler = S3TaskHandler(self.local_log_location, self.remote_log_base)
-        with mock.patch.object(handler.log, 'error') as mock_error:
-            with mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook") as mock_hook:
-                mock_hook.side_effect = Exception('Failed to connect')
-                # Initialize the hook
-                handler.hook
-
-            mock_error.assert_called_once_with(
-                'Could not create an S3Hook with connection id "%s". Please make '
-                'sure that apache-airflow[aws] is installed and the S3 connection exists. Exception : "%s"',
-                'aws_default',
-                ANY,
-                exc_info=True,
-            )
-
     def test_log_exists(self):
         self.conn.put_object(Bucket='bucket', Key=self.remote_log_key, Body=b'')
         assert self.s3_task_handler.s3_log_exists(self.remote_log_location)
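The two deleted test_hook_raises tests existed only to pin the swallow-and-log behavior, so they go with it. If an error-path test were still wanted, a sketch (not part of this PR) would patch the hook class in the handler module's namespace -- the import is now module-level, so that is where it must be patched -- and assert the exception propagates:

```python
from unittest import mock

import pytest

from airflow.providers.amazon.aws.log.s3_task_handler import S3TaskHandler


def test_hook_propagates_error(tmp_path):
    # Placeholder log locations; only hook construction is exercised here.
    handler = S3TaskHandler(str(tmp_path), "s3://bucket/logs")
    # Patch where the handler now looks the class up (module-level import).
    with mock.patch(
        "airflow.providers.amazon.aws.log.s3_task_handler.S3Hook",
        side_effect=Exception("Failed to connect"),
    ):
        with pytest.raises(Exception, match="Failed to connect"):
            handler.hook
```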