[AIRFLOW-3789] Fix flake8 3.7 errors. #4617

Merged · 1 commit · Jan 30, 2019
8 changes: 4 additions & 4 deletions airflow/config_templates/airflow_local_settings.py

```diff
@@ -214,10 +214,10 @@
         mkdirs(directory, 0o755)

 if REMOTE_LOGGING and REMOTE_BASE_LOG_FOLDER.startswith('s3://'):
-        DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['s3'])
+    DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['s3'])
 elif REMOTE_LOGGING and REMOTE_BASE_LOG_FOLDER.startswith('gs://'):
-        DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['gcs'])
+    DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['gcs'])
 elif REMOTE_LOGGING and REMOTE_BASE_LOG_FOLDER.startswith('wasb'):
-        DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['wasb'])
+    DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['wasb'])
 elif REMOTE_LOGGING and ELASTICSEARCH_HOST:
-        DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['elasticsearch'])
+    DEFAULT_LOGGING_CONFIG['handlers'].update(REMOTE_HANDLERS['elasticsearch'])
```
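The four changed pairs above differ only in leading whitespace, which the page capture flattened: the handler updates were indented one level too deep. flake8 3.7 ships pycodestyle 2.5, whose new over-indentation check (E117) rejects exactly this pattern. A minimal reproduction, hypothetical rather than taken from the PR:

```python
# pycodestyle 2.5 (bundled with flake8 3.7) reports "E117 over-indented"
# when a block is indented more than one level past its opening statement.
if True:
        print("over-indented")  # E117
```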
2 changes: 1 addition & 1 deletion airflow/contrib/hooks/gcp_dataflow_hook.py

```diff
@@ -167,7 +167,7 @@ def wait_for_done(self):
             if self._proc.poll() is not None:
                 # Mark process completion but allows its outputs to be consumed.
                 process_ends = True
-                if self._proc.returncode is not 0:
+                if self._proc.returncode != 0:
                     raise Exception("DataFlow failed with return code {}".format(
                         self._proc.returncode))
         return job_id
```
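A note on the fix: `is` tests object identity, while `==`/`!=` compare values, so `returncode is not 0` only behaved correctly thanks to CPython's small-integer cache. Recent pyflakes (bundled with flake8 3.7) reports comparing against int, str, or bytes literals with `is` as F632. A standalone illustration, not code from the PR:

```python
# `is` checks identity, `==` checks equality. Ints built at runtime are
# distinct objects when they fall outside CPython's small-int cache.
x = int("1000")
y = int("1000")
print(x == y)  # True: equal values
print(x is y)  # False on CPython: two different int objects
```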
6 changes: 3 additions & 3 deletions airflow/contrib/hooks/imap_hook.py

```diff
@@ -141,11 +141,11 @@ def download_mail_attachments(self,
         self._create_files(mail_attachments, local_output_directory)

     def _handle_not_found_mode(self, not_found_mode):
-        if not_found_mode is 'raise':
+        if not_found_mode == 'raise':
             raise AirflowException('No mail attachments found!')
-        elif not_found_mode is 'warn':
+        elif not_found_mode == 'warn':
             self.log.warning('No mail attachments found!')
-        elif not_found_mode is 'ignore':
+        elif not_found_mode == 'ignore':
             pass  # Do not notify if the attachment has not been found.
         else:
             self.log.error('Invalid "not_found_mode" %s', not_found_mode)
```
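The same fix applied to strings: identity comparison against a string literal only works when the interpreter happens to intern both strings, which is an implementation detail. A short demonstration, again independent of the PR:

```python
# Equal strings are not guaranteed to be the same object.
s = "".join(["ra", "ise"])   # built at runtime, typically not interned
print(s == "raise")          # True: equal values
print(s is "raise")          # False on CPython (SyntaxWarning on 3.8+)
```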
(file name for this hunk was not preserved in the capture; 1 addition)

```diff
@@ -16,6 +16,7 @@
 # under the License.

 import yaml
+from airflow.contrib.kubernetes.pod import Pod
 from airflow.contrib.kubernetes.kubernetes_request_factory.kubernetes_request_factory \
     import KubernetesRequestFactory

```
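The added `Pod` import is presumably tied to pyflakes' new type-comment checking: names used in `# type:` comments must now resolve in the module, and `Pod` appears to be used only in such comments in this factory module. The same change recurs in pod_launcher.py below; see the sketch after that diff.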
5 changes: 3 additions & 2 deletions airflow/contrib/kubernetes/pod_launcher.py

```diff
@@ -20,6 +20,7 @@
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.state import State
 from datetime import datetime as dt
+from airflow.contrib.kubernetes.pod import Pod
 from airflow.contrib.kubernetes.kubernetes_request_factory import \
     pod_request_factory as pod_factory
 from kubernetes import watch, client
@@ -69,7 +70,7 @@ def delete_pod(self, pod):
             raise

     def run_pod(self, pod, startup_timeout=120, get_logs=True):
-        # type: (Pod) -> (State, result)
+        # type: (Pod, int, bool) -> (State, str)
         """
         Launches the pod synchronously and waits for completion.
         Args:
@@ -90,7 +91,7 @@ def run_pod(self, pod, startup_timeout=120, get_logs=True):
         return self._monitor_pod(pod, get_logs)

     def _monitor_pod(self, pod, get_logs):
-        # type: (Pod) -> (State, content)
+        # type: (Pod, bool) -> (State, str)

         if get_logs:
             logs = self._client.read_namespaced_pod_log(
```
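Both Kubernetes files follow the same pattern: flake8 3.7's pyflakes parses `# type:` comments, so names mentioned in them (here `Pod`, plus the undefined `result` and `content`) must actually exist, and the argument list should match the signature. A minimal sketch of a checked type comment, with illustrative names that are not from Airflow:

```python
from typing import Tuple

def scale(point, factor=2):
    # type: (Tuple[int, int], int) -> Tuple[int, int]
    # pyflakes parses this comment: an undefined name inside it is
    # reported as F821, just like one in ordinary code.
    x, y = point
    return (x * factor, y * factor)

print(scale((2, 3)))  # (4, 6)
```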
2 changes: 1 addition & 1 deletion airflow/contrib/operators/mlengine_operator.py

```diff
@@ -42,7 +42,7 @@ def _normalize_mlengine_job_id(job_id):

     # Add a prefix when a job_id starts with a digit or a template
     match = re.search(r'\d|\{{2}', job_id)
-    if match and match.start() is 0:
+    if match and match.start() == 0:
         job = 'z_{}'.format(job_id)
     else:
         job = job_id
```
2 changes: 1 addition & 1 deletion airflow/contrib/operators/ssh_operator.py

```diff
@@ -147,7 +147,7 @@ def execute(self, context):
             stderr.close()

             exit_status = stdout.channel.recv_exit_status()
-            if exit_status is 0:
+            if exit_status == 0:
                 # returning output if do_xcom_push is set
                 if self.do_xcom_push:
                     enable_pickling = configuration.conf.getboolean(
```
2 changes: 1 addition & 1 deletion airflow/contrib/operators/winrm_operator.py

```diff
@@ -126,7 +126,7 @@ def execute(self, context):
         except Exception as e:
             raise AirflowException("WinRM operator error: {0}".format(str(e)))

-        if return_code is 0:
+        if return_code == 0:
             # returning output if do_xcom_push is set
             if self.do_xcom_push:
                 enable_pickling = configuration.conf.getboolean(
```
3 changes: 2 additions & 1 deletion airflow/contrib/utils/gcp_field_validator.py

```diff
@@ -134,6 +134,7 @@

 import re

+from typing import Callable
 from airflow import LoggingMixin, AirflowException

 COMPOSITE_FIELD_TYPES = ['union', 'dict', 'list']
@@ -209,7 +210,7 @@ def _get_field_name_with_parent(field_name, parent):
     @staticmethod
     def _sanity_checks(children_validation_specs, field_type, full_field_path,
                        regexp, allow_empty, custom_validation, value):
-        # type: (dict, str, str, str, function, object) -> None
+        # type: (dict, str, str, str, Callable, object) -> None
         if value is None and field_type != 'union':
             raise GcpFieldValidationException(
                 "The required body field '{}' is missing. Please add it.".
```
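`function` is not a defined Python name, so once type comments are checked it would be reported as undefined (F821); `typing.Callable` is the standard spelling. A hedged sketch with hypothetical names:

```python
from typing import Callable, Optional

def run_check(value, check=None):
    # type: (object, Optional[Callable]) -> None
    # Callable (optionally with a signature, e.g. Callable[[object], bool])
    # replaces the non-existent `function` type.
    if check is not None:
        check(value)

run_check(42, print)  # prints 42
```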
1 change: 1 addition & 0 deletions setup.py

```diff
@@ -248,6 +248,7 @@ def write_version(filename=os.path.join(*['airflow',
     'rednose',
     'requests_mock',
     'flake8>=3.6.0',
+    'typing',
 ]

 if not PY3:
```
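The `typing` module entered the standard library in Python 3.5, but Airflow at this point still supported Python 2.7, so the PyPI backport is added (here, in what appears to be the development dependency list alongside flake8 itself) to make the new imports resolve everywhere:

```python
# stdlib on Python 3.5+; provided by the PyPI "typing" backport on 2.7,
# so the same import works on every interpreter Airflow then supported.
from typing import Callable
```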
8 changes: 4 additions & 4 deletions tests/hooks/test_http_hook.py

```diff
@@ -140,13 +140,13 @@ def test_hook_contains_header_from_extra_field(self, m):

     @requests_mock.mock()
     def test_hook_uses_provided_header(self, m):
-            conn = self.get_hook.get_conn(headers={"bareer": "newT0k3n"})
-            self.assertEqual(conn.headers.get('bareer'), "newT0k3n")
+        conn = self.get_hook.get_conn(headers={"bareer": "newT0k3n"})
+        self.assertEqual(conn.headers.get('bareer'), "newT0k3n")

     @requests_mock.mock()
     def test_hook_has_no_header_from_extra(self, m):
-            conn = self.get_hook.get_conn()
-            self.assertIsNone(conn.headers.get('bareer'))
+        conn = self.get_hook.get_conn()
+        self.assertIsNone(conn.headers.get('bareer'))

     @requests_mock.mock()
     def test_hooks_header_from_extra_is_overridden(self, m):
```
66 changes: 33 additions & 33 deletions tests/test_jobs.py

```diff
@@ -341,44 +341,44 @@ def test_backfill_rerun_failed_tasks(self):
         self.assertEqual(ti.state, State.SUCCESS)

     def test_backfill_rerun_upstream_failed_tasks(self):
-            dag = DAG(
-                dag_id='test_backfill_rerun_upstream_failed',
-                start_date=DEFAULT_DATE,
-                schedule_interval='@daily')
+        dag = DAG(
+            dag_id='test_backfill_rerun_upstream_failed',
+            start_date=DEFAULT_DATE,
+            schedule_interval='@daily')

-            with dag:
-                t1 = DummyOperator(task_id='test_backfill_rerun_upstream_failed_task-1',
-                                   dag=dag)
-                t2 = DummyOperator(task_id='test_backfill_rerun_upstream_failed_task-2',
-                                   dag=dag)
-                t1.set_upstream(t2)
+        with dag:
+            t1 = DummyOperator(task_id='test_backfill_rerun_upstream_failed_task-1',
+                               dag=dag)
+            t2 = DummyOperator(task_id='test_backfill_rerun_upstream_failed_task-2',
+                               dag=dag)
+            t1.set_upstream(t2)

-            dag.clear()
-            executor = TestExecutor(do_update=True)
+        dag.clear()
+        executor = TestExecutor(do_update=True)

-            job = BackfillJob(dag=dag,
-                              executor=executor,
-                              start_date=DEFAULT_DATE,
-                              end_date=DEFAULT_DATE + datetime.timedelta(days=2),
-                              )
-            job.run()
+        job = BackfillJob(dag=dag,
+                          executor=executor,
+                          start_date=DEFAULT_DATE,
+                          end_date=DEFAULT_DATE + datetime.timedelta(days=2),
+                          )
+        job.run()

-            ti = TI(task=dag.get_task('test_backfill_rerun_upstream_failed_task-1'),
-                    execution_date=DEFAULT_DATE)
-            ti.refresh_from_db()
-            ti.set_state(State.UPSTREAM_FAILED)
+        ti = TI(task=dag.get_task('test_backfill_rerun_upstream_failed_task-1'),
+                execution_date=DEFAULT_DATE)
+        ti.refresh_from_db()
+        ti.set_state(State.UPSTREAM_FAILED)

-            job = BackfillJob(dag=dag,
-                              executor=executor,
-                              start_date=DEFAULT_DATE,
-                              end_date=DEFAULT_DATE + datetime.timedelta(days=2),
-                              rerun_failed_tasks=True
-                              )
-            job.run()
-            ti = TI(task=dag.get_task('test_backfill_rerun_upstream_failed_task-1'),
-                    execution_date=DEFAULT_DATE)
-            ti.refresh_from_db()
-            self.assertEqual(ti.state, State.SUCCESS)
+        job = BackfillJob(dag=dag,
+                          executor=executor,
+                          start_date=DEFAULT_DATE,
+                          end_date=DEFAULT_DATE + datetime.timedelta(days=2),
+                          rerun_failed_tasks=True
+                          )
+        job.run()
+        ti = TI(task=dag.get_task('test_backfill_rerun_upstream_failed_task-1'),
+                execution_date=DEFAULT_DATE)
+        ti.refresh_from_db()
+        self.assertEqual(ti.state, State.SUCCESS)

     def test_backfill_rerun_failed_tasks_without_flag(self):
         dag = DAG(
```
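Like the airflow_local_settings.py hunk, both test-file changes above are whitespace-only: each duplicated pair in the capture was a removed line and its re-added twin differing just in indentation, consistent with pycodestyle 2.5's new E117 (over-indented) check; the exact spacing shown is reconstructed on that assumption.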