
Commit d494d5a

Fokko authored and kaxil committed
[AIRFLOW-2918] Fix Flake8 violations (#3772)
- Unused imports
- Wrong import order
- Small indentation fixes
- Remove one-letter variables
- Fix noqa annotations
1 parent afe857f commit d494d5a
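A note for readers skimming the diff: the fixes below lean on flake8's suppression comments, either file-wide ("# flake8: noqa: <code>" near the top of a module) or per line ("# noqa: <code>"), plus moving module-level imports above other top-level statements so they are no longer flagged as out of order. The following sketch is illustrative only and is not a file from this commit; the module and variable names are made up.

# example.py -- hypothetical illustration of the per-line suppressions used below
import sys

# F401 flags imports that are never used. When a name is imported only so that
# downstream modules can re-import it from here, the import is kept and the
# warning is silenced on that line instead of deleting the import:
from collections import OrderedDict  # noqa: F401

# E712 flags comparisons written as `== False` (E711 flags `== None`). SQLAlchemy
# overloads `==` on column objects to build SQL expressions, so the filters in
# jobs.py keep that spelling and silence the check per line, as in:
optimize_disabled = sys.flags.optimize == False  # noqa: E712
print(optimize_disabled)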

54 files changed: +338 -338 lines changed


airflow/__init__.py (+2 -2)

@@ -32,8 +32,8 @@
 
 import sys
 
-from airflow import configuration as conf
-from airflow import settings
+# flake8: noqa: F401
+from airflow import settings, configuration as conf
 from airflow.models import DAG
 from flask_admin import BaseView
 from importlib import import_module

airflow/contrib/auth/backends/github_enterprise_auth.py (+3 -7)

@@ -19,18 +19,14 @@
 import flask_login
 
 # Need to expose these downstream
-# pylint: disable=unused-import
-from flask_login import (current_user,
-                         logout_user,
-                         login_required,
-                         login_user)
-# pylint: enable=unused-import
+# flake8: noqa: F401
+from flask_login import current_user, logout_user, login_required, login_user
 
 from flask import url_for, redirect, request
 
 from flask_oauthlib.client import OAuth
 
-from airflow import models, configuration, settings
+from airflow import models, configuration
 from airflow.configuration import AirflowConfigException
 from airflow.utils.db import provide_session
 from airflow.utils.log.logging_mixin import LoggingMixin

airflow/contrib/auth/backends/google_auth.py (+3 -7)

@@ -19,18 +19,14 @@
 import flask_login
 
 # Need to expose these downstream
-# pylint: disable=unused-import
-from flask_login import (current_user,
-                         logout_user,
-                         login_required,
-                         login_user)
-# pylint: enable=unused-import
+# flake8: noqa: F401
+from flask_login import current_user, logout_user, login_required, login_user
 
 from flask import url_for, redirect, request
 
 from flask_oauthlib.client import OAuth
 
-from airflow import models, configuration, settings
+from airflow import models, configuration
 from airflow.utils.db import provide_session
 from airflow.utils.log.logging_mixin import LoggingMixin

airflow/contrib/auth/backends/kerberos_auth.py (+1 -3)

@@ -21,8 +21,7 @@
 import flask_login
 from flask_login import current_user
 from flask import flash
-from wtforms import (
-    Form, PasswordField, StringField)
+from wtforms import Form, PasswordField, StringField
 from wtforms.validators import InputRequired
 
 # pykerberos should be used as it verifies the KDC, the "kerberos" module does not do so
@@ -32,7 +31,6 @@
 
 from flask import url_for, redirect
 
-from airflow import settings
 from airflow import models
 from airflow import configuration
 from airflow.utils.db import provide_session

airflow/contrib/auth/backends/ldap_auth.py (+3 -4)

@@ -19,13 +19,12 @@
 from future.utils import native
 
 import flask_login
-from flask_login import login_required, current_user, logout_user
+from flask_login import login_required, current_user, logout_user  # noqa: F401
 from flask import flash
-from wtforms import (
-    Form, PasswordField, StringField)
+from wtforms import Form, PasswordField, StringField
 from wtforms.validators import InputRequired
 
-from ldap3 import Server, Connection, Tls, LEVEL, SUBTREE, BASE
+from ldap3 import Server, Connection, Tls, LEVEL, SUBTREE
 import ssl
 
 from flask import url_for, redirect

airflow/contrib/operators/mlengine_prediction_summary.py (+1 -0)

@@ -1,3 +1,4 @@
+# flake8: noqa: F841
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements. See the NOTICE file distributed with

airflow/default_login.py (+2 -5)

@@ -25,11 +25,11 @@
 """
 
 import flask_login
-from flask_login import login_required, current_user, logout_user
+from flask_login import login_required, current_user, logout_user  # noqa: F401
 
 from flask import url_for, redirect
 
-from airflow import settings
+from airflow import settings  # noqa: F401
 from airflow import models
 from airflow.utils.db import provide_session
 
@@ -64,9 +64,6 @@ def is_superuser(self):
         """Access all the things"""
         return True
 
-# models.User = User # hack!
-# del User
-
 
 @login_manager.user_loader
 @provide_session

airflow/jobs.py (+17 -18)

@@ -657,7 +657,7 @@ def manage_slas(self, dag, session=None):
         slas = (
             session
             .query(SlaMiss)
-            .filter(SlaMiss.notification_sent == False)
+            .filter(SlaMiss.notification_sent == False)  # noqa: E712
             .filter(SlaMiss.dag_id == dag.dag_id)
             .all()
         )
@@ -707,16 +707,13 @@ def manage_slas(self, dag, session=None):
             Blocking tasks:
             <pre><code>{blocking_task_list}\n{bug}<code></pre>
             """.format(bug=asciiart.bug, **locals())
-            emails = []
-            for t in dag.tasks:
-                if t.email:
-                    if isinstance(t.email, basestring):
-                        l = [t.email]
-                    elif isinstance(t.email, (list, tuple)):
-                        l = t.email
-                    for email in l:
-                        if email not in emails:
-                            emails.append(email)
+            emails = set()
+            for task in dag.tasks:
+                if task.email:
+                    if isinstance(task.email, basestring):
+                        emails.add(task.email)
+                    elif isinstance(task.email, (list, tuple)):
+                        emails |= set(task.email)
             if emails and len(slas):
                 try:
                     send_email(
@@ -817,7 +814,7 @@ def create_dag_run(self, dag, session=None):
                 session.query(func.max(DagRun.execution_date))
                 .filter_by(dag_id=dag.dag_id)
                 .filter(or_(
-                    DagRun.external_trigger == False,
+                    DagRun.external_trigger == False,  # noqa: E712
                     # add % as a wildcard for the like query
                     DagRun.run_id.like(DagRun.ID_PREFIX + '%')
                 ))
@@ -1088,14 +1085,16 @@ def _find_executable_task_instances(self, simple_dag_bag, states, session=None):
                 DR,
                 and_(DR.dag_id == TI.dag_id, DR.execution_date == TI.execution_date)
             )
-            .filter(or_(DR.run_id == None,  # noqa E711
+            .filter(or_(DR.run_id == None,  # noqa: E711
                         not_(DR.run_id.like(BackfillJob.ID_PREFIX + '%'))))
             .outerjoin(DM, DM.dag_id == TI.dag_id)
-            .filter(or_(DM.dag_id == None,  # noqa E711
+            .filter(or_(DM.dag_id == None,  # noqa: E711
                         not_(DM.is_paused)))
         )
         if None in states:
-            ti_query = ti_query.filter(or_(TI.state == None, TI.state.in_(states)))  # noqa E711
+            ti_query = ti_query.filter(
+                or_(TI.state == None, TI.state.in_(states))  # noqa: E711
+            )
         else:
             ti_query = ti_query.filter(TI.state.in_(states))
 
@@ -1183,8 +1182,8 @@ def _find_executable_task_instances(self, simple_dag_bag, states, session=None):
                 )
                 if current_task_concurrency >= task_concurrency_limit:
                     self.log.info(
-                        "Not executing %s since the number of tasks running or queued from DAG %s"
-                        " is >= to the DAG's task concurrency limit of %s",
+                        "Not executing %s since the number of tasks running or queued "
+                        "from DAG %s is >= to the DAG's task concurrency limit of %s",
                         task_instance, dag_id, task_concurrency_limit
                     )
                     continue
@@ -1260,7 +1259,7 @@ def _change_state_for_executable_task_instances(self, task_instances,
 
         if None in acceptable_states:
             ti_query = ti_query.filter(
-                or_(TI.state == None, TI.state.in_(acceptable_states))  # noqa E711
+                or_(TI.state == None, TI.state.in_(acceptable_states))  # noqa: E711
             )
         else:
             ti_query = ti_query.filter(TI.state.in_(acceptable_states))
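The manage_slas hunk above also covers the "remove one-letter variables" part of the change: a list plus membership checks built with the throwaway name l becomes a set. Below is a small, self-contained sketch of the new shape; the Task class and addresses are made up for illustration, and the real code iterates dag.tasks and, targeting Python 2 as well, checks basestring rather than str.

class Task(object):
    # stand-in for an Airflow operator that may carry a single address or a list
    def __init__(self, email):
        self.email = email

tasks = [Task('a@example.com'), Task(['b@example.com', 'a@example.com']), Task(None)]

emails = set()
for task in tasks:
    if task.email:
        if isinstance(task.email, str):
            emails.add(task.email)
        elif isinstance(task.email, (list, tuple)):
            emails |= set(task.email)

print(sorted(emails))  # ['a@example.com', 'b@example.com'] -- duplicates collapse automatically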

airflow/migrations/env.py (+1 -0)

@@ -85,6 +85,7 @@ def run_migrations_online():
         with context.begin_transaction():
             context.run_migrations()
 
+
 if context.is_offline_mode():
     run_migrations_offline()
 else:

airflow/migrations/versions/05f30312d566_merge_heads.py (+0 -3)

@@ -31,9 +31,6 @@
 branch_labels = None
 depends_on = None
 
-from alembic import op
-import sqlalchemy as sa
-
 
 def upgrade():
     pass

airflow/migrations/versions/0e2a74e0fc9f_add_time_zone_awareness.py (+16 -10)

@@ -25,16 +25,16 @@
 
 """
 
+from alembic import op
+from sqlalchemy.dialects import mysql
+import sqlalchemy as sa
+
 # revision identifiers, used by Alembic.
 revision = '0e2a74e0fc9f'
 down_revision = 'd2ae31099d61'
 branch_labels = None
 depends_on = None
 
-from alembic import op
-from sqlalchemy.dialects import mysql
-import sqlalchemy as sa
-
 
 def upgrade():
     conn = op.get_bind()
@@ -69,14 +69,16 @@ def upgrade():
     op.alter_column(table_name='log', column_name='dttm', type_=mysql.TIMESTAMP(fsp=6))
     op.alter_column(table_name='log', column_name='execution_date', type_=mysql.TIMESTAMP(fsp=6))
 
-    op.alter_column(table_name='sla_miss', column_name='execution_date', type_=mysql.TIMESTAMP(fsp=6), nullable=False)
+    op.alter_column(table_name='sla_miss', column_name='execution_date', type_=mysql.TIMESTAMP(fsp=6),
+                    nullable=False)
     op.alter_column(table_name='sla_miss', column_name='timestamp', type_=mysql.TIMESTAMP(fsp=6))
 
     op.alter_column(table_name='task_fail', column_name='execution_date', type_=mysql.TIMESTAMP(fsp=6))
     op.alter_column(table_name='task_fail', column_name='start_date', type_=mysql.TIMESTAMP(fsp=6))
     op.alter_column(table_name='task_fail', column_name='end_date', type_=mysql.TIMESTAMP(fsp=6))
 
-    op.alter_column(table_name='task_instance', column_name='execution_date', type_=mysql.TIMESTAMP(fsp=6), nullable=False)
+    op.alter_column(table_name='task_instance', column_name='execution_date', type_=mysql.TIMESTAMP(fsp=6),
+                    nullable=False)
     op.alter_column(table_name='task_instance', column_name='start_date', type_=mysql.TIMESTAMP(fsp=6))
     op.alter_column(table_name='task_instance', column_name='end_date', type_=mysql.TIMESTAMP(fsp=6))
     op.alter_column(table_name='task_instance', column_name='queued_dttm', type_=mysql.TIMESTAMP(fsp=6))
@@ -117,14 +119,16 @@ def upgrade():
     op.alter_column(table_name='log', column_name='dttm', type_=sa.TIMESTAMP(timezone=True))
     op.alter_column(table_name='log', column_name='execution_date', type_=sa.TIMESTAMP(timezone=True))
 
-    op.alter_column(table_name='sla_miss', column_name='execution_date', type_=sa.TIMESTAMP(timezone=True), nullable=False)
+    op.alter_column(table_name='sla_miss', column_name='execution_date', type_=sa.TIMESTAMP(timezone=True),
+                    nullable=False)
     op.alter_column(table_name='sla_miss', column_name='timestamp', type_=sa.TIMESTAMP(timezone=True))
 
     op.alter_column(table_name='task_fail', column_name='execution_date', type_=sa.TIMESTAMP(timezone=True))
     op.alter_column(table_name='task_fail', column_name='start_date', type_=sa.TIMESTAMP(timezone=True))
     op.alter_column(table_name='task_fail', column_name='end_date', type_=sa.TIMESTAMP(timezone=True))
 
-    op.alter_column(table_name='task_instance', column_name='execution_date', type_=sa.TIMESTAMP(timezone=True), nullable=False)
+    op.alter_column(table_name='task_instance', column_name='execution_date', type_=sa.TIMESTAMP(timezone=True),
+                    nullable=False)
     op.alter_column(table_name='task_instance', column_name='start_date', type_=sa.TIMESTAMP(timezone=True))
     op.alter_column(table_name='task_instance', column_name='end_date', type_=sa.TIMESTAMP(timezone=True))
     op.alter_column(table_name='task_instance', column_name='queued_dttm', type_=sa.TIMESTAMP(timezone=True))
@@ -161,14 +165,16 @@ def downgrade():
     op.alter_column(table_name='log', column_name='dttm', type_=mysql.DATETIME(fsp=6))
    op.alter_column(table_name='log', column_name='execution_date', type_=mysql.DATETIME(fsp=6))
 
-    op.alter_column(table_name='sla_miss', column_name='execution_date', type_=mysql.DATETIME(fsp=6), nullable=False)
+    op.alter_column(table_name='sla_miss', column_name='execution_date', type_=mysql.DATETIME(fsp=6),
+                    nullable=False)
     op.alter_column(table_name='sla_miss', column_name='DATETIME', type_=mysql.DATETIME(fsp=6))
 
     op.alter_column(table_name='task_fail', column_name='execution_date', type_=mysql.DATETIME(fsp=6))
     op.alter_column(table_name='task_fail', column_name='start_date', type_=mysql.DATETIME(fsp=6))
     op.alter_column(table_name='task_fail', column_name='end_date', type_=mysql.DATETIME(fsp=6))
 
-    op.alter_column(table_name='task_instance', column_name='execution_date', type_=mysql.DATETIME(fsp=6), nullable=False)
+    op.alter_column(table_name='task_instance', column_name='execution_date', type_=mysql.DATETIME(fsp=6),
+                    nullable=False)
     op.alter_column(table_name='task_instance', column_name='start_date', type_=mysql.DATETIME(fsp=6))
     op.alter_column(table_name='task_instance', column_name='end_date', type_=mysql.DATETIME(fsp=6))
     op.alter_column(table_name='task_instance', column_name='queued_dttm', type_=mysql.DATETIME(fsp=6))

airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py (+1 -2)

@@ -23,15 +23,14 @@
 Create Date: 2017-01-25 11:43:51.635667
 
 """
+from alembic import op
 
 # revision identifiers, used by Alembic.
 revision = '127d2bf2dfa7'
 down_revision = '5e7d17757c7a'
 branch_labels = None
 depends_on = None
 
-from alembic import op
-import sqlalchemy as sa
 
 def upgrade():
     op.create_index('dag_id_state', 'dag_run', ['dag_id', 'state'], unique=False)

airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py (+3 -4)

@@ -24,17 +24,16 @@
 Create Date: 2015-08-18 18:57:51.927315
 
 """
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.engine.reflection import Inspector
 
 # revision identifiers, used by Alembic.
 revision = '1507a7289a2f'
 down_revision = 'e3a246e0dc1'
 branch_labels = None
 depends_on = None
 
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.engine.reflection import Inspector
-
 connectionhelper = sa.Table(
     'connection',
     sa.MetaData(),

airflow/migrations/versions/1968acfc09e3_add_is_encrypted_column_to_variable_.py (+3 -4)

@@ -24,19 +24,18 @@
 Create Date: 2016-02-02 17:20:55.692295
 
 """
+from alembic import op
+import sqlalchemy as sa
 
 # revision identifiers, used by Alembic.
 revision = '1968acfc09e3'
 down_revision = 'bba5a7cfc896'
 branch_labels = None
 depends_on = None
 
-from alembic import op
-import sqlalchemy as sa
-
 
 def upgrade():
-    op.add_column('variable', sa.Column('is_encrypted', sa.Boolean,default=False))
+    op.add_column('variable', sa.Column('is_encrypted', sa.Boolean, default=False))
 
 
 def downgrade():

airflow/migrations/versions/1b38cef5b76e_add_dagrun.py (+12 -13)

@@ -25,28 +25,27 @@
 
 """
 
+from alembic import op
+import sqlalchemy as sa
+
 # revision identifiers, used by Alembic.
 revision = '1b38cef5b76e'
 down_revision = '502898887f84'
 branch_labels = None
 depends_on = None
 
-from alembic import op
-import sqlalchemy as sa
-
 
 def upgrade():
     op.create_table('dag_run',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('dag_id', sa.String(length=250), nullable=True),
-        sa.Column('execution_date', sa.DateTime(), nullable=True),
-        sa.Column('state', sa.String(length=50), nullable=True),
-        sa.Column('run_id', sa.String(length=250), nullable=True),
-        sa.Column('external_trigger', sa.Boolean(), nullable=True),
-        sa.PrimaryKeyConstraint('id'),
-        sa.UniqueConstraint('dag_id', 'execution_date'),
-        sa.UniqueConstraint('dag_id', 'run_id'),
-    )
+                    sa.Column('id', sa.Integer(), nullable=False),
+                    sa.Column('dag_id', sa.String(length=250), nullable=True),
+                    sa.Column('execution_date', sa.DateTime(), nullable=True),
+                    sa.Column('state', sa.String(length=50), nullable=True),
+                    sa.Column('run_id', sa.String(length=250), nullable=True),
+                    sa.Column('external_trigger', sa.Boolean(), nullable=True),
+                    sa.PrimaryKeyConstraint('id'),
+                    sa.UniqueConstraint('dag_id', 'execution_date'),
+                    sa.UniqueConstraint('dag_id', 'run_id'))
 
 
 def downgrade():
