Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

pydocstyle D202 added #24221

Merged
merged 1 commit into from
Jun 7, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -242,7 +242,7 @@ repos:
name: Run pydocstyle
args:
- --convention=pep257
- --add-ignore=D100,D102,D103,D104,D105,D107,D202,D205,D400,D401
- --add-ignore=D100,D102,D103,D104,D105,D107,D205,D400,D401
exclude: |
(?x)
^tests/.*\.py$|
Expand Down
1 change: 0 additions & 1 deletion airflow/cli/commands/dag_command.py
Original file line number Diff line number Diff line change
Expand Up @@ -253,7 +253,6 @@ def dag_state(args, session=NEW_SESSION):
>>> airflow dags state a_dag_with_conf_passed 2015-01-01T00:00:00.000000
failed, {"name": "bob", "age": "42"}
"""

dag = DagModel.get_dagmodel(args.dag_id, session=session)

if not dag:
Expand Down
1 change: 0 additions & 1 deletion airflow/models/dag.py
Original file line number Diff line number Diff line change
Expand Up @@ -1976,7 +1976,6 @@ def partial_subset(
:param include_direct_upstream: Include all tasks directly upstream of matched
and downstream (if include_downstream = True) tasks
"""

from airflow.models.baseoperator import BaseOperator
from airflow.models.mappedoperator import MappedOperator

Expand Down
1 change: 0 additions & 1 deletion airflow/models/mappedoperator.py
Original file line number Diff line number Diff line change
Expand Up @@ -843,7 +843,6 @@ def run_time_mapped_ti_count(self, run_id: str, *, session: Session) -> Optional
:return: None if upstream tasks are not complete yet, or else total number of mapped TIs this task
should have
"""

lengths = self._get_map_lengths(run_id, session=session)
expansion_kwargs = self._get_expansion_kwargs()

Expand Down
2 changes: 0 additions & 2 deletions airflow/providers/amazon/aws/hooks/quicksight.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,6 @@ def create_ingestion(
having Ingestion ARN, HTTP status, ingestion ID and ingestion status.
:rtype: Dict
"""

self.log.info("Creating QuickSight Ingestion for data set id %s.", data_set_id)
quicksight_client = self.get_conn()
try:
Expand Down Expand Up @@ -136,7 +135,6 @@ def wait_for_state(
will check the status of QuickSight Ingestion
:return: response of describe_ingestion call after Ingestion is done
"""

sec = 0
status = self.get_status(aws_account_id, data_set_id, ingestion_id)
while status in self.NON_TERMINAL_STATES and status != target_state:
Expand Down
1 change: 0 additions & 1 deletion airflow/providers/amazon/aws/hooks/s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,6 @@ def get_s3_bucket_key(
:return: the parsed bucket name and key
:rtype: tuple of str
"""

if bucket is None:
return S3Hook.parse_s3_url(key)

Expand Down
1 change: 0 additions & 1 deletion airflow/providers/amazon/aws/hooks/sts.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@ def __init__(self, *args, **kwargs):

def get_account_number(self) -> str:
"""Get the account number"""

try:
return self.get_conn().get_caller_identity()['Account']
except Exception as general_error:
Expand Down
1 change: 0 additions & 1 deletion airflow/providers/amazon/aws/sensors/rds.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,6 @@ def _describe_item(self, item_type: str, item_name: str) -> list:

def _check_item(self, item_type: str, item_name: str) -> bool:
"""Get certain item from `_describe_item()` and check its status"""

try:
items = self._describe_item(item_type, item_name)
except ClientError:
Expand Down
1 change: 0 additions & 1 deletion airflow/providers/cncf/kubernetes/hooks/kubernetes.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,6 @@ def _deprecation_warning_core_param(deprecation_warnings):

def get_conn(self) -> Any:
"""Returns kubernetes api session for use with requests"""

in_cluster = self._coalesce_param(
self.in_cluster, self.conn_extras.get("extra__kubernetes__in_cluster") or None
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -593,7 +593,6 @@ def _patch_deprecated_k8s_settings(self, hook: KubernetesHook):
When we find values there that we need to apply on the hook, we patch special
hook attributes here.
"""

# default for enable_tcp_keepalive is True; patch if False
if conf.getboolean('kubernetes', 'enable_tcp_keepalive') is False:
hook._deprecated_core_disable_tcp_keepalive = True
Expand Down
1 change: 0 additions & 1 deletion airflow/providers/google/cloud/hooks/bigquery.py
Original file line number Diff line number Diff line change
Expand Up @@ -361,7 +361,6 @@ def create_empty_table(
:param exists_ok: If ``True``, ignore "already exists" errors when creating the table.
:return: Created table
"""

_table_resource: Dict[str, Any] = {}

if self.location:
Expand Down
2 changes: 0 additions & 2 deletions airflow/providers/google/cloud/hooks/looker.py
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,6 @@ def wait_for_job(

def get_looker_sdk(self):
"""Returns Looker SDK client for Looker API 4.0."""

conn = self.get_connection(self.looker_conn_id)
settings = LookerApiSettings(conn)

Expand Down Expand Up @@ -214,7 +213,6 @@ def read_config(self):
Overrides the default logic of getting connection settings. Fetches
the connection settings from Airflow's connection object.
"""

config = {}

if self.conn.host is None:
Expand Down
1 change: 0 additions & 1 deletion airflow/providers/hashicorp/secrets/vault.py
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,6 @@ def get_conn_uri(self, conn_id: str) -> Optional[str]:
:rtype: str
:return: The connection uri retrieved from the secret
"""

# Since VaultBackend implements `get_connection`, `get_conn_uri` is not used. So we
# don't need to implement (or direct users to use) method `get_conn_value` instead
warnings.warn(
Expand Down
2 changes: 0 additions & 2 deletions airflow/utils/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -861,7 +861,6 @@ def reflect_tables(tables: List[Union[Base, str]], session):
This function gets the current state of each table in the set of models provided and returns
a SqlAlchemy metadata object containing them.
"""

import sqlalchemy.schema

metadata = sqlalchemy.schema.MetaData(session.bind)
Expand Down Expand Up @@ -1173,7 +1172,6 @@ def _move_duplicate_data_to_new_table(
building the DELETE FROM join condition.
:param target_table_name: name of the table in which to park the duplicate rows
"""

bind = session.get_bind()
dialect_name = bind.dialect.name
query = (
Expand Down
1 change: 0 additions & 1 deletion airflow/utils/process_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -320,7 +320,6 @@ def set_new_process_group() -> None:
rather than having to iterate the child processes.
If the current process was spawned by the system call ``exec()`` then keep the current process group
"""

if os.getpid() == os.getsid(0):
# If PID = SID then the process is a session leader, and it is not possible to change its process group
return
Expand Down