
Commit 4740026

XD-DENG authored and kaxil committed
[AIRFLOW-3175] Fix docstring format in airflow/jobs.py (#4025)
These docstrings could not be parsed properly in Sphinx syntax.
1 parent: 7b9911e · commit: 4740026

File tree: 1 file changed (+23, -12 lines)


airflow/jobs.py (+23, -12)
@@ -349,10 +349,10 @@ def _launch_process(result_queue,
     :param file_path: the file to process
     :type file_path: unicode
     :param pickle_dags: whether to pickle the DAGs found in the file and
-    save them to the DB
+        save them to the DB
     :type pickle_dags: bool
     :param dag_id_white_list: if specified, only examine DAG ID's that are
-    in this list
+        in this list
     :type dag_id_white_list: list[unicode]
     :param thread_name: the name to use for the process that is launched
     :type thread_name: unicode
@@ -424,6 +424,7 @@ def start(self):
 def terminate(self, sigkill=False):
     """
     Terminate (and then kill) the process launched to process the file.
+
     :param sigkill: whether to issue a SIGKILL if SIGTERM doesn't work.
     :type sigkill: bool
     """
@@ -452,6 +453,7 @@ def pid(self):
 def exit_code(self):
     """
     After the process is finished, this can be called to get the return code
+
     :return: the exit code of the process
     :rtype: int
     """
@@ -463,6 +465,7 @@ def exit_code(self):
 def done(self):
     """
     Check if the process launched to process this file is done.
+
     :return: whether the process is finished running
     :rtype: bool
     """
@@ -544,16 +547,18 @@ def __init__(
     :param dag_ids: if specified, only schedule tasks with these DAG IDs
     :type dag_ids: list[unicode]
     :param subdir: directory containing Python files with Airflow DAG
-    definitions, or a specific path to a file
+        definitions, or a specific path to a file
     :type subdir: unicode
     :param num_runs: The number of times to try to schedule each DAG file.
-    -1 for unlimited within the run_duration.
+        -1 for unlimited within the run_duration.
+    :type num_runs: int
     :param processor_poll_interval: The number of seconds to wait between
-    polls of running processors
+        polls of running processors
+    :type processor_poll_interval: int
     :param run_duration: how long to run (in seconds) before exiting
     :type run_duration: int
     :param do_pickle: once a DAG object is obtained by executing the Python
-    file, whether to serialize the DAG object to the DB
+        file, whether to serialize the DAG object to the DB
     :type do_pickle: bool
     """
     # for BaseJob compatibility
@@ -783,7 +788,7 @@ def update_import_errors(session, dagbag):
 def create_dag_run(self, dag, session=None):
     """
     This method checks whether a new DagRun needs to be created
-    for a DAG based on scheduling interval
+    for a DAG based on scheduling interval.
     Returns DagRun if one is scheduled. Otherwise returns None.
     """
     if dag.schedule_interval:
@@ -991,7 +996,7 @@ def _change_state_for_tis_without_dagrun(self,
     :param new_state: set TaskInstances to this state
     :type new_state: State
     :param simple_dag_bag: TaskInstances associated with DAGs in the
-    simple_dag_bag and with states in the old_state will be examined
+        simple_dag_bag and with states in the old_state will be examined
     :type simple_dag_bag: SimpleDagBag
     """
     tis_changed = 0
@@ -1062,7 +1067,7 @@ def _find_executable_task_instances(self, simple_dag_bag, states, session=None):
     dag concurrency, executor state, and priority.

     :param simple_dag_bag: TaskInstances associated with DAGs in the
-    simple_dag_bag will be fetched from the DB and executed
+        simple_dag_bag will be fetched from the DB and executed
     :type simple_dag_bag: SimpleDagBag
     :param executor: the executor that runs task instances
     :type executor: BaseExecutor
@@ -1373,7 +1378,7 @@ def _execute_task_instances(self,
     3. Enqueue the TIs in the executor.

     :param simple_dag_bag: TaskInstances associated with DAGs in the
-    simple_dag_bag will be fetched from the DB and executed
+        simple_dag_bag will be fetched from the DB and executed
     :type simple_dag_bag: SimpleDagBag
     :param states: Execute TaskInstances in these states
     :type states: Tuple[State]
@@ -1482,7 +1487,7 @@ def _log_file_processing_stats(self,
     Print out stats about how files are getting processed.

     :param known_file_paths: a list of file paths that may contain Airflow
-    DAG definitions
+        DAG definitions
     :type known_file_paths: list[unicode]
     :param processor_manager: manager for the file processors
     :type stats: DagFileProcessorManager
@@ -1788,7 +1793,7 @@ def process_file(self, file_path, pickle_dags=False, session=None):
     :param file_path: the path to the Python file that should be executed
     :type file_path: unicode
     :param pickle_dags: whether serialize the DAGs found in the file and
-    save them to the db
+        save them to the db
     :type pickle_dags: bool
     :return: a list of SimpleDags made from the Dags found in the file
     :rtype: list[SimpleDag]
@@ -2027,6 +2032,7 @@ def _update_counters(self, ti_status):
     """
     Updates the counters per state of the tasks that were running. Can re-add
     to tasks to run in case required.
+
     :param ti_status: the internal status of the backfill job tasks
     :type ti_status: BackfillJob._DagRunTaskStatus
     """
@@ -2071,6 +2077,7 @@ def _manage_executor_state(self, running):
     """
     Checks if the executor agrees with the state of task instances
     that are running
+
     :param running: dict of key, task to verify
     """
     executor = self.executor
@@ -2102,6 +2109,7 @@ def _get_dag_run(self, run_date, session=None):
     Returns a dag run for the given run date, which will be matched to an existing
     dag run if available or create a new dag run otherwise. If the max_active_runs
     limit is reached, this function will return None.
+
     :param run_date: the execution date for the dag run
     :type run_date: datetime
     :param session: the database session object
@@ -2161,6 +2169,7 @@ def _task_instances_for_dag_run(self, dag_run, session=None):
     """
     Returns a map of task instance key to task instance object for the tasks to
     run in the given dag run.
+
     :param dag_run: the dag run to get the tasks from
     :type dag_run: models.DagRun
     :param session: the database session object
@@ -2226,6 +2235,7 @@ def _process_backfill_task_instances(self,
     Process a set of task instances from a set of dag runs. Special handling is done
     to account for different task instance states that could be present when running
     them in a backfill process.
+
     :param ti_status: the internal status of the job
     :type ti_status: BackfillJob._DagRunTaskStatus
     :param executor: the executor to run the task instances
@@ -2463,6 +2473,7 @@ def _execute_for_run_dates(self, run_dates, ti_status, executor, pickle_id,
     Computes the dag runs and their respective task instances for
     the given run dates and executes the task instances.
     Returns a list of execution dates of the dag runs that were executed.
+
     :param run_dates: Execution dates for dag runs
     :type run_dates: list
     :param ti_status: internal BackfillJob status structure to tis track progress
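For reference, the formatting rule behind these changes: in Sphinx/reStructuredText field lists, a continuation line of a :param: description must be indented past the field marker, and a blank line between the docstring summary and the field list keeps docutils from folding the two together. A minimal sketch of the target style follows; the function and parameter names are hypothetical, not taken from airflow/jobs.py:

# Hypothetical example showing the Sphinx-parsable docstring form this commit
# converges on: blank line after the summary, and the second line of the
# pickle_dags description indented so it stays part of the same field body.
def launch_process(file_path, pickle_dags=False):
    """
    Launch a process to parse the given DAG definition file.

    :param file_path: the file to process
    :type file_path: unicode
    :param pickle_dags: whether to pickle the DAGs found in the file and
        save them to the DB
    :type pickle_dags: bool
    :return: whether the process was launched
    :rtype: bool
    """
    return True

Without the extra indentation, docutils treats the wrapped line of the pickle_dags description as a new paragraph rather than part of the field body, which is the kind of mis-rendering this commit fixes.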
