
Commit b78c7fb

seratch authored and Tao Feng committed
[AIRFLOW-2889] Fix typos detected by github.com/client9/misspell (#3732)
1 parent f999ce2 commit b78c7fb

22 files changed: +28 -28 lines

airflow/contrib/example_dags/example_databricks_operator.py

+1 -1

@@ -32,7 +32,7 @@
 # the spark jar task will NOT run until the notebook task completes
 # successfully.
 #
-# The definition of a succesful run is if the run has a result_state of "SUCCESS".
+# The definition of a successful run is if the run has a result_state of "SUCCESS".
 # For more information about the state of a run refer to
 # https://docs.databricks.com/api/latest/jobs.html#runstate

airflow/contrib/hooks/azure_fileshare_hook.py

+1 -1

@@ -100,7 +100,7 @@ def list_directories_and_files(self, share_name, directory_name=None, **kwargs):
 
     def create_directory(self, share_name, directory_name, **kwargs):
         """
-        Create a new direcotry on a Azure File Share.
+        Create a new directory on a Azure File Share.
 
         :param share_name: Name of the share.
         :type share_name: str

airflow/contrib/hooks/bigquery_hook.py

+1 -1

@@ -627,7 +627,7 @@ def run_query(self,
 
         if query_params:
             if self.use_legacy_sql:
-                raise ValueError("Query paramaters are not allowed when using "
+                raise ValueError("Query parameters are not allowed when using "
                                  "legacy SQL")
             else:
                 configuration['query']['queryParameters'] = query_params
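
The guard above means parameterized queries only work with standard SQL. A minimal sketch of the call pattern, assuming the cursor-style access this era's hook uses (connection id, argument names, and the parameter shape are illustrative, following the BigQuery jobs API, not taken from this diff):

    from airflow.contrib.hooks.bigquery_hook import BigQueryHook

    hook = BigQueryHook(use_legacy_sql=False)  # standard SQL, so params are allowed
    cursor = hook.get_conn().cursor()
    cursor.run_query(
        sql="SELECT * FROM `project.dataset.table` WHERE ds = @ds",
        query_params=[{
            "name": "ds",
            "parameterType": {"type": "STRING"},
            "parameterValue": {"value": "2018-08-12"},
        }],
    )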

airflow/contrib/hooks/emr_hook.py

+1 -1

@@ -23,7 +23,7 @@
 
 class EmrHook(AwsHook):
     """
-    Interact with AWS EMR. emr_conn_id is only neccessary for using the
+    Interact with AWS EMR. emr_conn_id is only necessary for using the
     create_job_flow method.
     """

airflow/contrib/hooks/gcp_dataproc_hook.py

+1 -1

@@ -235,6 +235,6 @@ def wait(self, operation):
     DataProcHook,
     "await",
     deprecation.deprecated(
-        DataProcHook.wait, "renamed to 'wait' for Python3.7 compatability"
+        DataProcHook.wait, "renamed to 'wait' for Python3.7 compatibility"
     ),
 )
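
Context for this hunk: `await` became a reserved keyword in Python 3.7, so a method literally named `await` can no longer even be parsed at a call site; the deprecated alias keeps old code reachable. A tiny illustration (the `hook` and `operation` names are hypothetical):

    # hook.await(operation)             # SyntaxError under Python 3.7+
    getattr(hook, "await")(operation)   # the alias is still reachable dynamically
    hook.wait(operation)                # the new, keyword-safe name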

airflow/contrib/hooks/qubole_hook.py

+1 -1

@@ -125,7 +125,7 @@ def execute(self, context):
 
     def kill(self, ti):
         """
-        Kill (cancel) a Qubole commmand
+        Kill (cancel) a Qubole command
         :param ti: Task Instance of the dag, used to determine the Quboles command id
         :return: response from Qubole
         """

airflow/contrib/hooks/salesforce_hook.py

+2 -2

@@ -53,14 +53,14 @@ def __init__(
 
         :param conn_id: the name of the connection that has the parameters
            we need to connect to Salesforce.
-            The conenction shoud be type `http` and include a
+            The connection shoud be type `http` and include a
            user's security token in the `Extras` field.
        .. note::
            For the HTTP connection type, you can include a
            JSON structure in the `Extras` field.
            We need a user's security token to connect to Salesforce.
            So we define it in the `Extras` field as:
-                `{"security_token":"YOUR_SECRUITY_TOKEN"}`
+                `{"security_token":"YOUR_SECURITY_TOKEN"}`
        """
        self.conn_id = conn_id
        self._args = args
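
The docstring above describes the connection shape. A minimal sketch of such a connection, assuming Airflow's Connection model (the id, login, and token values are placeholders):

    from airflow.models import Connection

    conn = Connection(
        conn_id="salesforce_default",  # hypothetical connection id
        conn_type="http",
        login="user@example.com",
        password="password",
        extra='{"security_token": "YOUR_SECURITY_TOKEN"}',
    )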

airflow/contrib/operators/gcs_to_bq.py

+1 -1

@@ -86,7 +86,7 @@ class GoogleCloudStorageToBigQueryOperator(BaseOperator):
        for other formats.
    :type allow_jagged_rows: bool
    :param max_id_key: If set, the name of a column in the BigQuery table
-        that's to be loaded. Thsi will be used to select the MAX value from
+        that's to be loaded. This will be used to select the MAX value from
        BigQuery after the load occurs. The results will be returned by the
        execute() command, which in turn gets stored in XCom for future
        operators to use. This can be helpful with incremental loads--during
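
A minimal sketch of the incremental-load pattern that max_id_key enables (the bucket, table, column names, and DAG wiring are illustrative, not from this diff):

    load = GoogleCloudStorageToBigQueryOperator(
        task_id="gcs_to_bq_incremental",
        bucket="my-bucket",
        source_objects=["exports/orders-*.json"],
        source_format="NEWLINE_DELIMITED_JSON",
        destination_project_dataset_table="project.dataset.orders",
        max_id_key="order_id",  # MAX(order_id) is selected after the load
        dag=dag,
    )
    # The MAX value is returned by execute() and lands in XCom, so a later task
    # can pull it as a watermark via xcom_pull(task_ids="gcs_to_bq_incremental").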

airflow/contrib/operators/mlengine_operator_utils.py

+1 -1

@@ -160,7 +160,7 @@ def validate_err_and_count(summary):
        then the `dag`'s `default_args['model_name']` will be used.
    :type model_name: string
 
-    :param version_name: Used to indicate a model version to use for prediciton,
+    :param version_name: Used to indicate a model version to use for prediction,
        in combination with model_name. Cannot be used together with model_uri.
        See MLEngineBatchPredictionOperator for more detail. If None, then the
        `dag`'s `default_args['version_name']` will be used.

airflow/contrib/operators/qubole_check_operator.py

+1 -1

@@ -28,7 +28,7 @@ class QuboleCheckOperator(CheckOperator, QuboleOperator):
    """
    Performs checks against Qubole Commands. ``QuboleCheckOperator`` expects
    a command that will be executed on QDS.
-    By default, each value on first row of the result of this Qubole Commmand
+    By default, each value on first row of the result of this Qubole Command
    is evaluated using python ``bool`` casting. If any of the
    values return ``False``, the check is failed and errors out.
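
The bool-casting rule described above reduces to roughly this check (a sketch with a made-up result row, not the operator's actual code):

    from airflow.exceptions import AirflowException

    first_row = [1, "ok", 0.0]  # hypothetical first row of the command's result
    if not all(bool(value) for value in first_row):
        raise AirflowException("Check failed: a value in the first row is falsy")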

airflow/contrib/plugins/metastore_browser/templates/metastore_browser/table.html

+1 -1

@@ -28,7 +28,7 @@ <h4>
   <li role="presentation" class="active"><a href="#home" aria-controls="fields" role="tab" data-toggle="tab">Fields</a></li>
   <li role="presentation"><a href="#data" aria-controls="data" role="tab" data-toggle="tab">Sample Data</a></li>
   <li role="presentation"><a href="#partitions" aria-controls="partitions" role="tab" data-toggle="tab">Partitions</a></li>
-  <li role="presentation"><a href="#attributes" aria-controls="attributes" role="tab" data-toggle="tab">Atributes</a></li>
+  <li role="presentation"><a href="#attributes" aria-controls="attributes" role="tab" data-toggle="tab">Attributes</a></li>
   <li role="presentation"><a href="#parameters" aria-controls="parameters" role="tab" data-toggle="tab">Parameters</a></li>
   <li role="presentation"><a href="#ddl" aria-controls="ddl" role="tab" data-toggle="tab">DDL</a></li>
 </ul>

airflow/hooks/mysql_hook.py

+1 -1

@@ -121,7 +121,7 @@ def bulk_dump(self, table, tmp_file):
    def _serialize_cell(cell, conn):
        """
        MySQLdb converts an argument to a literal
-        when passing those seperately to execute. Hence, this method does nothing.
+        when passing those separately to execute. Hence, this method does nothing.
 
        :param cell: The cell to insert into the table
        :type cell: object
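
For context, MySQLdb performs its own escaping when values are passed separately from the SQL string, which is why _serialize_cell can be a no-op. An illustrative parameterized insert (cursor, table, and values are made up):

    import datetime

    cursor.execute(
        "INSERT INTO users (name, created_at) VALUES (%s, %s)",
        ("alice", datetime.datetime(2018, 8, 12)),
    )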

airflow/models.py

+1 -1

@@ -942,7 +942,7 @@ def init_on_load(self):
    @property
    def try_number(self):
        """
-        Return the try number that this task number will be when it is acutally
+        Return the try number that this task number will be when it is actually
        run.
 
        If the TI is currently running, this will match the column in the

airflow/operators/hive_to_druid.py

+1 -1

@@ -164,7 +164,7 @@ def construct_ingest_query(self, static_path, columns):
        :type columns: list
        """
 
-        # backward compatibilty for num_shards,
+        # backward compatibility for num_shards,
        # but target_partition_size is the default setting
        # and overwrites the num_shards
        num_shards = self.num_shards

airflow/sensors/hdfs_sensor.py

+3 -3

@@ -88,12 +88,12 @@ def filter_for_ignored_ext(result, ignored_ext, ignore_copying):
        if ignore_copying:
            log = LoggingMixin().log
            regex_builder = "^.*\.(%s$)$" % '$|'.join(ignored_ext)
-            ignored_extentions_regex = re.compile(regex_builder)
+            ignored_extensions_regex = re.compile(regex_builder)
            log.debug(
                'Filtering result for ignored extensions: %s in files %s',
-                ignored_extentions_regex.pattern, map(lambda x: x['path'], result)
+                ignored_extensions_regex.pattern, map(lambda x: x['path'], result)
            )
-            result = [x for x in result if not ignored_extentions_regex.match(x['path'])]
+            result = [x for x in result if not ignored_extensions_regex.match(x['path'])]
            log.debug('HdfsSensor.poke: after ext filter result is %s', result)
            return result
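
A quick check of the pattern built above, using '_COPYING_' as the ignored extension (an HDFS file still being written); the file paths are made up:

    import re

    pattern = re.compile("^.*\\.(%s$)$" % '$|'.join(['_COPYING_']))
    assert pattern.match('/data/part-0000._COPYING_')  # filtered out: in-flight copy
    assert not pattern.match('/data/part-0000')        # finished file passes through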

dev/airflow-pr

+3 -3

@@ -714,8 +714,8 @@ def standardize_jira_ref(text, only_jira=False):
    '[AIRFLOW-5954][MLLIB] Top by key'
    >>> standardize_jira_ref("[AIRFLOW-979] a LRU scheduler for load balancing in TaskSchedulerImpl")
    '[AIRFLOW-979] a LRU scheduler for load balancing in TaskSchedulerImpl'
-    >>> standardize_jira_ref("AIRFLOW-1094 Support MiMa for reporting binary compatibility accross versions.")
-    '[AIRFLOW-1094] Support MiMa for reporting binary compatibility accross versions.'
+    >>> standardize_jira_ref("AIRFLOW-1094 Support MiMa for reporting binary compatibility across versions.")
+    '[AIRFLOW-1094] Support MiMa for reporting binary compatibility across versions.'
    >>> standardize_jira_ref("[WIP] [AIRFLOW-1146] Vagrant support for Spark")
    '[AIRFLOW-1146][WIP] Vagrant support for Spark'
    >>> standardize_jira_ref("AIRFLOW-1032. If Yarn app fails before registering, app master stays aroun...")

@@ -942,7 +942,7 @@ def cli():
    status = run_cmd('git status --porcelain', echo_cmd=False)
    if status:
        msg = (
-            'You have uncomitted changes in this branch. Running this tool\n'
+            'You have uncommitted changes in this branch. Running this tool\n'
            'will delete them permanently. Continue?')
        if click.confirm(click.style(msg, fg='red', bold=True)):
            run_cmd('git reset --hard', echo_cmd=False)

docs/howto/write-logs.rst

+1 -1

@@ -11,7 +11,7 @@ directory.
 In addition, users can supply a remote location for storing logs and log
 backups in cloud storage.
 
-In the Airflow Web UI, local logs take precedance over remote logs. If local logs
+In the Airflow Web UI, local logs take precedence over remote logs. If local logs
 can not be found or accessed, the remote logs will be displayed. Note that logs
 are only sent to remote storage once a task completes (including failure). In other
 words, remote logs for running tasks are unavailable. Logs are stored in the log

scripts/ci/kubernetes/kube/secrets.yaml

+1 -1

@@ -20,6 +20,6 @@ metadata:
   name: airflow-secrets
 type: Opaque
 data:
-  # The sql_alchemy_conn value is a base64 encoded represenation of this connection string:
+  # The sql_alchemy_conn value is a base64 encoded representation of this connection string:
   # postgresql+psycopg2://root:root@postgres-airflow:5432/airflow
   sql_alchemy_conn: cG9zdGdyZXNxbCtwc3ljb3BnMjovL3Jvb3Q6cm9vdEBwb3N0Z3Jlcy1haXJmbG93OjU0MzIvYWlyZmxvdwo=
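
The encoded value can be reproduced as follows; note the trailing newline, which the committed blob includes (a sketch using only Python's standard library):

    import base64

    conn = "postgresql+psycopg2://root:root@postgres-airflow:5432/airflow\n"
    print(base64.b64encode(conn.encode()).decode())
    # cG9zdGdyZXNxbCtwc3ljb3BnMjovL3Jvb3Q6cm9vdEBwb3N0Z3Jlcy1haXJmbG93OjU0MzIvYWlyZmxvdwo=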

tests/contrib/hooks/test_bigquery_hook.py

+2 -2

@@ -52,12 +52,12 @@ def test_throws_exception_with_invalid_query(self):
        self.assertIn('Reason: ', str(context.exception), "")
 
    @unittest.skipIf(not bq_available, 'BQ is not available to run tests')
-    def test_suceeds_with_explicit_legacy_query(self):
+    def test_succeeds_with_explicit_legacy_query(self):
        df = self.instance.get_pandas_df('select 1', dialect='legacy')
        self.assertEqual(df.iloc(0)[0][0], 1)
 
    @unittest.skipIf(not bq_available, 'BQ is not available to run tests')
-    def test_suceeds_with_explicit_std_query(self):
+    def test_succeeds_with_explicit_std_query(self):
        df = self.instance.get_pandas_df(
            'select * except(b) from (select 1 a, 2 b)', dialect='standard')
        self.assertEqual(df.iloc(0)[0][0], 1)

tests/contrib/operators/test_ecs_operator.py

+1 -1

@@ -181,7 +181,7 @@ def test_check_success_tasks_raises_pending(self):
        self.assertIn("'lastStatus': 'PENDING'", str(e.exception))
        client_mock.describe_tasks.assert_called_once_with(cluster='c', tasks=['arn'])
 
-    def test_check_success_tasks_raises_mutliple(self):
+    def test_check_success_tasks_raises_multiple(self):
        client_mock = mock.Mock()
        self.ecs.client = client_mock
        self.ecs.arn = 'arn'

tests/core.py

+1 -1

@@ -831,7 +831,7 @@ def test_bad_trigger_rule(self):
        with self.assertRaises(AirflowException):
            DummyOperator(
                task_id='test_bad_trigger',
-                trigger_rule="non_existant",
+                trigger_rule="non_existent",
                dag=self.dag)
 
    def test_terminate_task(self):
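
For context, valid values come from airflow.utils.trigger_rule.TriggerRule; anything else fails at operator construction, which is what this test asserts. An illustrative valid usage (task id and DAG wiring are made up):

    from airflow.utils.trigger_rule import TriggerRule

    join = DummyOperator(
        task_id="join",
        trigger_rule=TriggerRule.ALL_DONE,  # run once upstream finishes, pass or fail
        dag=dag,
    )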

tests/models.py

+1 -1

@@ -97,7 +97,7 @@ def test_dag_as_context_manager(self):
        """
        Test DAG as a context manager.
        When used as a context manager, Operators are automatically added to
-        the DAG (unless they specifiy a different DAG)
+        the DAG (unless they specify a different DAG)
        """
        dag = DAG(
            'dag',
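
A minimal sketch of the context-manager behavior this test covers (dates and ids are illustrative):

    from datetime import datetime
    from airflow import DAG
    from airflow.operators.dummy_operator import DummyOperator

    with DAG("example", start_date=datetime(2018, 8, 1)) as dag:
        task = DummyOperator(task_id="noop")  # added to `dag` automatically

    assert task.dag is dag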
