@@ -53,6 +53,8 @@ def tearDown(self):
             for table in drop_tables:
                 conn.execute("DROP TABLE IF EXISTS {}".format(table))
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_operator_test(self):
         sql = """
         CREATE TABLE IF NOT EXISTS test_airflow (
@@ -66,8 +68,11 @@ def test_mysql_operator_test(self):
             dag=self.dag)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_operator_test_multi(self):
         sql = [
+            "CREATE TABLE IF NOT EXISTS test_airflow (dummy VARCHAR(50))",
             "TRUNCATE TABLE test_airflow",
             "INSERT INTO test_airflow VALUES ('X')",
         ]
@@ -79,6 +84,8 @@ def test_mysql_operator_test_multi(self):
         )
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_hook_test_bulk_load(self):
         records = ("foo", "bar", "baz")
 
@@ -101,6 +108,8 @@ def test_mysql_hook_test_bulk_load(self):
                 results = tuple(result[0] for result in c.fetchall())
                 self.assertEqual(sorted(results), sorted(records))
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_hook_test_bulk_dump(self):
         from airflow.hooks.mysql_hook import MySqlHook
         hook = MySqlHook('airflow_db')
@@ -112,6 +121,8 @@ def test_mysql_hook_test_bulk_dump(self):
             self.skipTest("Skip test_mysql_hook_test_bulk_load "
                           "since file output is not permitted")
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     @mock.patch('airflow.hooks.mysql_hook.MySqlHook.get_conn')
     def test_mysql_hook_test_bulk_dump_mock(self, mock_get_conn):
         mock_execute = mock.MagicMock()
@@ -131,6 +142,8 @@ def test_mysql_hook_test_bulk_dump_mock(self, mock_get_conn):
         """.format(tmp_file=tmp_file, table=table)
         assertEqualIgnoreMultipleSpaces(self, mock_execute.call_args[0][0], query)
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_to_mysql(self):
         sql = "SELECT * FROM INFORMATION_SCHEMA.TABLES LIMIT 100;"
         from airflow.operators.generic_transfer import GenericTransfer
@@ -148,6 +161,8 @@ def test_mysql_to_mysql(self):
             dag=self.dag)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_overwrite_schema(self):
         """
         Verifies option to overwrite connection schema
@@ -177,6 +192,16 @@ def setUp(self):
         dag = DAG(TEST_DAG_ID, default_args=args)
         self.dag = dag
 
+    def tearDown(self):
+        tables_to_drop = ['test_postgres_to_postgres', 'test_airflow']
+        from airflow.hooks.postgres_hook import PostgresHook
+        with PostgresHook().get_conn() as conn:
+            with conn.cursor() as cur:
+                for t in tables_to_drop:
+                    cur.execute("DROP TABLE IF EXISTS {}".format(t))
+
+    @unittest.skipUnless('postgres' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a Postgres test")
     def test_postgres_operator_test(self):
         sql = """
         CREATE TABLE IF NOT EXISTS test_airflow (
@@ -197,8 +222,11 @@ def test_postgres_operator_test(self):
               end_date=DEFAULT_DATE,
               ignore_ti_state=True)
 
+    @unittest.skipUnless('postgres' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a Postgres test")
     def test_postgres_operator_test_multi(self):
         sql = [
+            "CREATE TABLE IF NOT EXISTS test_airflow (dummy VARCHAR(50))",
             "TRUNCATE TABLE test_airflow",
             "INSERT INTO test_airflow VALUES ('X')",
         ]
@@ -207,6 +235,8 @@ def test_postgres_operator_test_multi(self):
             task_id='postgres_operator_test_multi', sql=sql, dag=self.dag)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('postgres' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a Postgres test")
     def test_postgres_to_postgres(self):
         sql = "SELECT * FROM INFORMATION_SCHEMA.TABLES LIMIT 100;"
         from airflow.operators.generic_transfer import GenericTransfer
@@ -224,6 +254,8 @@ def test_postgres_to_postgres(self):
             dag=self.dag)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('postgres' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a Postgres test")
     def test_vacuum(self):
         """
         Verifies the VACUUM operation runs well with the PostgresOperator
@@ -238,6 +270,8 @@ def test_vacuum(self):
             autocommit=True)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('postgres' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a Postgres test")
     def test_overwrite_schema(self):
         """
         Verifies option to overwrite connection schema
@@ -343,11 +377,15 @@ def tearDown(self):
         with MySqlHook().get_conn() as cur:
             cur.execute("DROP TABLE IF EXISTS baby_names CASCADE;")
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_clear(self):
         self.dag.clear(
             start_date=DEFAULT_DATE,
             end_date=timezone.utcnow())
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_to_hive(self):
         from airflow.operators.mysql_to_hive import MySqlToHiveTransfer
         sql = "SELECT * FROM baby_names LIMIT 1000;"
@@ -361,6 +399,8 @@ def test_mysql_to_hive(self):
             dag=self.dag)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_to_hive_partition(self):
         from airflow.operators.mysql_to_hive import MySqlToHiveTransfer
         sql = "SELECT * FROM baby_names LIMIT 1000;"
@@ -376,6 +416,8 @@ def test_mysql_to_hive_partition(self):
             dag=self.dag)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_to_hive_tblproperties(self):
         from airflow.operators.mysql_to_hive import MySqlToHiveTransfer
         sql = "SELECT * FROM baby_names LIMIT 1000;"
@@ -390,6 +432,8 @@ def test_mysql_to_hive_tblproperties(self):
             dag=self.dag)
         t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     @mock.patch('airflow.hooks.hive_hooks.HiveCliHook.load_file')
     def test_mysql_to_hive_type_conversion(self, mock_load_file):
         mysql_table = 'test_mysql_to_hive'
@@ -433,6 +477,8 @@ def test_mysql_to_hive_type_conversion(self, mock_load_file):
             with m.get_conn() as c:
                 c.execute("DROP TABLE IF EXISTS {}".format(mysql_table))
 
+    @unittest.skipUnless('mysql' in configuration.conf.get('core', 'sql_alchemy_conn'),
+                         "This is a MySQL test")
     def test_mysql_to_hive_verify_loaded_values(self):
         mysql_table = 'test_mysql_to_hive'
         hive_table = 'test_mysql_to_hive'
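
For reference, every hunk above applies the same guard: a unittest.skipUnless decorator keyed off Airflow's [core] sql_alchemy_conn setting, so backend-specific tests are reported as skipped rather than failed when the metadata DB is a different engine. A minimal, self-contained sketch of that pattern follows; the TestBackendGuardSketch class and its test names are illustrative only and not part of this change, and it assumes an Airflow 1.10-era airflow.configuration import path.

import unittest

from airflow import configuration

# Connection string of Airflow's metadata DB, e.g. "mysql://..." or "postgresql+psycopg2://...".
SQL_ALCHEMY_CONN = configuration.conf.get('core', 'sql_alchemy_conn')


class TestBackendGuardSketch(unittest.TestCase):
    """Illustrative only: demonstrates the skip-guard pattern used throughout the diff."""

    @unittest.skipUnless('mysql' in SQL_ALCHEMY_CONN, "This is a MySQL test")
    def test_runs_only_against_mysql(self):
        # Executes only when sql_alchemy_conn points at MySQL; on SQLite or
        # Postgres the runner reports this test as skipped instead of failed.
        self.assertIn('mysql', SQL_ALCHEMY_CONN)

    @unittest.skipUnless('postgres' in SQL_ALCHEMY_CONN, "This is a Postgres test")
    def test_runs_only_against_postgres(self):
        self.assertIn('postgres', SQL_ALCHEMY_CONN)


if __name__ == '__main__':
    unittest.main()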