@@ -21,7 +21,8 @@

 from airflow.contrib.operators.bigquery_operator import \
     BigQueryCreateExternalTableOperator, BigQueryCreateEmptyTableOperator, \
-    BigQueryDeleteDatasetOperator, BigQueryCreateEmptyDatasetOperator
+    BigQueryDeleteDatasetOperator, BigQueryCreateEmptyDatasetOperator, \
+    BigQueryOperator

 try:
     from unittest import mock
@@ -143,3 +144,84 @@ def test_execute(self, mock_hook):
                 project_id=TEST_PROJECT_ID,
                 dataset_reference={}
             )
+
+
+class BigQueryOperatorTest(unittest.TestCase):
+    @mock.patch('airflow.contrib.operators.bigquery_operator.BigQueryHook')
+    def test_execute(self, mock_hook):
+        operator = BigQueryOperator(
+            task_id=TASK_ID,
+            sql='Select * from test_table',
+            destination_dataset_table=None,
+            write_disposition='WRITE_EMPTY',
+            allow_large_results=False,
+            flatten_results=None,
+            bigquery_conn_id='bigquery_default',
+            udf_config=None,
+            use_legacy_sql=True,
+            maximum_billing_tier=None,
+            maximum_bytes_billed=None,
+            create_disposition='CREATE_IF_NEEDED',
+            schema_update_options=(),
+            query_params=None,
+            labels=None,
+            priority='INTERACTIVE',
+            time_partitioning=None,
+            api_resource_configs=None,
+            cluster_fields=None,
+        )
+
+        operator.execute(None)
+        mock_hook.return_value \
+            .get_conn() \
+            .cursor() \
+            .run_query \
+            .assert_called_once_with(
+                sql='Select * from test_table',
+                destination_dataset_table=None,
+                write_disposition='WRITE_EMPTY',
+                allow_large_results=False,
+                flatten_results=None,
+                udf_config=None,
+                maximum_billing_tier=None,
+                maximum_bytes_billed=None,
+                create_disposition='CREATE_IF_NEEDED',
+                schema_update_options=(),
+                query_params=None,
+                labels=None,
+                priority='INTERACTIVE',
+                time_partitioning=None,
+                api_resource_configs=None,
+                cluster_fields=None,
+            )
+
+    @mock.patch('airflow.contrib.operators.bigquery_operator.BigQueryHook')
+    def test_bigquery_operator_defaults(self, mock_hook):
+        operator = BigQueryOperator(
+            task_id=TASK_ID,
+            sql='Select * from test_table',
+        )
+
+        operator.execute(None)
+        mock_hook.return_value \
+            .get_conn() \
+            .cursor() \
+            .run_query \
+            .assert_called_once_with(
+                sql='Select * from test_table',
+                destination_dataset_table=None,
+                write_disposition='WRITE_EMPTY',
+                allow_large_results=False,
+                flatten_results=None,
+                udf_config=None,
+                maximum_billing_tier=None,
+                maximum_bytes_billed=None,
+                create_disposition='CREATE_IF_NEEDED',
+                schema_update_options=(),
+                query_params=None,
+                labels=None,
+                priority='INTERACTIVE',
+                time_partitioning=None,
+                api_resource_configs=None,
+                cluster_fields=None,
+            )