@@ -20,9 +20,10 @@
 
 import unittest
 
-from airflow.contrib.operators.dataflow_operator import DataFlowPythonOperator, \
-    DataFlowJavaOperator, DataflowTemplateOperator
-from airflow.contrib.operators.dataflow_operator import DataFlowPythonOperator
+from airflow.contrib.operators.dataflow_operator import \
+    DataFlowPythonOperator, DataFlowJavaOperator, \
+    DataflowTemplateOperator, GoogleCloudBucketHelper
+
 from airflow.version import version
 
 try:
@@ -186,3 +187,25 @@ def test_exec(self, dataflow_mock):
         }
         start_template_hook.assert_called_once_with(TASK_ID, expected_options,
                                                      PARAMETERS, TEMPLATE)
+
+
+class GoogleCloudBucketHelperTest(unittest.TestCase):
+
+    @mock.patch(
+        'airflow.contrib.operators.dataflow_operator.GoogleCloudBucketHelper.__init__'
+    )
+    def test_invalid_object_path(self, mock_parent_init):
+
+        # This is just the path of a bucket, hence an invalid file name.
+        file_name = 'gs://test-bucket'
+        mock_parent_init.return_value = None
+
+        gcs_bucket_helper = GoogleCloudBucketHelper()
+        gcs_bucket_helper._gcs_hook = mock.Mock()
+
+        with self.assertRaises(Exception) as context:
+            gcs_bucket_helper.google_cloud_to_local(file_name)
+
+        self.assertEqual(
+            'Invalid Google Cloud Storage (GCS) object path: {}.'.format(file_name),
+            str(context.exception))
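
The new test pins only the error message raised for a bucket-only path. For context, here is a minimal sketch of the kind of check GoogleCloudBucketHelper.google_cloud_to_local presumably performs before downloading anything. This is not the Airflow implementation: the function name validate_gcs_object_path and the split-based check are assumptions; only the error message is taken from the assertion above.

# Sketch only, not the Airflow source. Assumes a valid object path has the
# form gs://<bucket>/<object>; the error message mirrors the test assertion.
def validate_gcs_object_path(file_name):
    if not file_name.startswith('gs://'):
        raise Exception(
            'Invalid Google Cloud Storage (GCS) object path: {}.'.format(file_name))
    # 'gs://test-bucket' strips to 'test-bucket', which yields a single path
    # component (a bucket with no object name), so it is rejected.
    path_components = file_name[len('gs://'):].split('/')
    if len(path_components) < 2:
        raise Exception(
            'Invalid Google Cloud Storage (GCS) object path: {}.'.format(file_name))

validate_gcs_object_path('gs://test-bucket')  # raises the message asserted in the test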