6
6
7
7
import io .airbyte .db .jdbc .JdbcDatabase ;
8
8
import io .airbyte .integrations .base .JavaBaseConstants ;
9
+ import io .airbyte .integrations .base .sentry .AirbyteSentry ;
9
10
import io .airbyte .integrations .destination .jdbc .JdbcSqlOperations ;
10
11
import io .airbyte .integrations .destination .jdbc .SqlOperations ;
11
12
import io .airbyte .protocol .models .AirbyteRecordMessage ;
12
13
import java .io .File ;
13
14
import java .nio .file .Files ;
14
15
import java .sql .SQLException ;
15
16
import java .util .List ;
17
+ import java .util .Map ;
16
18
import java .util .UUID ;
17
19
import org .slf4j .Logger ;
18
20
import org .slf4j .LoggerFactory ;
@@ -22,48 +24,57 @@ public class SnowflakeStagingSqlOperations extends JdbcSqlOperations implements
22
24
// NOTE(review): was LoggerFactory.getLogger(SnowflakeSqlOperations.class) — log events from this
// class were attributed to SnowflakeSqlOperations. Use this class's own literal so log output
// identifies the staging implementation correctly.
private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeStagingSqlOperations.class);
23
25
24
26
@ Override
25
- protected void insertRecordsInternal (JdbcDatabase database , List <AirbyteRecordMessage > records , String schemaName , String stage ) throws Exception {
27
+ protected void insertRecordsInternal (final JdbcDatabase database ,
28
+ final List <AirbyteRecordMessage > records ,
29
+ final String schemaName ,
30
+ final String stage ) {
26
31
LOGGER .info ("actual size of batch for staging: {}" , records .size ());
27
32
28
33
if (records .isEmpty ()) {
29
34
return ;
30
35
}
31
36
try {
32
37
loadDataIntoStage (database , stage , records );
33
- } catch (Exception e ) {
38
+ } catch (final Exception e ) {
34
39
LOGGER .error ("Failed to upload records into stage {}" , stage , e );
35
40
throw new RuntimeException (e );
36
41
}
37
42
}
38
43
39
- private void loadDataIntoStage (JdbcDatabase database , String stage , List <AirbyteRecordMessage > partition ) throws Exception {
44
+ private void loadDataIntoStage (final JdbcDatabase database , final String stage , final List <AirbyteRecordMessage > partition ) throws Exception {
40
45
final File tempFile = Files .createTempFile (UUID .randomUUID ().toString (), ".csv" ).toFile ();
41
46
writeBatchToFile (tempFile , partition );
42
47
database .execute (String .format ("PUT file://%s @%s PARALLEL = %d" , tempFile .getAbsolutePath (), stage , Runtime .getRuntime ().availableProcessors ()));
43
48
Files .delete (tempFile .toPath ());
44
49
}
45
50
46
51
public void createStageIfNotExists (final JdbcDatabase database , final String stageName ) throws SQLException {
47
- database .execute (String .format ("CREATE STAGE IF NOT EXISTS %s encryption = (type = 'SNOWFLAKE_SSE')" +
48
- " copy_options = (on_error='skip_file');" , stageName ));
52
+ final String query = "CREATE STAGE IF NOT EXISTS %s encryption = (type = 'SNOWFLAKE_SSE') copy_options = (on_error='skip_file');" ;
53
+ AirbyteSentry .executeWithTracing ("CreateStageIfNotExists" ,
54
+ () -> database .execute (String .format (query , stageName )),
55
+ Map .of ("stage" , stageName ));
49
56
}
50
57
51
- public void copyIntoTmpTableFromStage (JdbcDatabase database , String stageName , String dstTableName , String schemaName ) throws SQLException {
52
- database . execute ( String . format ( "COPY INTO %s.%s FROM @%s file_format = " +
53
- "(type = csv field_delimiter = ',' skip_header = 0 FIELD_OPTIONALLY_ENCLOSED_BY = ' \" ')" ,
54
- schemaName ,
55
- dstTableName ,
56
- stageName ));
57
-
58
+ public void copyIntoTmpTableFromStage (final JdbcDatabase database , final String stageName , final String dstTableName , final String schemaName )
59
+ throws SQLException {
60
+ final String query = "COPY INTO %s.%s FROM @%s file_format = " +
61
+ "(type = csv field_delimiter = ',' skip_header = 0 FIELD_OPTIONALLY_ENCLOSED_BY = ' \" ')" ;
62
+ AirbyteSentry . executeWithTracing ( "CopyIntoTableFromStage" ,
63
+ () -> database . execute ( String . format ( query , schemaName , dstTableName , stageName )),
64
+ Map . of ( "schema" , schemaName , "stage" , stageName , "table" , dstTableName ));
58
65
}
59
66
60
67
public void dropStageIfExists (final JdbcDatabase database , final String stageName ) throws SQLException {
61
- database .execute (String .format ("DROP STAGE IF EXISTS %s;" , stageName ));
68
+ AirbyteSentry .executeWithTracing ("DropStageIfExists" ,
69
+ () -> database .execute (String .format ("DROP STAGE IF EXISTS %s;" , stageName )),
70
+ Map .of ("stage" , stageName ));
62
71
}
63
72
64
73
@ Override
65
74
public void createTableIfNotExists (final JdbcDatabase database , final String schemaName , final String tableName ) throws SQLException {
66
- database .execute (createTableQuery (database , schemaName , tableName ));
75
+ AirbyteSentry .executeWithTracing ("CreateTableIfNotExists" ,
76
+ () -> database .execute (createTableQuery (database , schemaName , tableName )),
77
+ Map .of ("schema" , schemaName , "table" , tableName ));
67
78
}
68
79
69
80
@ Override
@@ -77,12 +88,14 @@ public String createTableQuery(final JdbcDatabase database, final String schemaN
77
88
schemaName , tableName , JavaBaseConstants .COLUMN_NAME_AB_ID , JavaBaseConstants .COLUMN_NAME_DATA , JavaBaseConstants .COLUMN_NAME_EMITTED_AT );
78
89
}
79
90
80
- public void cleanUpStage (JdbcDatabase database , String path ) throws SQLException {
81
- database .execute (String .format ("REMOVE @%s;" , path ));
91
+ public void cleanUpStage (final JdbcDatabase database , final String path ) throws SQLException {
92
+ AirbyteSentry .executeWithTracing ("CleanStage" ,
93
+ () -> database .execute (String .format ("REMOVE @%s;" , path )),
94
+ Map .of ("path" , path ));
82
95
}
83
96
84
97
@ Override
85
- public boolean isSchemaExists (JdbcDatabase database , String outputSchema ) throws Exception {
98
+ public boolean isSchemaExists (final JdbcDatabase database , final String outputSchema ) throws Exception {
86
99
return database .query (SHOW_SCHEMAS ).map (schemas -> schemas .get (NAME ).asText ()).anyMatch (outputSchema ::equalsIgnoreCase );
87
100
}
88
101
0 commit comments