import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.junit.After;
+import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
+import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -70,68 +72,68 @@ public class TestJavaHBaseContext implements Serializable {
  public static final HBaseClassTestRule TIMEOUT =
    HBaseClassTestRule.forClass(TestJavaHBaseContext.class);

-  private transient JavaSparkContext jsc;
-  HBaseTestingUtility htu;
-  protected static final Logger LOG = LoggerFactory.getLogger(TestJavaHBaseContext.class);
-
-
+  private static transient JavaSparkContext JSC;
+  private static HBaseTestingUtility TEST_UTIL;
+  private static JavaHBaseContext HBASE_CONTEXT;
+  private static final Logger LOG = LoggerFactory.getLogger(TestJavaHBaseContext.class);


  byte[] tableName = Bytes.toBytes("t1");
  byte[] columnFamily = Bytes.toBytes("c");
  byte[] columnFamily1 = Bytes.toBytes("d");
  String columnFamilyStr = Bytes.toString(columnFamily);
  String columnFamilyStr1 = Bytes.toString(columnFamily1);

+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {

-  @Before
-  public void setUp() {
-    jsc = new JavaSparkContext("local", "JavaHBaseContextSuite");
+    JSC = new JavaSparkContext("local", "JavaHBaseContextSuite");
+    TEST_UTIL = new HBaseTestingUtility();
+    Configuration conf = TEST_UTIL.getConfiguration();

-    File tempDir = Files.createTempDir();
-    tempDir.deleteOnExit();
+    HBASE_CONTEXT = new JavaHBaseContext(JSC, conf);

-    htu = new HBaseTestingUtility();
-    try {
-      LOG.info("cleaning up test dir");
+    LOG.info("cleaning up test dir");

-      htu.cleanupTestDir();
+    TEST_UTIL.cleanupTestDir();

-      LOG.info("starting minicluster");
+    LOG.info("starting minicluster");

-      htu.startMiniZKCluster();
-      htu.startMiniHBaseCluster(1, 1);
+    TEST_UTIL.startMiniZKCluster();
+    TEST_UTIL.startMiniHBaseCluster(1, 1);

-      LOG.info(" - minicluster started");
+    LOG.info(" - minicluster started");
+  }

-      try {
-        htu.deleteTable(TableName.valueOf(tableName));
-      } catch (Exception e) {
-        LOG.info(" - no table " + Bytes.toString(tableName) + " found");
-      }
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    LOG.info("shutting down minicluster");
+    TEST_UTIL.shutdownMiniHBaseCluster();
+    TEST_UTIL.shutdownMiniZKCluster();
+    LOG.info(" - minicluster shut down");
+    TEST_UTIL.cleanupTestDir();

-      LOG.info(" - creating table " + Bytes.toString(tableName));
-      htu.createTable(TableName.valueOf(tableName),
-          new byte[][]{columnFamily, columnFamily1});
-      LOG.info(" - created table");
-    } catch (Exception e1) {
-      throw new RuntimeException(e1);
-    }
+    JSC.stop();
+    JSC = null;
  }

-  @After
-  public void tearDown() {
+  @Before
+  public void setUp() throws Exception {
+
    try {
-      htu.deleteTable(TableName.valueOf(tableName));
-      LOG.info("shuting down minicluster");
-      htu.shutdownMiniHBaseCluster();
-      htu.shutdownMiniZKCluster();
-      LOG.info(" - minicluster shut down");
-      htu.cleanupTestDir();
+      TEST_UTIL.deleteTable(TableName.valueOf(tableName));
    } catch (Exception e) {
-      throw new RuntimeException(e);
+      LOG.info(" - no table {} found", Bytes.toString(tableName));
    }
-    jsc.stop();
-    jsc = null;
+
+    LOG.info(" - creating table {}", Bytes.toString(tableName));
+    TEST_UTIL.createTable(TableName.valueOf(tableName),
+      new byte[][]{columnFamily, columnFamily1});
+    LOG.info(" - created table");
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    TEST_UTIL.deleteTable(TableName.valueOf(tableName));
  }

  @Test
@@ -144,11 +146,9 @@ public void testBulkPut() throws IOException {
    list.add("4," + columnFamilyStr + ",a,4");
    list.add("5," + columnFamilyStr + ",a,5");

-    JavaRDD<String> rdd = jsc.parallelize(list);
-
-    Configuration conf = htu.getConfiguration();
+    JavaRDD<String> rdd = JSC.parallelize(list);

-    JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
+    Configuration conf = TEST_UTIL.getConfiguration();

    Connection conn = ConnectionFactory.createConnection(conf);
    Table table = conn.getTable(TableName.valueOf(tableName));
@@ -163,7 +163,7 @@ public void testBulkPut() throws IOException {
      table.close();
    }

-    hbaseContext.bulkPut(rdd,
+    HBASE_CONTEXT.bulkPut(rdd,
      TableName.valueOf(tableName),
      new PutFunction());

@@ -212,15 +212,13 @@ public void testBulkDelete() throws IOException {
    list.add(Bytes.toBytes("2"));
    list.add(Bytes.toBytes("3"));

-    JavaRDD<byte[]> rdd = jsc.parallelize(list);
+    JavaRDD<byte[]> rdd = JSC.parallelize(list);

-    Configuration conf = htu.getConfiguration();
+    Configuration conf = TEST_UTIL.getConfiguration();

    populateTableWithMockData(conf, TableName.valueOf(tableName));

-    JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
-
-    hbaseContext.bulkDelete(rdd, TableName.valueOf(tableName),
+    HBASE_CONTEXT.bulkDelete(rdd, TableName.valueOf(tableName),
      new JavaHBaseBulkDeleteExample.DeleteFunction(), 2);

@@ -248,17 +246,15 @@ public void testBulkDelete() throws IOException {

  @Test
  public void testDistributedScan() throws IOException {
-    Configuration conf = htu.getConfiguration();
+    Configuration conf = TEST_UTIL.getConfiguration();

    populateTableWithMockData(conf, TableName.valueOf(tableName));

-    JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
-
    Scan scan = new Scan();
    scan.setCaching(100);

    JavaRDD<String> javaRdd =
-      hbaseContext.hbaseRDD(TableName.valueOf(tableName), scan)
+      HBASE_CONTEXT.hbaseRDD(TableName.valueOf(tableName), scan)
        .map(new ScanConvertFunction());

    List<String> results = javaRdd.collect();
@@ -283,16 +279,14 @@ public void testBulkGet() throws IOException {
    list.add(Bytes.toBytes("4"));
    list.add(Bytes.toBytes("5"));

-    JavaRDD<byte[]> rdd = jsc.parallelize(list);
+    JavaRDD<byte[]> rdd = JSC.parallelize(list);

-    Configuration conf = htu.getConfiguration();
+    Configuration conf = TEST_UTIL.getConfiguration();

    populateTableWithMockData(conf, TableName.valueOf(tableName));

-    JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
-
    final JavaRDD<String> stringJavaRDD =
-      hbaseContext.bulkGet(TableName.valueOf(tableName), 2, rdd,
+      HBASE_CONTEXT.bulkGet(TableName.valueOf(tableName), 2, rdd,
        new GetFunction(),
        new ResultFunction());

@@ -302,7 +296,7 @@ public void testBulkGet() throws IOException {
  @Test
  public void testBulkLoad() throws Exception {

-    Path output = htu.getDataTestDir("testBulkLoad");
+    Path output = TEST_UTIL.getDataTestDir("testBulkLoad");
    // Add cell as String: "row,family,qualifier,value"
    List<String> list = new ArrayList<String>();
    // row1
@@ -315,14 +309,11 @@ public void testBulkLoad() throws Exception {
    list.add("2," + columnFamilyStr + ",a,3");
    list.add("2," + columnFamilyStr + ",b,3");

-    JavaRDD<String> rdd = jsc.parallelize(list);
-
-    Configuration conf = htu.getConfiguration();
-    JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
-
+    JavaRDD<String> rdd = JSC.parallelize(list);

+    Configuration conf = TEST_UTIL.getConfiguration();

-    hbaseContext.bulkLoad(rdd, TableName.valueOf(tableName), new BulkLoadFunction(),
+    HBASE_CONTEXT.bulkLoad(rdd, TableName.valueOf(tableName), new BulkLoadFunction(),
      output.toUri().getPath(), new HashMap<byte[], FamilyHFileWriteOptions>(), false,
      HConstants.DEFAULT_MAX_FILE_SIZE);
@@ -369,7 +360,7 @@ public void testBulkLoad() throws Exception {

  @Test
  public void testBulkLoadThinRows() throws Exception {
-    Path output = htu.getDataTestDir("testBulkLoadThinRows");
+    Path output = TEST_UTIL.getDataTestDir("testBulkLoadThinRows");
    // because of the limitation of the scala bulkLoadThinRows API
    // we need to provide data as <row, all cells in that row>
    List<List<String>> list = new ArrayList<List<String>>();
@@ -389,12 +380,11 @@ public void testBulkLoadThinRows() throws Exception {
    list2.add("2," + columnFamilyStr + ",b,3");
    list.add(list2);

-    JavaRDD<List<String>> rdd = jsc.parallelize(list);
+    JavaRDD<List<String>> rdd = JSC.parallelize(list);

-    Configuration conf = htu.getConfiguration();
-    JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
+    Configuration conf = TEST_UTIL.getConfiguration();

-    hbaseContext.bulkLoadThinRows(rdd, TableName.valueOf(tableName), new BulkLoadThinRowsFunction(),
+    HBASE_CONTEXT.bulkLoadThinRows(rdd, TableName.valueOf(tableName), new BulkLoadThinRowsFunction(),
      output.toString(), new HashMap<byte[], FamilyHFileWriteOptions>(), false,
      HConstants.DEFAULT_MAX_FILE_SIZE);