
Commit 8da8d2b

HBASE-23579 Fixed Checkstyle issues
Signed-off-by: Peter Somogyi <psomogyi@apache.org>
Signed-off-by: Xu Cang <xucang@apache.org>
1 parent 2c73b5e

File tree

8 files changed: +32 −34 lines changed

kafka/hbase-kafka-proxy/pom.xml

+4
@@ -57,6 +57,10 @@
         <skipAssembly>true</skipAssembly>
       </configuration>
     </plugin>
+    <plugin>
+      <groupId>org.apache.maven.plugins</groupId>
+      <artifactId>maven-checkstyle-plugin</artifactId>
+    </plugin>
   </plugins>
 </build>
 <dependencies>

kafka/hbase-kafka-proxy/src/main/java/org/apache/hadoop/hbase/kafka/KafkaProxy.java

+11 −15
@@ -159,9 +159,7 @@ public static void main(String[] args) throws Exception {
     commandLineConf.clear();

     GenericOptionsParser parser = new GenericOptionsParser(commandLineConf, args);
-    String restArgs[] =parser.getRemainingArgs();
-
-
+    String[] restArgs = parser.getRemainingArgs();

     try {
       commandLine = new BasicParser().parse(options, restArgs);
@@ -279,10 +277,8 @@ public static void setupZookeeperZnodes(CuratorFramework zk, String rootZnode,St
     byte []uuidBytes = Bytes.toBytes(newValue);
     String idPath=rootZnode+"/hbaseid";
     if (zk.checkExists().forPath(idPath) == null) {
-      // zk.create().creatingParentsIfNeeded().forPath(rootZnode +
-      // "/hbaseid",uuidBytes);
-      zk.create().forPath(rootZnode);
-      zk.create().forPath(rootZnode +"/hbaseid",uuidBytes);
+      zk.create().forPath(rootZnode);
+      zk.create().forPath(rootZnode +"/hbaseid",uuidBytes);
     } else {
       // If the znode is there already make sure it has the
       // expected value for the peer name.
@@ -340,14 +336,14 @@ public static void checkForOrCreateReplicationPeer(Configuration hbaseConf,

     if (peerThere) {
       if (enablePeer){
-        LOG.info("enable peer," + peerName);
-        List<ReplicationPeerDescription> peers = admin.listReplicationPeers().stream()
-            .filter((peer)->peer.getPeerId().equals(peerName))
-            .filter((peer)->peer.isEnabled()==false)
-            .collect(Collectors.toList());
-        if (!peers.isEmpty()){
-          admin.enableReplicationPeer(peerName);
-        }
+        LOG.info("enable peer,{}", peerName);
+        List<ReplicationPeerDescription> peers = admin.listReplicationPeers().stream()
+            .filter(peer -> peer.getPeerId().equals(peerName))
+            .filter(peer -> !peer.isEnabled())
+            .collect(Collectors.toList());
+        if (!peers.isEmpty()){
+          admin.enableReplicationPeer(peerName);
+        }
       }
       break;
     } else {
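The checkForOrCreateReplicationPeer cleanup boils down to two idioms Checkstyle pushes you toward: negate a boolean with `!` instead of comparing against `false` (and drop the parentheses around a single lambda parameter), and let SLF4J's `{}` placeholder do the string building instead of concatenation. A minimal, self-contained sketch of the stream-filter part follows; the Peer class here is a hypothetical stand-in for ReplicationPeerDescription, not HBase API.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class PeerFilterSketch {
  // Hypothetical stand-in for ReplicationPeerDescription.
  static final class Peer {
    private final String id;
    private final boolean enabled;
    Peer(String id, boolean enabled) { this.id = id; this.enabled = enabled; }
    String getPeerId() { return id; }
    boolean isEnabled() { return enabled; }
  }

  public static void main(String[] args) {
    List<Peer> peers = Arrays.asList(new Peer("kafka", false), new Peer("other", true));
    // Checkstyle-clean form: "!peer.isEnabled()" rather than "peer.isEnabled()==false",
    // and "peer ->" rather than "(peer)->".
    List<Peer> disabled = peers.stream()
        .filter(peer -> peer.getPeerId().equals("kafka"))
        .filter(peer -> !peer.isEnabled())
        .collect(Collectors.toList());
    System.out.println("peers to enable: " + disabled.size());
  }
}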

kafka/hbase-kafka-proxy/src/test/java/org/apache/hadoop/hbase/kafka/TestQualifierMatching.java

+2 −2
@@ -14,13 +14,13 @@
  */
 package org.apache.hadoop.hbase.kafka;

+import java.nio.charset.StandardCharsets;
+
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

-import java.nio.charset.StandardCharsets;
-
 /**
  * Make sure match rules work
  */

pom.xml

+10
@@ -418,10 +418,20 @@
           <version>${checkstyle.version}</version>
         </dependency>
       </dependencies>
+      <executions>
+        <execution>
+          <id>checkstyle</id>
+          <phase>validate</phase>
+          <goals>
+            <goal>check</goal>
+          </goals>
+        </execution>
+      </executions>
       <configuration>
         <configLocation>hbase/checkstyle.xml</configLocation>
         <suppressionsLocation>hbase/checkstyle-suppressions.xml</suppressionsLocation>
         <includeTestSourceDirectory>true</includeTestSourceDirectory>
+        <failOnViolation>true</failOnViolation>
       </configuration>
     </plugin>
   </plugins>

spark/hbase-spark-it/pom.xml

-3
@@ -160,9 +160,6 @@
     <plugin>
       <groupId>org.apache.maven.plugins</groupId>
       <artifactId>maven-checkstyle-plugin</artifactId>
-      <configuration>
-        <failOnViolation>true</failOnViolation>
-      </configuration>
     </plugin>
     <plugin>
       <groupId>net.revelc.code</groupId>

spark/hbase-spark/pom.xml

-3
@@ -234,9 +234,6 @@
     <plugin>
       <groupId>org.apache.maven.plugins</groupId>
       <artifactId>maven-checkstyle-plugin</artifactId>
-      <configuration>
-        <failOnViolation>true</failOnViolation>
-      </configuration>
     </plugin>
     <plugin>
       <groupId>net.revelc.code</groupId>

spark/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java

+2 −4
@@ -14,7 +14,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.spark;

 import java.io.IOException;
@@ -32,17 +31,16 @@
 import org.apache.hadoop.hbase.spark.datasources.Field;
 import org.apache.hadoop.hbase.spark.datasources.JavaBytesEncoder;
 import org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos;
-import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import scala.collection.mutable.MutableList;
+
 import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
 import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;

-import scala.collection.mutable.MutableList;
-
 /**
  * This filter will push down all qualifier logic given to us
  * by SparkSQL so that we have make the filters at the region server level

spark/hbase-spark/src/test/java/org/apache/hadoop/hbase/spark/TestJavaHBaseContext.java

+3 −7
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hbase.spark;

-import java.io.File;
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
@@ -63,8 +62,6 @@
 import org.slf4j.LoggerFactory;
 import scala.Tuple2;

-import org.apache.hbase.thirdparty.com.google.common.io.Files;
-
 @Category({MiscTests.class, MediumTests.class})
 public class TestJavaHBaseContext implements Serializable {

@@ -133,7 +130,7 @@ public void setUp() throws Exception {

   @After
   public void tearDown() throws Exception {
-      TEST_UTIL.deleteTable(TableName.valueOf(tableName));
+    TEST_UTIL.deleteTable(TableName.valueOf(tableName));
   }

   @Test
@@ -384,8 +381,8 @@ public void testBulkLoadThinRows() throws Exception {

     Configuration conf = TEST_UTIL.getConfiguration();

-    HBASE_CONTEXT.bulkLoadThinRows(rdd, TableName.valueOf(tableName), new BulkLoadThinRowsFunction(),
-        output.toString(), new HashMap<byte[], FamilyHFileWriteOptions>(), false,
+    HBASE_CONTEXT.bulkLoadThinRows(rdd, TableName.valueOf(tableName),
+        new BulkLoadThinRowsFunction(), output.toString(), new HashMap<>(), false,
         HConstants.DEFAULT_MAX_FILE_SIZE);

@@ -524,5 +521,4 @@ private void populateTableWithMockData(Configuration conf, TableName tableName)
       table.put(puts);
     }
   }
-
 }
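The testBulkLoadThinRows change is mostly line wrapping, but it also swaps new HashMap<byte[], FamilyHFileWriteOptions>() for the diamond new HashMap<>(), letting the compiler infer the type arguments from the parameter the map is passed to. A minimal sketch of that inference; the count method and its value type are hypothetical stand-ins for the bulkLoadThinRows parameter, not HBase API.

import java.util.HashMap;
import java.util.Map;

public class DiamondSketch {
  // Hypothetical stand-in for a parameter typed Map<byte[], FamilyHFileWriteOptions>.
  static int count(Map<byte[], String> options) {
    return options.size();
  }

  public static void main(String[] args) {
    // Before: count(new HashMap<byte[], String>());
    // After: the diamond lets the compiler infer <byte[], String> from count's signature.
    System.out.println(count(new HashMap<>()));
  }
}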
