HBASE-19606 Fixed Checkstyle errors in hbase-hadoop-compat and enabled Checkstyle to fail on violations

Jan Hentschel 2017-12-23 18:34:57 +01:00
parent 6011641c39
commit 741466a882
20 changed files with 58 additions and 40 deletions

hbase-checkstyle/src/main/resources/hbase/checkstyle-suppressions.xml

@@ -35,5 +35,5 @@
   <suppress checks="." files=".*/generated-jamon/.*\.java"/>
   <suppress checks="MagicNumberCheck" files=".*/src/test/.*\.java"/>
   <suppress checks="VisibilityModifier" files=".*/src/test/.*\.java"/>
-  <suppress checks="InterfaceIsTypeCheck" files="RSGroupableBalancer.java"/>
+  <suppress checks="InterfaceIsTypeCheck" files=".*/src/main/.*\.java"/>
 </suppressions>
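
For context, Checkstyle's InterfaceIsType check flags interfaces that define only constants and no methods, which is exactly the shape of the metrics-source interfaces in this module; the suppression is therefore widened from the single RSGroupableBalancer.java to all main source files. A minimal sketch of the flagged pattern, with hypothetical names not taken from this commit:

    // Constant-only interface: InterfaceIsTypeCheck flags this shape because
    // the interface holds metric names rather than defining a type.
    public interface ExampleMetricsSource {
      String EXAMPLE_COUNTER_NAME = "exampleCounter";
      String EXAMPLE_COUNTER_DESC = "Number of example events (Counter).";
    }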

hbase-hadoop-compat/pom.xml

@@ -56,7 +56,23 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-source-plugin</artifactId>
       </plugin>
-    </plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>checkstyle</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>check</goal>
+            </goals>
+            <configuration>
+              <failOnViolation>true</failOnViolation>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
     <pluginManagement>
       <plugins>
         <!--This plugin's configuration is used to store Eclipse m2e settings
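
Assuming a standard Maven layout, this execution binds checkstyle:check to the validate phase with failOnViolation enabled, so a violation in hbase-hadoop-compat now fails the build at the start of the lifecycle. A local reproduction sketch (module selector assumed):

    # Running validate from the repository root should now trip on any violation.
    mvn -pl hbase-hadoop-compat validate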

MetricsAssignmentManagerSource.java

@@ -52,7 +52,8 @@ public interface MetricsAssignmentManagerSource extends BaseSource {
   String RIT_COUNT_DESC = "Current number of Regions In Transition (Gauge).";
   String RIT_COUNT_OVER_THRESHOLD_DESC =
       "Current number of Regions In Transition over threshold time (Gauge).";
-  String RIT_OLDEST_AGE_DESC = "Timestamp in milliseconds of the oldest Region In Transition (Gauge).";
+  String RIT_OLDEST_AGE_DESC =
+      "Timestamp in milliseconds of the oldest Region In Transition (Gauge).";
   String RIT_DURATION_DESC =
       "Total durations in milliseconds for all Regions in Transition (Histogram).";

MetricsMasterSource.java

@@ -65,7 +65,8 @@ public interface MetricsMasterSource extends BaseSource {
   String CLUSTER_REQUESTS_NAME = "clusterRequests";
   String MASTER_ACTIVE_TIME_DESC = "Master Active Time";
   String MASTER_START_TIME_DESC = "Master Start Time";
-  String MASTER_FINISHED_INITIALIZATION_TIME_DESC = "Timestamp when Master has finished initializing";
+  String MASTER_FINISHED_INITIALIZATION_TIME_DESC =
+      "Timestamp when Master has finished initializing";
   String AVERAGE_LOAD_DESC = "AverageLoad";
   String LIVE_REGION_SERVERS_DESC = "Names of live RegionServers";
   String NUMBER_OF_REGION_SERVERS_DESC = "Number of RegionServers";

MetricsHeapMemoryManagerSource.java

@@ -123,7 +123,8 @@ public interface MetricsHeapMemoryManagerSource extends BaseSource {
   // Counters
   String DO_NOTHING_COUNTER_NAME = "tunerDoNothingCounter";
   String DO_NOTHING_COUNTER_DESC =
-      "The number of times that tuner neither expands memstore global size limit nor expands blockcache max size";
+      "The number of times that tuner neither expands memstore global size limit nor expands " +
+      "blockcache max size";
   String ABOVE_HEAP_LOW_WATERMARK_COUNTER_NAME = "aboveHeapOccupancyLowWaterMarkCounter";
   String ABOVE_HEAP_LOW_WATERMARK_COUNTER_DESC =
       "The number of times that heap occupancy percent is above low watermark";

MetricsRegionServerSource.java

@@ -55,7 +55,7 @@ public interface MetricsRegionServerSource extends BaseSource, JvmPauseMonitorSource {
   /**
    * Update the PutBatch time histogram if a batch contains a Put op
-   * @param t
+   * @param t time it took
    */
   void updatePutBatch(long t);

MetricsRegionWrapper.java

@@ -118,26 +118,28 @@ public interface MetricsRegionWrapper {
   long getNumCompactionsFailed();

   /**
-   * @return the total number of compactions that are currently queued(or being executed) at point in
-   *         time
+   * @return the total number of compactions that are currently queued(or being executed) at point
+   *         in time
    */
   long getNumCompactionsQueued();

   /**
-   * @return the total number of flushes currently queued(being executed) for this region at point in
-   *         time
+   * @return the total number of flushes currently queued(being executed) for this region at point
+   *         in time
    */
   long getNumFlushesQueued();

   /**
+   * Note that this metric is updated periodically and hence might miss some data points.
+   *
    * @return the max number of compactions queued for this region
-   * Note that this metric is updated periodically and hence might miss some data points
    */
   long getMaxCompactionQueueSize();

   /**
+   * Note that this metric is updated periodically and hence might miss some data points.
+   *
    * @return the max number of flushes queued for this region
-   * Note that this metric is updated periodically and hence might miss some data points
    */
   long getMaxFlushQueueSize();

MetricsReplicationSourceSource.java

@@ -26,8 +26,10 @@ public interface MetricsReplicationSourceSource extends BaseSource {
   public static final String SOURCE_AGE_OF_LAST_SHIPPED_OP = "source.ageOfLastShippedOp";
   public static final String SOURCE_SHIPPED_BATCHES = "source.shippedBatches";

-  /** @deprecated Use SOURCE_SHIPPED_BYTES instead */
+  /**
+   * @deprecated Use {@link #SOURCE_SHIPPED_BYTES} instead
+   */
   @Deprecated
   public static final String SOURCE_SHIPPED_KBS = "source.shippedKBs";
   public static final String SOURCE_SHIPPED_BYTES = "source.shippedBytes";
   public static final String SOURCE_SHIPPED_OPS = "source.shippedOps";

MetricsRESTSource.java

@@ -84,7 +84,7 @@ public interface MetricsRESTSource extends BaseSource, JvmPauseMonitorSource {
   /**
    * Increment the number of successful Delete requests.
    *
-   * @param inc
+   * @param inc number of successful delete requests
    */
   void incrementSucessfulDeleteRequests(int inc);

MetricsThriftServerSource.java

@@ -36,7 +36,7 @@ public interface MetricsThriftServerSource extends ExceptionTrackingSource, JvmPauseMonitorSource {
   /**
    * Add how long an operation was in the queue.
-   * @param time
+   * @param time the time to add
    */
   void incTimeInQueue(long time);

MetricsThriftServerSourceFactory.java

@@ -18,9 +18,10 @@
 package org.apache.hadoop.hbase.thrift;

-/** Factory that will be used to create metrics sources for the two diffent types of thrift servers. */
+/**
+ * Factory that will be used to create metrics sources for the two different types of thrift servers.
+ */
 public interface MetricsThriftServerSourceFactory {

   String METRICS_NAME = "Thrift";
   String METRICS_DESCRIPTION = "Thrift Server Metrics";
   String THRIFT_ONE_METRICS_CONTEXT = "thrift-one";

@@ -33,5 +34,4 @@ public interface MetricsThriftServerSourceFactory {
   /** Create a Source for a thrift two server */
   MetricsThriftServerSource createThriftTwoSource();
 }

MetricsZooKeeperSource.java

@@ -49,15 +49,18 @@ public interface MetricsZooKeeperSource extends BaseSource {
   String EXCEPTION_CONNECTIONLOSS = "CONNECTIONLOSS Exception";
   String EXCEPTION_CONNECTIONLOSS_DESC = "Number of failed ops due to a CONNECTIONLOSS exception.";
   String EXCEPTION_DATAINCONSISTENCY = "DATAINCONSISTENCY Exception";
-  String EXCEPTION_DATAINCONSISTENCY_DESC = "Number of failed ops due to a DATAINCONSISTENCY exception.";
+  String EXCEPTION_DATAINCONSISTENCY_DESC =
+      "Number of failed ops due to a DATAINCONSISTENCY exception.";
   String EXCEPTION_INVALIDACL = "INVALIDACL Exception";
   String EXCEPTION_INVALIDACL_DESC = "Number of failed ops due to an INVALIDACL exception";
   String EXCEPTION_NOAUTH = "NOAUTH Exception";
   String EXCEPTION_NOAUTH_DESC = "Number of failed ops due to a NOAUTH exception.";
   String EXCEPTION_OPERATIONTIMEOUT = "OPERATIONTIMEOUT Exception";
-  String EXCEPTION_OPERATIONTIMEOUT_DESC = "Number of failed ops due to an OPERATIONTIMEOUT exception.";
+  String EXCEPTION_OPERATIONTIMEOUT_DESC =
+      "Number of failed ops due to an OPERATIONTIMEOUT exception.";
   String EXCEPTION_RUNTIMEINCONSISTENCY = "RUNTIMEINCONSISTENCY Exception";
-  String EXCEPTION_RUNTIMEINCONSISTENCY_DESC = "Number of failed ops due to a RUNTIMEINCONSISTENCY exception.";
+  String EXCEPTION_RUNTIMEINCONSISTENCY_DESC =
+      "Number of failed ops due to a RUNTIMEINCONSISTENCY exception.";
   String EXCEPTION_SESSIONEXPIRED = "SESSIONEXPIRED Exception";
   String EXCEPTION_SESSIONEXPIRED_DESC = "Number of failed ops due to a SESSIONEXPIRED exception.";
   String EXCEPTION_SYSTEMERROR = "SYSTEMERROR Exception";

MetricHistogram.java

@@ -40,7 +40,7 @@ public interface MetricHistogram {
   /**
    * Add a single value to a histogram's stream of values.
-   * @param value
+   * @param value the value to add
    */
   void add(long value);

TestCompatibilitySingletonFactory.java

@@ -18,6 +18,9 @@
 package org.apache.hadoop.hbase;

+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;

@@ -26,15 +29,11 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;

-import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.testclassification.MetricsTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
-
 @Category({MetricsTests.class, SmallTests.class})
 public class TestCompatibilitySingletonFactory {

TestMetricsMasterSourceFactory.java

@@ -19,8 +19,8 @@
 package org.apache.hadoop.hbase.master;

 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.testclassification.MetricsTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

TestMetricsRegionServerSourceFactory.java

@@ -19,9 +19,8 @@
 package org.apache.hadoop.hbase.regionserver;

 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
-import org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactory;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.testclassification.MetricsTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

TestMetricsWALSource.java

@@ -19,8 +19,8 @@
 package org.apache.hadoop.hbase.regionserver.wal;

 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.testclassification.MetricsTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

TestMetricsReplicationSourceFactory.java

@@ -19,9 +19,8 @@
 package org.apache.hadoop.hbase.replication.regionserver;

 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
-import org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSource;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.testclassification.MetricsTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

TestMetricsRESTSource.java

@@ -19,9 +19,8 @@
 package org.apache.hadoop.hbase.rest;

 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
-import org.apache.hadoop.hbase.rest.MetricsRESTSource;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.testclassification.MetricsTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

@@ -30,12 +29,9 @@ import org.junit.experimental.categories.Category;
  */
 @Category({MetricsTests.class, SmallTests.class})
 public class TestMetricsRESTSource {

   @Test(expected=RuntimeException.class)
   public void testGetInstanceNoHadoopCompat() throws Exception {
     //This should throw an exception because there is no compat lib on the class path.
     CompatibilitySingletonFactory.getInstance(MetricsRESTSource.class);
   }
 }

TestMetricsThriftServerSourceFactory.java

@@ -19,9 +19,8 @@
 package org.apache.hadoop.hbase.thrift;

 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.testclassification.MetricsTests;
-import org.apache.hadoop.hbase.thrift.MetricsThriftServerSourceFactory;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;