diff --git a/dev-support/findbugs-exclude.xml b/dev-support/findbugs-exclude.xml
index ef2c77d8fb3..da0abc92541 100644
--- a/dev-support/findbugs-exclude.xml
+++ b/dev-support/findbugs-exclude.xml
@@ -36,6 +36,11 @@
+
+
+
+
+
diff --git a/dev-support/test-patch.properties b/dev-support/test-patch.properties
index 7a911825a83..4394d5ab5d9 100644
--- a/dev-support/test-patch.properties
+++ b/dev-support/test-patch.properties
@@ -19,7 +19,7 @@ MAVEN_OPTS="-Xmx3g"
# Please update the per-module test-patch.properties if you update this file.
OK_RELEASEAUDIT_WARNINGS=84
-OK_FINDBUGS_WARNINGS=226
+OK_FINDBUGS_WARNINGS=127
# Allow two warnings. Javadoc complains about sun.misc.Unsafe use. See HBASE-7457
OK_JAVADOC_WARNINGS=2
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 45086d60197..5a3b318c67f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -80,37 +80,37 @@ public class KeyValue implements Cell, HeapSize {
/**
* Comparator for plain key/values; i.e. non-catalog table key/values.
*/
- public static KVComparator COMPARATOR = new KVComparator();
+ public static final KVComparator COMPARATOR = new KVComparator();
/**
* Comparator for plain key; i.e. non-catalog table key. Works on Key portion
* of KeyValue only.
*/
- public static KeyComparator KEY_COMPARATOR = new KeyComparator();
+ public static final KeyComparator KEY_COMPARATOR = new KeyComparator();
/**
* A {@link KVComparator} for .META.
catalog table
* {@link KeyValue}s.
*/
- public static KVComparator META_COMPARATOR = new MetaComparator();
+ public static final KVComparator META_COMPARATOR = new MetaComparator();
/**
* A {@link KVComparator} for .META.
catalog table
* {@link KeyValue} keys.
*/
- public static KeyComparator META_KEY_COMPARATOR = new MetaKeyComparator();
+ public static final KeyComparator META_KEY_COMPARATOR = new MetaKeyComparator();
/**
* A {@link KVComparator} for -ROOT-
catalog table
* {@link KeyValue}s.
*/
- public static KVComparator ROOT_COMPARATOR = new RootComparator();
+ public static final KVComparator ROOT_COMPARATOR = new RootComparator();
/**
* A {@link KVComparator} for -ROOT-
catalog table
* {@link KeyValue} keys.
*/
- public static KeyComparator ROOT_KEY_COMPARATOR = new RootKeyComparator();
+ public static final KeyComparator ROOT_KEY_COMPARATOR = new RootKeyComparator();
/**
* Get the appropriate row comparator for the specified table.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
index 7a9790da9a2..7e9d6deee9b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
@@ -34,7 +34,6 @@ import java.lang.reflect.Method;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.util.Shell;
/**
* This class is a wrapper for the implementation of
@@ -127,6 +126,7 @@ public class JVM
ofdc = runUnixMXBeanMethod("getOpenFileDescriptorCount");
return (ofdc != null ? ofdc.longValue () : -1);
}
+ InputStream in = null;
try {
//need to get the PID number of the process first
RuntimeMXBean rtmbean = ManagementFactory.getRuntimeMXBean();
@@ -137,7 +137,7 @@ public class JVM
Process p = Runtime.getRuntime().exec(
new String[] { "bash", "-c",
"ls /proc/" + pidhost[0] + "/fdinfo | wc -l" });
- InputStream in = p.getInputStream();
+ in = p.getInputStream();
BufferedReader output = new BufferedReader(
new InputStreamReader(in));
@@ -146,6 +146,14 @@ public class JVM
return Long.parseLong(openFileDesCount);
} catch (IOException ie) {
LOG.warn("Not able to get the number of open file descriptors", ie);
+ } finally {
+ if (in != null){
+ try {
+ in.close();
+ } catch (IOException e) {
+ LOG.warn("Not able to close the InputStream", e);
+ }
+ }
}
return -1;
}
@@ -164,13 +172,14 @@ public class JVM
mfdc = runUnixMXBeanMethod("getMaxFileDescriptorCount");
return (mfdc != null ? mfdc.longValue () : -1);
}
+ InputStream in = null;
try {
//using linux bash commands to retrieve info
Process p = Runtime.getRuntime().exec(
new String[] { "bash", "-c",
"ulimit -n" });
- InputStream in = p.getInputStream();
+ in = p.getInputStream();
BufferedReader output = new BufferedReader(
new InputStreamReader(in));
@@ -179,6 +188,14 @@ public class JVM
return Long.parseLong(maxFileDesCount);
} catch (IOException ie) {
LOG.warn("Not able to get the max number of file descriptors", ie);
+ } finally {
+ if (in != null){
+ try {
+ in.close();
+ } catch (IOException e) {
+ LOG.warn("Not able to close the InputStream", e);
+ }
+ }
}
return -1;
}
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
index 2e4eb48e78d..f4c8930200b 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
@@ -63,8 +63,8 @@ import org.apache.zookeeper.ZooKeeper;
* listeners registered with ZooKeeperWatcher cannot be removed.
*/
public class ZooKeeperScanPolicyObserver extends BaseRegionObserver {
- public static String node = "/backup/example/lastbackup";
- public static String zkkey = "ZK";
+ public static final String node = "/backup/example/lastbackup";
+ public static final String zkkey = "ZK";
private static final Log LOG = LogFactory.getLog(ZooKeeperScanPolicyObserver.class);
/**
@@ -93,6 +93,7 @@ public class ZooKeeperScanPolicyObserver extends BaseRegionObserver {
*
* @return the last know version of the data
*/
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="REC_CATCH_EXCEPTION")
public byte[] getData() {
// try at most twice/minute
if (needSetup && EnvironmentEdgeManager.currentTimeMillis() > lastSetupTry + 30000) {
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
index e8f57b68e9b..d3eaf7dd552 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
@@ -127,7 +127,7 @@ public class DemoClient {
// Create the demo table with two column families, entry: and unused:
//
ArrayList columns = new ArrayList();
- ColumnDescriptor col = null;
+ ColumnDescriptor col;
col = new ColumnDescriptor();
col.name = ByteBuffer.wrap(bytes("entry:"));
col.maxVersions = 10;
@@ -227,7 +227,7 @@ public class DemoClient {
client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, dummyAttributes);
printRow(client.getRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes));
- Mutation m = null;
+ Mutation m;
mutations = new ArrayList();
m = new Mutation();
m.column = ByteBuffer.wrap(bytes("entry:foo"));
@@ -272,7 +272,7 @@ public class DemoClient {
}
List result = client.get(ByteBuffer.wrap(t), ByteBuffer.wrap(row), ByteBuffer.wrap(bytes("entry:foo")), dummyAttributes);
- if (result.isEmpty() == false) {
+ if (!result.isEmpty()) {
System.out.println("FATAL: shouldn't get here");
System.exit(-1);
}
@@ -305,7 +305,7 @@ public class DemoClient {
transport.close();
}
- private final void printVersions(ByteBuffer row, List versions) {
+ private void printVersions(ByteBuffer row, List versions) {
StringBuilder rowStr = new StringBuilder();
for (TCell cell : versions) {
rowStr.append(utf8(cell.value.array()));
@@ -314,7 +314,7 @@ public class DemoClient {
System.out.println("row: " + utf8(row.array()) + ", values: " + rowStr);
}
- private final void printRow(TRowResult rowResult) {
+ private void printRow(TRowResult rowResult) {
// copy values into a TreeMap to get them in sorted order
TreeMap sorted = new TreeMap();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java
index b45c04a08bd..4226c3f5cac 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java
@@ -22,9 +22,6 @@ package org.apache.hadoop.hbase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Chore;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.HealthChecker.HealthCheckerExitStatus;
import org.apache.hadoop.util.StringUtils;
@@ -51,7 +48,7 @@ import org.apache.hadoop.util.StringUtils;
healthChecker.init(healthCheckScript, scriptTimeout);
this.threshold = config.getInt(HConstants.HEALTH_FAILURE_THRESHOLD,
HConstants.DEFAULT_HEALTH_FAILURE_THRESHOLD);
- this.failureWindow = this.threshold * sleepTime;
+ this.failureWindow = (long)this.threshold * (long)sleepTime;
}
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ServerName.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ServerName.java
index 348ca2f04f2..f3ae6b50e81 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ServerName.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ServerName.java
@@ -71,7 +71,7 @@ public class ServerName implements Comparable {
*/
public static final String SERVERNAME_SEPARATOR = ",";
- public static Pattern SERVERNAME_PATTERN =
+ public static final Pattern SERVERNAME_PATTERN =
Pattern.compile("[^" + SERVERNAME_SEPARATOR + "]+" +
SERVERNAME_SEPARATOR + Addressing.VALID_PORT_REGEX +
SERVERNAME_SEPARATOR + Addressing.VALID_PORT_REGEX + "$");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java
index 52d50aadc77..5c6f532c4f7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java
@@ -35,7 +35,7 @@ public abstract class OperationWithAttributes extends Operation implements Attri
private Map attributes;
// used for uniquely identifying an operation
- static public String ID_ATRIBUTE = "_operation.attributes.id";
+ public static final String ID_ATRIBUTE = "_operation.attributes.id";
public void setAttribute(String name, byte[] value) {
if (attributes == null && value == null) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index 23bbf186ab9..79b2e0696f0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -95,9 +95,9 @@ public class Scan extends OperationWithAttributes {
// If application wants to collect scan metrics, it needs to
// call scan.setAttribute(SCAN_ATTRIBUTES_ENABLE, Bytes.toBytes(Boolean.TRUE))
- static public String SCAN_ATTRIBUTES_METRICS_ENABLE =
+  public static final String SCAN_ATTRIBUTES_METRICS_ENABLE =
"scan.attributes.metrics.enable";
- static public String SCAN_ATTRIBUTES_METRICS_DATA =
+  public static final String SCAN_ATTRIBUTES_METRICS_DATA =
"scan.attributes.metrics.data";
/*
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
index 77bef274148..f7fc6708434 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
@@ -67,7 +67,7 @@ import com.google.protobuf.Service;
@InterfaceStability.Evolving
public class AggregateImplementation
extends AggregateService implements CoprocessorService, Coprocessor {
- protected static Log log = LogFactory.getLog(AggregateImplementation.class);
+ protected static final Log log = LogFactory.getLog(AggregateImplementation.class);
private RegionCoprocessorEnvironment env;
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
index 8d546e6eb50..d5deaa9d0b2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
@@ -67,7 +67,7 @@ public abstract class EventHandler implements Runnable, Comparable {
protected Server server;
// sequence id generator for default FIFO ordering of events
- protected static AtomicLong seqids = new AtomicLong(0);
+ protected static final AtomicLong seqids = new AtomicLong(0);
// sequence id for this event
private final long seqid;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
index 126c6eb0be4..50fb1cfe1a2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
@@ -532,6 +532,16 @@ public class LruBlockCache implements BlockCache, HeapSize {
if(this.overflow() == that.overflow()) return 0;
return this.overflow() > that.overflow() ? 1 : -1;
}
+
+ @Override
+ public boolean equals(Object that) {
+      if (!(that instanceof BlockBucket)){
+ return false;
+ }
+
+      return compareTo((BlockBucket) that) == 0;
+ }
+
}
/**
@@ -625,13 +635,13 @@ public class LruBlockCache implements BlockCache, HeapSize {
public void evict() {
synchronized(this) {
- this.notify(); // FindBugs NN_NAKED_NOTIFY
+ this.notifyAll(); // FindBugs NN_NAKED_NOTIFY
}
}
- void shutdown() {
+ synchronized void shutdown() {
this.go = false;
- interrupt();
+ this.notifyAll();
}
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HLogInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HLogInputFormat.java
index 2cbdd832e17..7fa852e89e4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HLogInputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HLogInputFormat.java
@@ -35,7 +35,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
-import org.apache.hadoop.hbase.regionserver.wal.HLogUtil;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.InputFormat;
@@ -49,10 +48,10 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
*/
@InterfaceAudience.Public
public class HLogInputFormat extends InputFormat {
- private static Log LOG = LogFactory.getLog(HLogInputFormat.class);
+ private static final Log LOG = LogFactory.getLog(HLogInputFormat.class);
- public static String START_TIME_KEY = "hlog.start.time";
- public static String END_TIME_KEY = "hlog.end.time";
+ public static final String START_TIME_KEY = "hlog.start.time";
+ public static final String END_TIME_KEY = "hlog.end.time";
/**
* {@link InputSplit} for {@link HLog} files. Each split represent
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 88aa8d6566e..c703b9ea75a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -91,13 +91,13 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
@InterfaceAudience.Public
@InterfaceStability.Stable
public class LoadIncrementalHFiles extends Configured implements Tool {
- private static Log LOG = LogFactory.getLog(LoadIncrementalHFiles.class);
- static AtomicLong regionCount = new AtomicLong(0);
+ private static final Log LOG = LogFactory.getLog(LoadIncrementalHFiles.class);
+ static final AtomicLong regionCount = new AtomicLong(0);
private HBaseAdmin hbAdmin;
private Configuration cfg;
- public static String NAME = "completebulkload";
- private static String ASSIGN_SEQ_IDS = "hbase.mapreduce.bulkload.assign.sequenceNumbers";
+ public static final String NAME = "completebulkload";
+ private static final String ASSIGN_SEQ_IDS = "hbase.mapreduce.bulkload.assign.sequenceNumbers";
private boolean assignSeqIds;
public LoadIncrementalHFiles(Configuration conf) throws Exception {
@@ -626,11 +626,11 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
}
HTableDescriptor htd = new HTableDescriptor(tableName);
- HColumnDescriptor hcd = null;
+ HColumnDescriptor hcd;
// Add column families
// Build a set of keys
- byte[][] keys = null;
+ byte[][] keys;
TreeMap map = new TreeMap(Bytes.BYTES_COMPARATOR);
for (FileStatus stat : familyDirStatuses) {
@@ -667,10 +667,10 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
" last=" + Bytes.toStringBinary(last));
// To eventually infer start key-end key boundaries
- Integer value = map.containsKey(first)?(Integer)map.get(first):0;
+ Integer value = map.containsKey(first)? map.get(first):0;
map.put(first, value+1);
- value = map.containsKey(last)?(Integer)map.get(last):0;
+ value = map.containsKey(last)? map.get(last):0;
map.put(last, value-1);
} finally {
reader.close();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/SplitRegionHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/SplitRegionHandler.java
index 292ee76cb7a..285f4afb210 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/SplitRegionHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/SplitRegionHandler.java
@@ -46,6 +46,7 @@ public class SplitRegionHandler extends EventHandler implements TotesHRegionInfo
/**
* For testing only! Set to true to skip handling of split.
*/
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="MS_SHOULD_BE_FINAL")
public static boolean TEST_SKIP = false;
public SplitRegionHandler(Server server,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java
index 0cdcbd533c8..313b7626c63 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
public class KeyPrefixRegionSplitPolicy extends IncreasingToUpperBoundRegionSplitPolicy {
private static final Log LOG = LogFactory
.getLog(KeyPrefixRegionSplitPolicy.class);
- public static String PREFIX_LENGTH_KEY = "prefix_split_key_policy.prefix_length";
+ public static final String PREFIX_LENGTH_KEY = "prefix_split_key_policy.prefix_length";
private int prefixLength = 0;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
index a7a3bc92dac..308252640b7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
@@ -49,8 +49,8 @@ public class Permission extends VersionedWritable {
public byte code() { return code; }
}
- private static Log LOG = LogFactory.getLog(Permission.class);
- protected static Map ACTION_BY_CODE = Maps.newHashMap();
+ private static final Log LOG = LogFactory.getLog(Permission.class);
+ protected static final Map ACTION_BY_CODE = Maps.newHashMap();
protected Action[] actions;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java
index 3aba9991154..8894cb53aac 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java
@@ -27,7 +27,7 @@ import org.apache.zookeeper.WatchedEvent;
@InterfaceAudience.Private
public class EmptyWatcher implements Watcher {
// Used in this package but also by tests so needs to be public
- public static EmptyWatcher instance = new EmptyWatcher();
+ public static final EmptyWatcher instance = new EmptyWatcher();
private EmptyWatcher() {}
public void process(WatchedEvent event) {}
diff --git a/pom.xml b/pom.xml
index d18815c9651..1a0d8096677 100644
--- a/pom.xml
+++ b/pom.xml
@@ -883,7 +883,7 @@
1.6
2.3.4
2.5.2
- 2.0.1
+ 1.3.9-1
3.1
2.9
2.5
@@ -1263,15 +1263,9 @@
- com.google.code.findbugs
- annotations
- ${findbugs.version}
- compile
-
-
- com.google.code.findbugs
- jsr305
- ${findbugs.version}
+ com.github.stephenc.findbugs
+ findbugs-annotations
+ ${findbugs-annotations}
compile