diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ClientExceptionsUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ClientExceptionsUtil.java
index 71999ad269f..fd9936dc502 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ClientExceptionsUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ClientExceptionsUtil.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.exceptions;
 
+import com.google.errorprone.annotations.RestrictedApi;
 import java.io.EOFException;
 import java.io.IOException;
 import java.io.SyncFailedException;
@@ -120,6 +121,10 @@ public final class ClientExceptionsUtil {
    * For test only. Usually you should use the {@link #isConnectionException(Throwable)} method
    * below.
    */
+  @RestrictedApi(explanation = "Should only be called in tests", link = "",
+      allowedOnPath = ".*/src/test/.*")
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "test only")
   public static Set<Class<? extends Throwable>> getConnectionExceptionTypes() {
     return CONNECTION_EXCEPTION_TYPES;
   }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
index 13e85f91c95..f678a43986d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
@@ -839,32 +839,20 @@
     return builder.build();
   }
 
-  /**
-   * @see #buildRollWALWriterRequest()
-   */
-  private static RollWALWriterRequest ROLL_WAL_WRITER_REQUEST =
-    RollWALWriterRequest.newBuilder().build();
-
   /**
    * Create a new RollWALWriterRequest
    * @return a ReplicateWALEntryRequest
    */
   public static RollWALWriterRequest buildRollWALWriterRequest() {
-    return ROLL_WAL_WRITER_REQUEST;
+    return RollWALWriterRequest.getDefaultInstance();
   }
 
-  /**
-   * @see #buildGetServerInfoRequest()
-   */
-  private static GetServerInfoRequest GET_SERVER_INFO_REQUEST =
-    GetServerInfoRequest.newBuilder().build();
-
   /**
    * Create a new GetServerInfoRequest
    * @return a GetServerInfoRequest
    */
   public static GetServerInfoRequest buildGetServerInfoRequest() {
-    return GET_SERVER_INFO_REQUEST;
+    return GetServerInfoRequest.getDefaultInstance();
   }
 
   /**
@@ -1241,18 +1229,12 @@
       .addAllOptions(ClusterMetricsBuilder.toOptions(options)).build();
   }
 
-  /**
-   * @see #buildCatalogScanRequest
-   */
-  private static final RunCatalogScanRequest CATALOG_SCAN_REQUEST =
-    RunCatalogScanRequest.newBuilder().build();
-
   /**
    * Creates a request for running a catalog scan
    * @return A {@link RunCatalogScanRequest}
    */
   public static RunCatalogScanRequest buildCatalogScanRequest() {
-    return CATALOG_SCAN_REQUEST;
+    return RunCatalogScanRequest.getDefaultInstance();
   }
 
   /**
@@ -1263,32 +1245,20 @@
     return EnableCatalogJanitorRequest.newBuilder().setEnable(enable).build();
   }
 
-  /**
-   * @see #buildIsCatalogJanitorEnabledRequest()
-   */
-  private static final IsCatalogJanitorEnabledRequest IS_CATALOG_JANITOR_ENABLED_REQUEST =
-    IsCatalogJanitorEnabledRequest.newBuilder().build();
-
   /**
    * Creates a request for querying the master whether the catalog janitor is enabled
    * @return A {@link IsCatalogJanitorEnabledRequest}
    */
   public static IsCatalogJanitorEnabledRequest buildIsCatalogJanitorEnabledRequest() {
-    return IS_CATALOG_JANITOR_ENABLED_REQUEST;
+    return IsCatalogJanitorEnabledRequest.getDefaultInstance();
   }
 
-  /**
-   * @see #buildRunCleanerChoreRequest()
-   */
-  private static final RunCleanerChoreRequest CLEANER_CHORE_REQUEST =
-    RunCleanerChoreRequest.newBuilder().build();
-
   /**
    * Creates a request for running cleaner chore
    * @return A {@link RunCleanerChoreRequest}
   */
   public static RunCleanerChoreRequest buildRunCleanerChoreRequest() {
-    return CLEANER_CHORE_REQUEST;
+    return RunCleanerChoreRequest.getDefaultInstance();
   }
 
   /**
@@ -1299,18 +1269,12 @@
     return SetCleanerChoreRunningRequest.newBuilder().setOn(on).build();
   }
 
-  /**
-   * @see #buildIsCleanerChoreEnabledRequest()
-   */
-  private static final IsCleanerChoreEnabledRequest IS_CLEANER_CHORE_ENABLED_REQUEST =
-    IsCleanerChoreEnabledRequest.newBuilder().build();
-
   /**
    * Creates a request for querying the master whether the cleaner chore is enabled
    * @return A {@link IsCleanerChoreEnabledRequest}
   */
   public static IsCleanerChoreEnabledRequest buildIsCleanerChoreEnabledRequest() {
-    return IS_CLEANER_CHORE_ENABLED_REQUEST;
+    return IsCleanerChoreEnabledRequest.getDefaultInstance();
   }
 
   /**
@@ -1530,34 +1494,25 @@
     return builder.build();
   }
 
-  private static final GetSpaceQuotaRegionSizesRequest GET_SPACE_QUOTA_REGION_SIZES_REQUEST =
-    GetSpaceQuotaRegionSizesRequest.newBuilder().build();
-
   /**
    * Returns a {@link GetSpaceQuotaRegionSizesRequest} object.
   */
   public static GetSpaceQuotaRegionSizesRequest buildGetSpaceQuotaRegionSizesRequest() {
-    return GET_SPACE_QUOTA_REGION_SIZES_REQUEST;
+    return GetSpaceQuotaRegionSizesRequest.getDefaultInstance();
   }
 
-  private static final GetSpaceQuotaSnapshotsRequest GET_SPACE_QUOTA_SNAPSHOTS_REQUEST =
-    GetSpaceQuotaSnapshotsRequest.newBuilder().build();
-
   /**
    * Returns a {@link GetSpaceQuotaSnapshotsRequest} object.
   */
   public static GetSpaceQuotaSnapshotsRequest buildGetSpaceQuotaSnapshotsRequest() {
-    return GET_SPACE_QUOTA_SNAPSHOTS_REQUEST;
+    return GetSpaceQuotaSnapshotsRequest.getDefaultInstance();
   }
 
-  private static final GetQuotaStatesRequest GET_QUOTA_STATES_REQUEST =
-    GetQuotaStatesRequest.newBuilder().build();
-
   /**
    * Returns a {@link GetQuotaStatesRequest} object.
   */
   public static GetQuotaStatesRequest buildGetQuotaStatesRequest() {
-    return GET_QUOTA_STATES_REQUEST;
+    return GetQuotaStatesRequest.getDefaultInstance();
   }
 
   public static DecommissionRegionServersRequest
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java
index 8eb4bdea71b..7f5c58883f2 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java
@@ -30,6 +30,8 @@ public final class CryptoCipherProvider implements CipherProvider {
 
   private static CryptoCipherProvider instance;
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public static CryptoCipherProvider getInstance() {
     if (instance != null) {
       return instance;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java
index 33e19575d94..a4c3f3b7ca8 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java
@@ -30,6 +30,8 @@ public final class DefaultCipherProvider implements CipherProvider {
 
   private static DefaultCipherProvider instance;
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public static DefaultCipherProvider getInstance() {
     if (instance != null) {
       return instance;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/BlockIOUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/BlockIOUtils.java
index 86c6317556b..9641c72dfbc 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/BlockIOUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/BlockIOUtils.java
@@ -367,10 +367,6 @@ public final class BlockIOUtils {
     } catch (InvocationTargetException e) {
       throw new IOException("Encountered an exception when invoking ByteBuffer positioned read"
         + " when trying to read " + bytesRead + " bytes from position " + position, e);
-    } catch (NullPointerException e) {
-      throw new IOException("something is null");
-    } catch (Exception e) {
-      throw e;
     }
     if (ret < 0) {
       throw new IOException("Premature EOF from inputStream (positional read returned " + ret
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java
index 417c59c7d02..d4f3f16478f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java
@@ -19,14 +19,14 @@ package org.apache.hadoop.hbase.security;
 
 import java.io.IOException;
 import java.util.Collection;
-import java.util.HashSet;
-import java.util.Set;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.AuthUtil;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
+
 /**
  * Keeps lists of superusers and super groups loaded from HBase configuration, checks if certain
  * user is regarded as superuser.
@@ -38,8 +38,8 @@ public final class Superusers {
   /** Configuration key for superusers */
   public static final String SUPERUSER_CONF_KEY = "hbase.superuser"; // Not getting a name
 
-  private static Set<String> superUsers;
-  private static Set<String> superGroups;
+  private static ImmutableSet<String> superUsers;
+  private static ImmutableSet<String> superGroups;
   private static User systemUser;
 
   private Superusers() {
@@ -53,8 +53,8 @@ public final class Superusers {
    * @throws IllegalStateException if current user is null
   */
   public static void initialize(Configuration conf) throws IOException {
-    superUsers = new HashSet<>();
-    superGroups = new HashSet<>();
+    ImmutableSet.Builder<String> superUsersBuilder = ImmutableSet.builder();
+    ImmutableSet.Builder<String> superGroupsBuilder = ImmutableSet.builder();
     systemUser = User.getCurrent();
 
     if (systemUser == null) {
@@ -64,17 +64,19 @@ public final class Superusers {
 
     String currentUser = systemUser.getShortName();
     LOG.trace("Current user name is {}", currentUser);
-    superUsers.add(currentUser);
+    superUsersBuilder.add(currentUser);
 
     String[] superUserList = conf.getStrings(SUPERUSER_CONF_KEY, new String[0]);
     for (String name : superUserList) {
       if (AuthUtil.isGroupPrincipal(name)) {
         // Let's keep the '@' for distinguishing from user.
-        superGroups.add(name);
+        superGroupsBuilder.add(name);
       } else {
-        superUsers.add(name);
+        superUsersBuilder.add(name);
       }
     }
+    superUsers = superUsersBuilder.build();
+    superGroups = superGroupsBuilder.build();
   }
 
   /**
@@ -113,14 +115,20 @@ public final class Superusers {
     return superUsers.contains(user) || superGroups.contains(user);
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "immutable")
   public static Collection<String> getSuperUsers() {
     return superUsers;
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "immutable")
   public static Collection<String> getSuperGroups() {
     return superGroups;
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "by design")
   public static User getSystemUser() {
     return systemUser;
   }
diff --git a/hbase-metrics/pom.xml b/hbase-metrics/pom.xml
index ee7a61ed931..ad8d2367649 100644
--- a/hbase-metrics/pom.xml
+++ b/hbase-metrics/pom.xml
@@ -76,6 +76,12 @@
       <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
+    <dependency>
+      <groupId>com.github.stephenc.findbugs</groupId>
+      <artifactId>findbugs-annotations</artifactId>
+      <scope>compile</scope>
+      <optional>true</optional>
+    </dependency>
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
diff --git a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
index b1b47e3904e..768435d1eb1 100644
--- a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
+++ b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
@@ -138,6 +138,8 @@ public class FastLongHistogram {
   /**
    * Computes the quantiles give the ratios.
   */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "FL_FLOATS_AS_LOOP_COUNTERS",
+      justification = "valid usage")
   public long[] getQuantiles(double[] quantiles) {
     if (!hasData) {
       // No data yet.
@@ -266,10 +268,6 @@ public class FastLongHistogram {
     this.bins = new Bins(bins, numOfBins, 0.01, 0.999);
   }
 
-  private FastLongHistogram(Bins bins) {
-    this.bins = bins;
-  }
-
   /**
    * Adds a value to the histogram.
   */
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
index 66834f36176..79760aead9d 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
@@ -53,6 +53,8 @@ public class RESTServlet implements Constants {
   }
 
   /** Returns the RESTServlet singleton instance */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public synchronized static RESTServlet getInstance() {
     assert (INSTANCE != null);
     return INSTANCE;
@@ -66,8 +68,10 @@ public class RESTServlet implements Constants {
   /**
    * @param conf Existing configuration to use in rest servlet
    * @param userProvider the login user provider
-   * @return the RESTServlet singleton instance n
+   * @return the RESTServlet singleton instance
   */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public synchronized static RESTServlet getInstance(Configuration conf, UserProvider userProvider)
     throws IOException {
     if (INSTANCE == null) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
index 8fbaa775391..86f8f935334 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
@@ -117,12 +117,10 @@ public class FileLink {
         res = in.read();
       } catch (FileNotFoundException e) {
         res = tryOpen().read();
-      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-        res = tryOpen().read();
-      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-        res = tryOpen().read();
       }
-      if (res > 0) pos += 1;
+      if (res > 0) {
+        pos += 1;
+      }
       return res;
     }
 
@@ -138,12 +136,10 @@ public class FileLink {
         n = in.read(b, off, len);
       } catch (FileNotFoundException e) {
         n = tryOpen().read(b, off, len);
-      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-        n = tryOpen().read(b, off, len);
-      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-        n = tryOpen().read(b, off, len);
       }
-      if (n > 0) pos += n;
+      if (n > 0) {
+        pos += n;
+      }
       assert (in.getPos() == pos);
       return n;
     }
@@ -155,10 +151,6 @@ public class FileLink {
         n = in.read(position, buffer, offset, length);
       } catch (FileNotFoundException e) {
         n = tryOpen().read(position, buffer, offset, length);
-      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-        n = tryOpen().read(position, buffer, offset, length);
-      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-        n = tryOpen().read(position, buffer, offset, length);
       }
       return n;
     }
@@ -174,10 +166,6 @@ public class FileLink {
         in.readFully(position, buffer, offset, length);
       } catch (FileNotFoundException e) {
         tryOpen().readFully(position, buffer, offset, length);
-      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-        tryOpen().readFully(position, buffer, offset, length);
-      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-        tryOpen().readFully(position, buffer, offset, length);
       }
     }
 
@@ -189,13 +177,11 @@ public class FileLink {
         skipped = in.skip(n);
       } catch (FileNotFoundException e) {
         skipped = tryOpen().skip(n);
-      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-        skipped = tryOpen().skip(n);
-      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-        skipped = tryOpen().skip(n);
       }
-      if (skipped > 0) pos += skipped;
+      if (skipped > 0) {
+        pos += skipped;
+      }
       return skipped;
     }
 
@@ -205,10 +191,6 @@ public class FileLink {
         return in.available();
       } catch (FileNotFoundException e) {
         return tryOpen().available();
-      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-        return tryOpen().available();
-      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-        return tryOpen().available();
       }
     }
 
@@ -218,10 +200,6 @@ public class FileLink {
         in.seek(pos);
       } catch (FileNotFoundException e) {
         tryOpen().seek(pos);
-      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-        tryOpen().seek(pos);
-      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-        tryOpen().seek(pos);
       }
       this.pos = pos;
     }
@@ -238,10 +216,6 @@ public class FileLink {
         res = in.seekToNewSource(targetPos);
       } catch (FileNotFoundException e) {
         res = tryOpen().seekToNewSource(targetPos);
-      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-        res = tryOpen().seekToNewSource(targetPos);
-      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-        res = tryOpen().seekToNewSource(targetPos);
       }
       if (res) pos = targetPos;
       return res;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
index b358ad60689..134485135ed 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
@@ -388,7 +388,7 @@ public class FixedFileTrailer {
       bufferSize = (int) fileSize;
     }
 
-    HFileUtil.seekOnMultipleSources(istream, seekPoint);
+    istream.seek(seekPoint);
     ByteBuffer buf = ByteBuffer.allocate(bufferSize);
     istream.readFully(buf.array(), buf.arrayOffset(), buf.arrayOffset() + buf.limit());
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index 8e04580874f..097aaff27c8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -1471,7 +1471,7 @@ public class HFileBlock implements Cacheable {
       boolean peekIntoNextBlock, long fileOffset, boolean pread) throws IOException {
       if (!pread) {
         // Seek + read. Better for scanning.
-        HFileUtil.seekOnMultipleSources(istream, fileOffset);
+        istream.seek(fileOffset);
         long realOffset = istream.getPos();
         if (realOffset != fileOffset) {
           throw new IOException("Tried to seek to " + fileOffset + " to read " + size
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePreadReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePreadReader.java
index 25627c34f51..98401c46bee 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePreadReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePreadReader.java
@@ -73,10 +73,6 @@ public class HFilePreadReader extends HFileReaderImpl {
           if (LOG.isTraceEnabled()) {
            LOG.trace("Prefetch " + getPathOffsetEndStr(path, offset, end), e);
           }
-        } catch (NullPointerException e) {
-          LOG.warn(
-            "Stream moved/closed or prefetch cancelled?" + getPathOffsetEndStr(path, offset, end),
-            e);
         } catch (Exception e) {
           // Other exceptions are interesting
           LOG.warn("Prefetch " + getPathOffsetEndStr(path, offset, end), e);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileUtil.java
deleted file mode 100644
index 612f127e11e..00000000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileUtil.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.io.hfile;
-
-import java.io.IOException;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.yetus.audience.InterfaceAudience;
-
-@InterfaceAudience.Private
-class HFileUtil {
-
-  /**
-   * guards against NullPointer utility which tries to seek on the DFSIS and will try an alternative
-   * source if the FSDataInputStream throws an NPE HBASE-17501 nnn
-   */
-  static public void seekOnMultipleSources(FSDataInputStream istream, long offset)
-    throws IOException {
-    try {
-      // attempt to seek inside of current blockReader
-      istream.seek(offset);
-    } catch (NullPointerException e) {
-      // retry the seek on an alternate copy of the data
-      // this can occur if the blockReader on the DFSInputStream is null
-      istream.seekToNewSource(offset);
-    }
-  }
-}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
index 8f10a0f9782..9aafe7a7b6e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
@@ -145,6 +145,8 @@ public final class PrefetchExecutor {
     return true;
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "OBL_UNSATISFIED_OBLIGATION",
+      justification = "false positive, try-with-resources ensures close is called.")
   public static void persistToFile(String path) throws IOException {
     prefetchedFileListPath = path;
     if (prefetchedFileListPath == null) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/namequeues/NamedQueueRecorder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/namequeues/NamedQueueRecorder.java
index 38f63fd09be..efe512b1a85 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/namequeues/NamedQueueRecorder.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/namequeues/NamedQueueRecorder.java
@@ -43,7 +43,7 @@ public class NamedQueueRecorder {
   private final Disruptor<RingBufferEnvelope> disruptor;
   private final LogEventHandler logEventHandler;
 
-  private static NamedQueueRecorder namedQueueRecorder;
+  private static volatile NamedQueueRecorder namedQueueRecorder;
   private static boolean isInit = false;
   private static final Object LOCK = new Object();
 
@@ -71,6 +71,8 @@ public class NamedQueueRecorder {
     this.disruptor.start();
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public static NamedQueueRecorder getInstance(Configuration conf) {
     if (namedQueueRecorder != null) {
       return namedQueueRecorder;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoOpRegionSizeStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoOpRegionSizeStore.java
index dcc32d766b9..cb463b8729d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoOpRegionSizeStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoOpRegionSizeStore.java
@@ -32,6 +32,8 @@ public final class NoOpRegionSizeStore implements RegionSizeStore {
   private NoOpRegionSizeStore() {
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public static NoOpRegionSizeStore getInstance() {
     return INSTANCE;
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java
index c747d0c8b3d..732318ac870 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java
@@ -39,6 +39,8 @@ public final class MissingSnapshotViolationPolicyEnforcement
   private MissingSnapshotViolationPolicyEnforcement() {
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public static SpaceViolationPolicyEnforcement getInstance() {
     return SINGLETON;
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
index a0dbf4e59e9..ba9cd9d13ec 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
@@ -114,7 +114,8 @@ public class ChunkCreator {
    * @param heapMemoryManager the heapmemory manager
    * @return singleton MSLABChunkCreator
   */
-  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "LI_LAZY_INIT_STATIC",
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(
+      value = { "LI_LAZY_INIT_STATIC", "MS_EXPOSE_REP" },
     justification = "Method is called by single thread at the starting of RS")
   public static ChunkCreator initialize(int chunkSize, boolean offheap, long globalMemStoreSize,
     float poolSizePercentage, float initialCountPercentage, HeapMemoryManager heapMemoryManager,
@@ -127,6 +128,8 @@ public class ChunkCreator {
     return instance;
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public static ChunkCreator getInstance() {
     return instance;
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
index 13b7cc022bb..b5396110db2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
@@ -400,7 +400,7 @@ public class HMobStore extends HStore {
   private MobCell readCell(List<Path> locations, String fileName, Cell search,
     boolean cacheMobBlocks, long readPt, boolean readEmptyValueOnMobCellMiss) throws IOException {
     FileSystem fs = getFileSystem();
-    Throwable throwable = null;
+    IOException ioe = null;
     for (Path location : locations) {
       MobFile file = null;
       Path path = new Path(location, fileName);
@@ -411,7 +411,7 @@
           : file.readCell(search, cacheMobBlocks);
       } catch (IOException e) {
         mobFileCache.evictFile(fileName);
-        throwable = e;
+        ioe = e;
         if (
           (e instanceof FileNotFoundException) || (e.getCause() instanceof FileNotFoundException)
         ) {
@@ -422,14 +422,6 @@
         } else {
           throw e;
         }
-      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-        mobFileCache.evictFile(fileName);
-        LOG.debug("Fail to read the cell", e);
-        throwable = e;
-      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-        mobFileCache.evictFile(fileName);
-        LOG.debug("Fail to read the cell", e);
-        throwable = e;
       } finally {
         if (file != null) {
          mobFileCache.closeFile(file);
@@ -441,18 +433,15 @@ public class HMobStore extends HStore {
     if (readEmptyValueOnMobCellMiss) {
       return null;
     } else if (
-      (throwable instanceof FileNotFoundException)
-        || (throwable.getCause() instanceof FileNotFoundException)
+      (ioe instanceof FileNotFoundException) || (ioe.getCause() instanceof FileNotFoundException)
     ) {
       // The region is re-opened when FileNotFoundException is thrown.
       // This is not necessary when MOB files cannot be found, because the store files
       // in a region only contain the references to MOB files and a re-open on a region
       // doesn't help fix the lost MOB files.
-      throw new DoNotRetryIOException(throwable);
-    } else if (throwable instanceof IOException) {
-      throw (IOException) throwable;
+      throw new DoNotRetryIOException(ioe);
     } else {
-      throw new IOException(throwable);
+      throw ioe;
     }
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoLimitScannerContext.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoLimitScannerContext.java
index 5b92cc07b57..94c97374305 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoLimitScannerContext.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoLimitScannerContext.java
@@ -45,9 +45,11 @@ public class NoLimitScannerContext extends ScannerContext {
   private static final ScannerContext NO_LIMIT = new NoLimitScannerContext();
 
   /**
-   * @return The static, immutable instance of {@link NoLimitScannerContext} to be used whenever
-   *         limits should not be enforced
+   * Returns the static, immutable instance of {@link NoLimitScannerContext} to be used whenever
+   * limits should not be enforced
   */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public static final ScannerContext getInstance() {
     return NO_LIMIT;
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
index adcd41f3e26..f5be2b38038 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
@@ -421,6 +421,8 @@ public class StripeCompactionPolicy extends CompactionPolicy {
     return totalSize;
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "FL_FLOATS_AS_LOOP_COUNTERS",
+      justification = "valid usage")
   private Pair<Long, Integer> estimateTargetKvs(Collection<HStoreFile> files, double splitCount) {
     // If the size is larger than what we target, we don't want to split into proportionally
     // larger parts and then have to split again very soon. So, we will increase the multiplier
@@ -433,7 +435,10 @@ public class StripeCompactionPolicy extends CompactionPolicy {
     while (ratio > 1.0) {
       // Ratio of real to desired size if we increase the multiplier.
       double newRatio = totalSize / ((splitCount + 1.0) * targetPartSize);
-      if ((1.0 / newRatio) >= ratio) break; // New ratio is < 1.0, but further than the last one.
+      if ((1.0 / newRatio) >= ratio) {
+        // New ratio is < 1.0, but further than the last one.
+        break;
+      }
       ratio = newRatio;
       splitCount += 1.0;
     }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
index a94449fe01e..212788c940e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
@@ -66,15 +66,10 @@ public class ProtobufLogWriter extends AbstractProtobufLogWriter implements FSHL
   @Override
   public void close() throws IOException {
     if (this.output != null) {
-      try {
-        if (!trailerWritten) {
-          writeWALTrailer();
-        }
-        this.output.close();
-      } catch (NullPointerException npe) {
-        // Can get a NPE coming up from down in DFSClient$DFSOutputStream#close
-        LOG.warn(npe.toString(), npe);
+      if (!trailerWritten) {
+        writeWALTrailer();
       }
+      this.output.close();
       this.output = null;
     }
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
index 1d9f868e52f..42509eaebff 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
@@ -321,6 +321,8 @@ class WALEntryStream implements Closeable {
     }
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "DCN_NULLPOINTER_EXCEPTION",
+      justification = "HDFS-4380")
   private void openReader(Path path) throws IOException {
     try {
       // Detect if this is a new file, if so get a new reader else
@@ -371,6 +373,8 @@ class WALEntryStream implements Closeable {
     }
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "DCN_NULLPOINTER_EXCEPTION",
+      justification = "HDFS-4380")
   private void resetReader() throws IOException {
     try {
       currentEntry = null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java
index 1eb88a3d12f..51961a92370 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java
@@ -98,12 +98,7 @@ public class FsDelegationToken {
       userToken = userProvider.getCurrent().getToken(tokenKind, fs.getCanonicalServiceName());
       if (userToken == null) {
         hasForwardedToken = false;
-        try {
-          userToken = fs.getDelegationToken(renewer);
-        } catch (NullPointerException npe) {
-          // we need to handle NullPointerException in case HADOOP-10009 is missing
-          LOG.error("Failed to get token for " + renewer);
-        }
+        userToken = fs.getDelegationToken(renewer);
       } else {
         hasForwardedToken = true;
         LOG.info("Use the existing token: " + userToken);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
index 22168c5513f..4b1d2f4d84f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
@@ -75,9 +75,11 @@ public class VisibilityLabelsCache implements VisibilityLabelOrdinalProvider {
   }
 
   /**
-   * Creates the singleton instance, if not yet present, and returns the same. nn * @return
-   * Singleton instance of VisibilityLabelsCache n
+   * Creates the singleton instance, if not yet present, and returns the same.
+   * @return Singleton instance of VisibilityLabelsCache
   */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public synchronized static VisibilityLabelsCache createAndGet(ZKWatcher watcher,
     Configuration conf) throws IOException {
     // VisibilityLabelService#init() for different regions (in same RS) passes same instance of
@@ -96,6 +98,8 @@ public class VisibilityLabelsCache implements VisibilityLabelOrdinalProvider {
    * @return Singleton instance of VisibilityLabelsCache n when this is called before calling
    *         {@link #createAndGet(ZKWatcher, Configuration)}
   */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public static VisibilityLabelsCache get() {
     // By the time this method is called, the singleton instance of VisibilityLabelsCache should
     // have been created.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
index b35e8258ddf..7b8a5cd241a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
@@ -76,6 +76,8 @@ public final class BloomFilterUtil {
    * This gets used in {@link #contains(ByteBuff, int, int, Hash, int, HashKey)}
    * @param random The random number source to use, or null to compute actual hashes
   */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "EI_EXPOSE_STATIC_REP2",
+      justification = "ignore for now, improve TestCompoundBloomFilter later")
   public static void setRandomGeneratorForTest(Random random) {
     randomGeneratorForTest = random;
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 41505b92c6a..75572b4c53e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -983,12 +983,9 @@ public class HBaseFsck extends Configured implements Closeable {
           start = CellUtil.cloneRow(startKv.get());
           Optional<Cell> endKv = hf.getLastKey();
           end = CellUtil.cloneRow(endKv.get());
-        } catch (IOException ioe) {
+        } catch (Exception ioe) {
           LOG.warn("Problem reading orphan file " + hfile + ", skipping");
           continue;
-        } catch (NullPointerException ioe) {
-          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");
-          continue;
         } finally {
           if (hf != null) {
             hf.close();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
index be9fb23fa3c..32700950460 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
@@ -504,8 +504,9 @@ public abstract class AbstractFSWALProvider<T extends AbstractFSWAL<?>> implemen
    * @param conf configuration
    * @return WAL Reader instance
   */
-  public static org.apache.hadoop.hbase.wal.WAL.Reader openReader(Path path, Configuration conf)
-    throws IOException {
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "DCN_NULLPOINTER_EXCEPTION",
+      justification = "HDFS-4380")
+  public static WAL.Reader openReader(Path path, Configuration conf) throws IOException {
     long retryInterval = 2000; // 2 sec
     int maxAttempts = 30;
     int attempt = 0;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/SingleProcessHBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/SingleProcessHBaseCluster.java
index 5e72464ecb5..92db27fdbe3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/SingleProcessHBaseCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/SingleProcessHBaseCluster.java
@@ -210,8 +210,6 @@ public class SingleProcessHBaseCluster extends HBaseClusterInterface {
       try {
         LOG.info("Hook closing fs=" + this.fs);
         this.fs.close();
-      } catch (NullPointerException npe) {
-        LOG.debug("Need to fix these: " + npe.toString());
       } catch (IOException e) {
         LOG.warn("Running hook", e);
       }
diff --git a/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/MiniHBaseCluster.java b/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
index 8e66c94e33b..4a98bb31e16 100644
--- a/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
+++ b/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
@@ -216,8 +216,6 @@ public class MiniHBaseCluster extends HBaseCluster {
       try {
         LOG.info("Hook closing fs=" + this.fs);
         this.fs.close();
-      } catch (NullPointerException npe) {
-        LOG.debug("Need to fix these: " + npe.toString());
       } catch (IOException e) {
         LOG.warn("Running hook", e);
       }