From 6f0761a6681bc3bc8c29621e917d8ba5c026207f Mon Sep 17 00:00:00 2001
From: Duo Zhang
Date: Fri, 16 Sep 2022 22:57:54 +0800
Subject: [PATCH] HBASE-27373 Fix new spotbugs warnings after upgrading spotbugs to 4.7.2 (#4787)

Signed-off-by: Wellington Chevreuil
---
 .../exceptions/ClientExceptionsUtil.java      |  5 ++
 .../shaded/protobuf/RequestConverter.java     | 63 +++----------------
 .../hbase/io/crypto/CryptoCipherProvider.java |  2 +
 .../io/crypto/DefaultCipherProvider.java      |  2 +
 .../hadoop/hbase/io/util/BlockIOUtils.java    |  4 --
 .../hadoop/hbase/security/Superusers.java     | 26 +++++---
 hbase-metrics/pom.xml                         |  6 ++
 .../hbase/metrics/impl/FastLongHistogram.java |  6 +-
 .../apache/hadoop/hbase/rest/RESTServlet.java |  6 +-
 .../org/apache/hadoop/hbase/io/FileLink.java  | 44 +++-----------
 .../hbase/io/hfile/FixedFileTrailer.java      |  2 +-
 .../hadoop/hbase/io/hfile/HFileBlock.java     |  2 +-
 .../hbase/io/hfile/HFilePreadReader.java      |  4 --
 .../hadoop/hbase/io/hfile/HFileUtil.java      | 42 -------------
 .../hbase/io/hfile/PrefetchExecutor.java      |  2 +
 .../hbase/namequeues/NamedQueueRecorder.java  |  4 +-
 .../hbase/quotas/NoOpRegionSizeStore.java     |  2 +
 ...ingSnapshotViolationPolicyEnforcement.java |  2 +
 .../hbase/regionserver/ChunkCreator.java      |  5 +-
 .../hadoop/hbase/regionserver/HMobStore.java  | 21 ++-----
 .../regionserver/NoLimitScannerContext.java   |  6 +-
 .../compactions/StripeCompactionPolicy.java   |  7 ++-
 .../regionserver/wal/ProtobufLogWriter.java   | 11 +---
 .../regionserver/WALEntryStream.java          |  4 ++
 .../security/token/FsDelegationToken.java     |  7 +--
 .../visibility/VisibilityLabelsCache.java     |  8 ++-
 .../hadoop/hbase/util/BloomFilterUtil.java    |  2 +
 .../apache/hadoop/hbase/util/HBaseFsck.java   |  5 +-
 .../hbase/wal/AbstractFSWALProvider.java      |  5 +-
 .../hbase/SingleProcessHBaseCluster.java      |  2 -
 .../apache/hadoop/hbase/MiniHBaseCluster.java |  2 -
 31 files changed, 107 insertions(+), 202 deletions(-)
 delete mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileUtil.java

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ClientExceptionsUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ClientExceptionsUtil.java
index 71999ad269fb..fd9936dc5025 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ClientExceptionsUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ClientExceptionsUtil.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.exceptions;
 
+import com.google.errorprone.annotations.RestrictedApi;
 import java.io.EOFException;
 import java.io.IOException;
 import java.io.SyncFailedException;
@@ -120,6 +121,10 @@ public static Throwable findException(Object exception) {
    * For test only. Usually you should use the {@link #isConnectionException(Throwable)} method
    * below.
    */
+  @RestrictedApi(explanation = "Should only be called in tests", link = "",
+      allowedOnPath = ".*/src/test/.*")
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "test only")
   public static Set<Class<? extends Throwable>> getConnectionExceptionTypes() {
     return CONNECTION_EXCEPTION_TYPES;
   }
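The change above fences a test-only accessor: error-prone's @RestrictedApi turns any call outside src/test into a compile-time error, and the MS_EXPOSE_REP suppression records why exposing the internal static set is acceptable. A minimal, self-contained sketch of the same idiom; the class and method names here are illustrative, not from the patch:

    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;

    import com.google.errorprone.annotations.RestrictedApi;

    // Hypothetical registry; only the annotation pairing mirrors the patch.
    public final class ExceptionTypeRegistry {
      private static final Set<Class<? extends Throwable>> TYPES = new HashSet<>();

      private ExceptionTypeRegistry() {
      }

      // @RestrictedApi makes calls outside src/test fail the build, and the
      // suppression documents why handing out the static set is tolerable there.
      @RestrictedApi(explanation = "Should only be called in tests", link = "",
          allowedOnPath = ".*/src/test/.*")
      @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
          justification = "test only")
      public static Set<Class<? extends Throwable>> getTypesForTesting() {
        return Collections.unmodifiableSet(TYPES);
      }
    }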
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
index 13e85f91c958..f678a43986d7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
@@ -839,32 +839,20 @@ public static CompactRegionRequest buildCompactRegionRequest(byte[] regionName,
     return builder.build();
   }
 
-  /**
-   * @see #buildRollWALWriterRequest()
-   */
-  private static RollWALWriterRequest ROLL_WAL_WRITER_REQUEST =
-    RollWALWriterRequest.newBuilder().build();
-
   /**
    * Create a new RollWALWriterRequest
    * @return a ReplicateWALEntryRequest
    */
   public static RollWALWriterRequest buildRollWALWriterRequest() {
-    return ROLL_WAL_WRITER_REQUEST;
+    return RollWALWriterRequest.getDefaultInstance();
   }
 
-  /**
-   * @see #buildGetServerInfoRequest()
-   */
-  private static GetServerInfoRequest GET_SERVER_INFO_REQUEST =
-    GetServerInfoRequest.newBuilder().build();
-
   /**
    * Create a new GetServerInfoRequest
    * @return a GetServerInfoRequest
    */
   public static GetServerInfoRequest buildGetServerInfoRequest() {
-    return GET_SERVER_INFO_REQUEST;
+    return GetServerInfoRequest.getDefaultInstance();
   }
 
   /**
@@ -1241,18 +1229,12 @@ public static GetClusterStatusRequest buildGetClusterStatusRequest(EnumSet
[...]
-  private static Set<String> superUsers;
-  private static Set<String> superGroups;
+  private static ImmutableSet<String> superUsers;
+  private static ImmutableSet<String> superGroups;
   private static User systemUser;
 
   private Superusers() {
@@ -53,8 +53,8 @@ private Superusers() {
    * @throws IllegalStateException if current user is null
    */
   public static void initialize(Configuration conf) throws IOException {
-    superUsers = new HashSet<>();
-    superGroups = new HashSet<>();
+    ImmutableSet.Builder<String> superUsersBuilder = ImmutableSet.builder();
+    ImmutableSet.Builder<String> superGroupsBuilder = ImmutableSet.builder();
     systemUser = User.getCurrent();
 
     if (systemUser == null) {
@@ -64,17 +64,19 @@ public static void initialize(Configuration conf) throws IOException {
     String currentUser = systemUser.getShortName();
     LOG.trace("Current user name is {}", currentUser);
-    superUsers.add(currentUser);
+    superUsersBuilder.add(currentUser);
 
     String[] superUserList = conf.getStrings(SUPERUSER_CONF_KEY, new String[0]);
     for (String name : superUserList) {
       if (AuthUtil.isGroupPrincipal(name)) {
         // Let's keep the '@' for distinguishing from user.
-        superGroups.add(name);
+        superGroupsBuilder.add(name);
       } else {
-        superUsers.add(name);
+        superUsersBuilder.add(name);
       }
     }
+    superUsers = superUsersBuilder.build();
+    superGroups = superGroupsBuilder.build();
   }
 
   /**
@@ -113,14 +115,20 @@ public static boolean isSuperUser(String user) {
     return superUsers.contains(user) || superGroups.contains(user);
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "immutable")
   public static Collection<String> getSuperUsers() {
     return superUsers;
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "immutable")
   public static Collection<String> getSuperGroups() {
     return superGroups;
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "by design")
   public static User getSystemUser() {
     return systemUser;
   }
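Two patterns recur above. Protobuf messages already maintain one shared, immutable default instance, so RollWALWriterRequest.getDefaultInstance() makes the hand-rolled cached constants redundant. And in Superusers, switching the static fields to ImmutableSet is what makes the MS_EXPOSE_REP suppressions honest: callers may hold the returned set but cannot mutate it. A sketch of the build-then-publish idiom, using plain Guava where HBase itself uses its shaded copy; the holder class is hypothetical:

    import com.google.common.collect.ImmutableSet;

    // Hypothetical holder mirroring Superusers' build-then-publish idiom.
    public final class AdminRegistry {
      private static ImmutableSet<String> admins;

      private AdminRegistry() {
      }

      /** Collect entries into a builder, then publish one immutable snapshot. */
      public static void initialize(Iterable<String> configured) {
        ImmutableSet.Builder<String> builder = ImmutableSet.builder();
        builder.add("current-user"); // stand-in for the User.getCurrent() short name
        builder.addAll(configured);
        admins = builder.build();
      }

      /** Handing out the set is harmless: ImmutableSet rejects mutation. */
      @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
          justification = "immutable")
      public static ImmutableSet<String> getAdmins() {
        return admins;
      }
    }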
diff --git a/hbase-metrics/pom.xml b/hbase-metrics/pom.xml
index ee7a61ed931c..ad8d23676499 100644
--- a/hbase-metrics/pom.xml
+++ b/hbase-metrics/pom.xml
@@ -76,6 +76,12 @@
       <groupId>io.dropwizard.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
+    <dependency>
+      <groupId>com.github.stephenc.findbugs</groupId>
+      <artifactId>findbugs-annotations</artifactId>
+      <scope>compile</scope>
+      <optional>true</optional>
+    </dependency>
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
diff --git a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
index b1b47e3904e8..768435d1eb1b 100644
--- a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
+++ b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
@@ -138,6 +138,8 @@ public void add(long value, long count) {
   /**
    * Computes the quantiles give the ratios.
    */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "FL_FLOATS_AS_LOOP_COUNTERS",
+      justification = "valid usage")
   public long[] getQuantiles(double[] quantiles) {
     if (!hasData) {
       // No data yet.
@@ -266,10 +268,6 @@ public FastLongHistogram(int numOfBins, long min, long max) {
     this.bins = new Bins(bins, numOfBins, 0.01, 0.999);
   }
 
-  private FastLongHistogram(Bins bins) {
-    this.bins = bins;
-  }
-
   /**
    * Adds a value to the histogram.
    */
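Spotbugs 4.7 introduces FL_FLOATS_AS_LOOP_COUNTERS, which flags any floating-point loop counter because accumulated rounding error can change the iteration count. In getQuantiles the fractional counter is intentional, so the warning is suppressed rather than the loop rewritten. The shape of the flagged-but-valid idiom, in a hypothetical helper:

    import java.util.function.DoubleToLongFunction;

    final class QuantileDump {
      // A double loop counter is exactly what FL_FLOATS_AS_LOOP_COUNTERS flags;
      // here the counter is a ratio in (0, 1), so float arithmetic is the point.
      @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "FL_FLOATS_AS_LOOP_COUNTERS",
          justification = "quantile positions are inherently fractional")
      static void printDeciles(DoubleToLongFunction quantileFn) {
        for (double q = 0.1; q < 1.0; q += 0.1) {
          System.out.printf("p%.0f = %d%n", q * 100, quantileFn.applyAsLong(q));
        }
      }
    }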
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
index 66834f361767..79760aead9d9 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
@@ -53,6 +53,8 @@ UserGroupInformation getRealUser() {
   }
 
   /** Returns the RESTServlet singleton instance */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public synchronized static RESTServlet getInstance() {
     assert (INSTANCE != null);
     return INSTANCE;
@@ -66,8 +68,10 @@ public ConnectionCache getConnectionCache() {
   /**
    * @param conf Existing configuration to use in rest servlet
    * @param userProvider the login user provider
-   * @return the RESTServlet singleton instance n
+   * @return the RESTServlet singleton instance
    */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public synchronized static RESTServlet getInstance(Configuration conf, UserProvider userProvider)
     throws IOException {
     if (INSTANCE == null) {
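RESTServlet keeps its lazily initialized singleton and simply annotates the accessors: under spotbugs 4.7, any method returning shared static state trips MS_EXPOSE_REP, deliberate singletons included. The bare pattern, in a hypothetical class:

    // Hypothetical lazily initialized singleton in the RESTServlet style.
    public final class ServerHolder {
      private static ServerHolder INSTANCE;

      private ServerHolder() {
      }

      // Returning shared static state trips MS_EXPOSE_REP in spotbugs 4.7,
      // so the intentional singleton accessor carries a suppression.
      @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
          justification = "singleton pattern")
      public static synchronized ServerHolder getInstance() {
        if (INSTANCE == null) {
          INSTANCE = new ServerHolder();
        }
        return INSTANCE;
      }
    }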
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
index 8fbaa7753915..86f8f9353348 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java
@@ -117,12 +117,10 @@ public int read() throws IOException {
       res = in.read();
     } catch (FileNotFoundException e) {
       res = tryOpen().read();
-    } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-      res = tryOpen().read();
-    } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-      res = tryOpen().read();
     }
-    if (res > 0) pos += 1;
+    if (res > 0) {
+      pos += 1;
+    }
     return res;
   }
 
@@ -138,12 +136,10 @@ public int read(byte[] b, int off, int len) throws IOException {
       n = in.read(b, off, len);
     } catch (FileNotFoundException e) {
       n = tryOpen().read(b, off, len);
-    } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-      n = tryOpen().read(b, off, len);
-    } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-      n = tryOpen().read(b, off, len);
     }
-    if (n > 0) pos += n;
+    if (n > 0) {
+      pos += n;
+    }
     assert (in.getPos() == pos);
     return n;
   }
 
@@ -155,10 +151,6 @@ public int read(long position, byte[] buffer, int offset, int length) throws IOE
       n = in.read(position, buffer, offset, length);
     } catch (FileNotFoundException e) {
       n = tryOpen().read(position, buffer, offset, length);
-    } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-      n = tryOpen().read(position, buffer, offset, length);
-    } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-      n = tryOpen().read(position, buffer, offset, length);
     }
     return n;
   }
 
@@ -174,10 +166,6 @@ public void readFully(long position, byte[] buffer, int offset, int length) thro
       in.readFully(position, buffer, offset, length);
     } catch (FileNotFoundException e) {
       tryOpen().readFully(position, buffer, offset, length);
-    } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-      tryOpen().readFully(position, buffer, offset, length);
-    } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-      tryOpen().readFully(position, buffer, offset, length);
     }
   }
 
@@ -189,13 +177,11 @@ public long skip(long n) throws IOException {
       skipped = in.skip(n);
     } catch (FileNotFoundException e) {
       skipped = tryOpen().skip(n);
-    } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-      skipped = tryOpen().skip(n);
-    } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-      skipped = tryOpen().skip(n);
     }
-    if (skipped > 0) pos += skipped;
+    if (skipped > 0) {
+      pos += skipped;
+    }
     return skipped;
   }
 
@@ -205,10 +191,6 @@ public int available() throws IOException {
       return in.available();
     } catch (FileNotFoundException e) {
       return tryOpen().available();
-    } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-      return tryOpen().available();
-    } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-      return tryOpen().available();
     }
   }
 
@@ -218,10 +200,6 @@ public void seek(long pos) throws IOException {
       in.seek(pos);
     } catch (FileNotFoundException e) {
       tryOpen().seek(pos);
-    } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-      tryOpen().seek(pos);
-    } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-      tryOpen().seek(pos);
     }
     this.pos = pos;
   }
 
@@ -238,10 +216,6 @@ public boolean seekToNewSource(long targetPos) throws IOException {
       res = in.seekToNewSource(targetPos);
     } catch (FileNotFoundException e) {
       res = tryOpen().seekToNewSource(targetPos);
-    } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-      res = tryOpen().seekToNewSource(targetPos);
-    } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-      res = tryOpen().seekToNewSource(targetPos);
     }
     if (res) pos = targetPos;
     return res;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
index b358ad606893..134485135ede 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
@@ -388,7 +388,7 @@ public static FixedFileTrailer readFromStream(FSDataInputStream istream, long fi
       bufferSize = (int) fileSize;
     }
 
-    HFileUtil.seekOnMultipleSources(istream, seekPoint);
+    istream.seek(seekPoint);
     ByteBuffer buf = ByteBuffer.allocate(bufferSize);
     istream.readFully(buf.array(), buf.arrayOffset(), buf.arrayOffset() + buf.limit());
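FileLink's stream wrapper keeps only the retry that still matters: FileNotFoundException, raised when the underlying file has been moved by compaction or archiving. The NullPointerException and AssertionError catches existed solely for HDFS 1.x's DFSInputStream.getBlockAt and are gone. A reduced sketch of the surviving retry shape; the class and method names are ours:

    import java.io.FileNotFoundException;
    import java.io.IOException;
    import java.io.InputStream;

    // Hypothetical reduction of FileLink's input stream: after the patch, only
    // FileNotFoundException triggers the fallback re-open; NPE and AssertionError
    // propagate, since the HDFS 1.x bugs they papered over are long gone.
    abstract class RetryingStream {
      protected InputStream in;

      /** Re-resolves the link and returns a fresh stream (details elided). */
      protected abstract InputStream tryOpen() throws IOException;

      public int read() throws IOException {
        int res;
        try {
          res = in.read();
        } catch (FileNotFoundException e) {
          // The referenced file moved (e.g. archived); retry once via re-open.
          res = tryOpen().read();
        }
        return res;
      }
    }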
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index 8e04580874fe..097aaff27c84 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -1471,7 +1471,7 @@ protected boolean readAtOffset(FSDataInputStream istream, ByteBuff dest, int siz
       boolean peekIntoNextBlock, long fileOffset, boolean pread) throws IOException {
       if (!pread) {
         // Seek + read. Better for scanning.
-        HFileUtil.seekOnMultipleSources(istream, fileOffset);
+        istream.seek(fileOffset);
         long realOffset = istream.getPos();
         if (realOffset != fileOffset) {
           throw new IOException("Tried to seek to " + fileOffset + " to read " + size
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePreadReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePreadReader.java
index 25627c34f510..98401c46bee7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePreadReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePreadReader.java
@@ -73,10 +73,6 @@ public void run() {
           if (LOG.isTraceEnabled()) {
             LOG.trace("Prefetch " + getPathOffsetEndStr(path, offset, end), e);
           }
-        } catch (NullPointerException e) {
-          LOG.warn(
-            "Stream moved/closed or prefetch cancelled?" + getPathOffsetEndStr(path, offset, end),
-            e);
         } catch (Exception e) {
           // Other exceptions are interesting
           LOG.warn("Prefetch " + getPathOffsetEndStr(path, offset, end), e);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileUtil.java
deleted file mode 100644
index 612f127e11ef..000000000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileUtil.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.io.hfile;
-
-import java.io.IOException;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.yetus.audience.InterfaceAudience;
-
-@InterfaceAudience.Private
-class HFileUtil {
-
-  /**
-   * guards against NullPointer utility which tries to seek on the DFSIS and will try an alternative
-   * source if the FSDataInputStream throws an NPE HBASE-17501 nnn
-   */
-  static public void seekOnMultipleSources(FSDataInputStream istream, long offset)
-    throws IOException {
-    try {
-      // attempt to seek inside of current blockReader
-      istream.seek(offset);
-    } catch (NullPointerException e) {
-      // retry the seek on an alternate copy of the data
-      // this can occur if the blockReader on the DFSInputStream is null
-      istream.seekToNewSource(offset);
-    }
-  }
-}
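HFileUtil.seekOnMultipleSources existed only to catch an NPE that HDFS 1.x could throw from DFSInputStream during a seek (HBASE-17501) and fall back to seekToNewSource. With that workaround deleted, callers seek directly and verify the resulting position, as HFileBlock.readAtOffset does. The same shape in isolation; the helper name is ours:

    import java.io.IOException;
    import org.apache.hadoop.fs.FSDataInputStream;

    // Sketch of the seek shape that remains after deleting HFileUtil: seek
    // directly, then validate the position the stream actually landed on.
    final class SeekHelper {
      static void seekTo(FSDataInputStream istream, long fileOffset) throws IOException {
        istream.seek(fileOffset);
        long realOffset = istream.getPos();
        if (realOffset != fileOffset) {
          throw new IOException(
            "Tried to seek to " + fileOffset + " but arrived at " + realOffset);
        }
      }
    }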
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
index 8f10a0f97824..9aafe7a7b6e5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java
@@ -145,6 +145,8 @@ public static boolean isCompleted(Path path) {
     return true;
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "OBL_UNSATISFIED_OBLIGATION",
+      justification = "false positive, try-with-resources ensures close is called.")
   public static void persistToFile(String path) throws IOException {
     prefetchedFileListPath = path;
     if (prefetchedFileListPath == null) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/namequeues/NamedQueueRecorder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/namequeues/NamedQueueRecorder.java
index 38f63fd09bec..efe512b1a856 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/namequeues/NamedQueueRecorder.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/namequeues/NamedQueueRecorder.java
@@ -43,7 +43,7 @@ public class NamedQueueRecorder {
   private final Disruptor<RingBufferEnvelope> disruptor;
   private final LogEventHandler logEventHandler;
 
-  private static NamedQueueRecorder namedQueueRecorder;
+  private static volatile NamedQueueRecorder namedQueueRecorder;
   private static boolean isInit = false;
   private static final Object LOCK = new Object();
@@ -71,6 +71,8 @@ private NamedQueueRecorder(Configuration conf) {
     this.disruptor.start();
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public static NamedQueueRecorder getInstance(Configuration conf) {
     if (namedQueueRecorder != null) {
       return namedQueueRecorder;
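NamedQueueRecorder's singleton field becomes volatile, which is what makes its check-then-lock initialization safe: without volatile, a second thread could observe a partially constructed instance through the unsynchronized fast path. (PrefetchExecutor's OBL_UNSATISFIED_OBLIGATION suppression above is the other recurring shape: a false positive on a stream that try-with-resources does close.) The double-checked locking skeleton, reduced to its essentials:

    // Skeleton of the volatile double-checked locking used by NamedQueueRecorder.
    public final class Recorder {
      // volatile guarantees readers on the unlocked fast path see a fully
      // constructed object, never a half-initialized one.
      private static volatile Recorder instance;
      private static final Object LOCK = new Object();

      private Recorder() {
      }

      @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
          justification = "singleton pattern")
      public static Recorder getInstance() {
        if (instance != null) {
          return instance; // fast path, no lock taken
        }
        synchronized (LOCK) {
          if (instance == null) {
            instance = new Recorder();
          }
          return instance;
        }
      }
    }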
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoOpRegionSizeStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoOpRegionSizeStore.java
index dcc32d766b9a..cb463b8729d3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoOpRegionSizeStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoOpRegionSizeStore.java
@@ -32,6 +32,8 @@ public final class NoOpRegionSizeStore implements RegionSizeStore {
   private NoOpRegionSizeStore() {
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public static NoOpRegionSizeStore getInstance() {
     return INSTANCE;
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java
index c747d0c8b3d9..732318ac870e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/MissingSnapshotViolationPolicyEnforcement.java
@@ -39,6 +39,8 @@ public final class MissingSnapshotViolationPolicyEnforcement
   private MissingSnapshotViolationPolicyEnforcement() {
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public static SpaceViolationPolicyEnforcement getInstance() {
     return SINGLETON;
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
index a0dbf4e59e9b..ba9cd9d13ec7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
@@ -114,7 +114,8 @@ private void initializePools(int chunkSize, long globalMemStoreSize, float poolS
    * @param heapMemoryManager the heapmemory manager
    * @return singleton MSLABChunkCreator
    */
-  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "LI_LAZY_INIT_STATIC",
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(
+      value = { "LI_LAZY_INIT_STATIC", "MS_EXPOSE_REP" },
     justification = "Method is called by single thread at the starting of RS")
   public static ChunkCreator initialize(int chunkSize, boolean offheap, long globalMemStoreSize,
     float poolSizePercentage, float initialCountPercentage, HeapMemoryManager heapMemoryManager,
@@ -127,6 +128,8 @@ public static ChunkCreator initialize(int chunkSize, boolean offheap, long globa
     return instance;
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public static ChunkCreator getInstance() {
     return instance;
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
index 13b7cc022bb9..b5396110db2e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
@@ -400,7 +400,7 @@ public List<Path> getLocations(TableName tableName) throws IOException {
   private MobCell readCell(List<Path> locations, String fileName, Cell search,
     boolean cacheMobBlocks, long readPt, boolean readEmptyValueOnMobCellMiss) throws IOException {
     FileSystem fs = getFileSystem();
-    Throwable throwable = null;
+    IOException ioe = null;
     for (Path location : locations) {
       MobFile file = null;
       Path path = new Path(location, fileName);
@@ -411,7 +411,7 @@ private MobCell readCell(List<Path> locations, String fileName, Cell search,
           : file.readCell(search, cacheMobBlocks);
       } catch (IOException e) {
         mobFileCache.evictFile(fileName);
-        throwable = e;
+        ioe = e;
         if (
           (e instanceof FileNotFoundException) || (e.getCause() instanceof FileNotFoundException)
         ) {
@@ -422,14 +422,6 @@ private MobCell readCell(List<Path> locations, String fileName, Cell search,
         } else {
           throw e;
         }
-      } catch (NullPointerException e) { // HDFS 1.x - DFSInputStream.getBlockAt()
-        mobFileCache.evictFile(fileName);
-        LOG.debug("Fail to read the cell", e);
-        throwable = e;
-      } catch (AssertionError e) { // assert in HDFS 1.x - DFSInputStream.getBlockAt()
-        mobFileCache.evictFile(fileName);
-        LOG.debug("Fail to read the cell", e);
-        throwable = e;
       } finally {
         if (file != null) {
           mobFileCache.closeFile(file);
         }
@@ -441,18 +433,15 @@ private MobCell readCell(List<Path> locations, String fileName, Cell search,
     if (readEmptyValueOnMobCellMiss) {
       return null;
     } else if (
-      (throwable instanceof FileNotFoundException)
-        || (throwable.getCause() instanceof FileNotFoundException)
+      (ioe instanceof FileNotFoundException) || (ioe.getCause() instanceof FileNotFoundException)
     ) {
       // The region is re-opened when FileNotFoundException is thrown.
       // This is not necessary when MOB files cannot be found, because the store files
       // in a region only contain the references to MOB files and a re-open on a region
       // doesn't help fix the lost MOB files.
-      throw new DoNotRetryIOException(throwable);
-    } else if (throwable instanceof IOException) {
-      throw (IOException) throwable;
+      throw new DoNotRetryIOException(ioe);
     } else {
-      throw new IOException(throwable);
+      throw ioe;
     }
   }
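In HMobStore.readCell, the only failure that can now reach the tracking variable is an IOException, so typing the field as Throwable, and re-wrapping it on rethrow, no longer pays its way. Schematically; the reader and the String locations are illustrative stand-ins:

    import java.io.IOException;
    import java.util.List;

    // Schematic of the narrowed readCell error path: the last failure is kept
    // as an IOException, so the rethrow needs no instanceof checks or wrapping.
    abstract class MultiSourceReader {
      abstract byte[] readFrom(String location) throws IOException;

      byte[] readAny(List<String> locations) throws IOException {
        IOException lastFailure = null;
        for (String location : locations) {
          try {
            return readFrom(location);
          } catch (IOException e) {
            lastFailure = e; // remember, then try the next copy
          }
        }
        // Already an IOException, unlike the old Throwable-typed field.
        throw lastFailure != null ? lastFailure : new IOException("no locations given");
      }
    }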
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoLimitScannerContext.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoLimitScannerContext.java
index 5b92cc07b57a..94c973743058 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoLimitScannerContext.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoLimitScannerContext.java
@@ -45,9 +45,11 @@ public NoLimitScannerContext() {
   private static final ScannerContext NO_LIMIT = new NoLimitScannerContext();
 
   /**
-   * @return The static, immutable instance of {@link NoLimitScannerContext} to be used whenever
-   *         limits should not be enforced
+   * Returns the static, immutable instance of {@link NoLimitScannerContext} to be used whenever
+   * limits should not be enforced
    */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public static final ScannerContext getInstance() {
     return NO_LIMIT;
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
index adcd41f3e267..f5be2b380382 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java
@@ -421,6 +421,8 @@ public static long getTotalFileSize(final Collection<HStoreFile> candidates) {
     return totalSize;
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "FL_FLOATS_AS_LOOP_COUNTERS",
+      justification = "valid usage")
   private Pair<Long, Integer> estimateTargetKvs(Collection<HStoreFile> files, double splitCount) {
     // If the size is larger than what we target, we don't want to split into proportionally
     // larger parts and then have to split again very soon. So, we will increase the multiplier
@@ -433,7 +435,10 @@ private Pair<Long, Integer> estimateTargetKvs(Collection<HStoreFile> files, doub
     while (ratio > 1.0) {
       // Ratio of real to desired size if we increase the multiplier.
       double newRatio = totalSize / ((splitCount + 1.0) * targetPartSize);
-      if ((1.0 / newRatio) >= ratio) break; // New ratio is < 1.0, but further than the last one.
+      if ((1.0 / newRatio) >= ratio) {
+        // New ratio is < 1.0, but further than the last one.
+        break;
+      }
       ratio = newRatio;
       splitCount += 1.0;
     }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
index a94449fe01e7..212788c940ed 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
@@ -66,15 +66,10 @@ public void append(Entry entry) throws IOException {
   @Override
   public void close() throws IOException {
     if (this.output != null) {
-      try {
-        if (!trailerWritten) {
-          writeWALTrailer();
-        }
-        this.output.close();
-      } catch (NullPointerException npe) {
-        // Can get a NPE coming up from down in DFSClient$DFSOutputStream#close
-        LOG.warn(npe.toString(), npe);
+      if (!trailerWritten) {
+        writeWALTrailer();
       }
+      this.output.close();
       this.output = null;
     }
   }
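ProtobufLogWriter.close() drops the NPE catch that once guarded against DFSClient's DFSOutputStream#close; the trailer-then-close ordering is unchanged, and a failure now surfaces as an IOException instead of being logged and swallowed. Reduced to its skeleton; names only loosely mirror the real class:

    import java.io.IOException;
    import java.io.OutputStream;

    // Sketch of the simplified close(): write the trailer if it is still
    // pending, then close and null the stream.
    abstract class TrailerWriter {
      protected OutputStream output;
      protected boolean trailerWritten;

      abstract void writeTrailer() throws IOException;

      public void close() throws IOException {
        if (output != null) {
          if (!trailerWritten) {
            writeTrailer(); // leave a trailer so readers can detect clean ends
          }
          output.close();
          output = null;
        }
      }
    }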
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
index 1d9f868e52fd..42509eaebff1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java
@@ -321,6 +321,8 @@ private void handleFileNotFound(Path path, FileNotFoundException fnfe) throws IO
     }
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "DCN_NULLPOINTER_EXCEPTION",
+      justification = "HDFS-4380")
   private void openReader(Path path) throws IOException {
     try {
       // Detect if this is a new file, if so get a new reader else
@@ -371,6 +373,8 @@ public boolean progress() {
     }
   }
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "DCN_NULLPOINTER_EXCEPTION",
+      justification = "HDFS-4380")
   private void resetReader() throws IOException {
     try {
       currentEntry = null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java
index 1eb88a3d12f4..51961a92370d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java
@@ -98,12 +98,7 @@ public void acquireDelegationToken(final String tokenKind, final FileSystem fs)
       userToken = userProvider.getCurrent().getToken(tokenKind, fs.getCanonicalServiceName());
       if (userToken == null) {
         hasForwardedToken = false;
-        try {
-          userToken = fs.getDelegationToken(renewer);
-        } catch (NullPointerException npe) {
-          // we need to handle NullPointerException in case HADOOP-10009 is missing
-          LOG.error("Failed to get token for " + renewer);
-        }
+        userToken = fs.getDelegationToken(renewer);
       } else {
         hasForwardedToken = true;
         LOG.info("Use the existing token: " + userToken);
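WALEntryStream and AbstractFSWALProvider take the opposite tack from the deletions elsewhere: their NPE catches guard a real, still-relevant HDFS bug (HDFS-4380), so the catches stay and the new DCN_NULLPOINTER_EXCEPTION detector is silenced with a justification naming the bug. A schematic of the annotated catch; the conversion to IOException is our illustration, not necessarily what WALEntryStream does after its catch:

    final class GuardedReads {
      // The catch is deliberate: the annotation plus justification documents
      // that, unlike the deleted HDFS 1.x workarounds, this NPE source still
      // exists upstream.
      @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "DCN_NULLPOINTER_EXCEPTION",
          justification = "HDFS-4380")
      static byte[] readRecord(java.io.DataInput in) throws java.io.IOException {
        try {
          byte[] buf = new byte[8];
          in.readFully(buf);
          return buf;
        } catch (NullPointerException e) {
          // Translate the known upstream NPE into a checked exception.
          throw new java.io.IOException("reader failed, see HDFS-4380", e);
        }
      }
    }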
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
index 22168c5513f4..4b1d2f4d84f2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
@@ -75,9 +75,11 @@ private VisibilityLabelsCache(ZKWatcher watcher, Configuration conf) throws IOEx
   }
 
   /**
-   * Creates the singleton instance, if not yet present, and returns the same. nn * @return
-   * Singleton instance of VisibilityLabelsCache n
+   * Creates the singleton instance, if not yet present, and returns the same.
+   * @return Singleton instance of VisibilityLabelsCache
    */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public synchronized static VisibilityLabelsCache createAndGet(ZKWatcher watcher,
     Configuration conf) throws IOException {
     // VisibilityLabelService#init() for different regions (in same RS) passes same instance of
@@ -96,6 +98,8 @@ public synchronized static VisibilityLabelsCache createAndGet(ZKWatcher watcher,
    * @return Singleton instance of VisibilityLabelsCache n * when this is called before calling
    *         {@link #createAndGet(ZKWatcher, Configuration)}
    */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "MS_EXPOSE_REP",
+      justification = "singleton pattern")
   public static VisibilityLabelsCache get() {
     // By the time this method is called, the singleton instance of VisibilityLabelsCache should
     // have been created.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
index b35e8258ddfb..7b8a5cd241ab 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterUtil.java
@@ -76,6 +76,8 @@ public static long computeBitSize(long maxKeys, double errorRate) {
    * This gets used in {@link #contains(ByteBuff, int, int, Hash, int, HashKey)}
    * @param random The random number source to use, or null to compute actual hashes
    */
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "EI_EXPOSE_STATIC_REP2",
+      justification = "ignore for now, improve TestCompoundBloomFilter later")
   public static void setRandomGeneratorForTest(Random random) {
     randomGeneratorForTest = random;
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 41505b92c6af..75572b4c53e3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -983,12 +983,9 @@ private void adoptHdfsOrphan(HbckRegionInfo hi) throws IOException {
       start = CellUtil.cloneRow(startKv.get());
       Optional<Cell> endKv = hf.getLastKey();
       end = CellUtil.cloneRow(endKv.get());
-    } catch (IOException ioe) {
+    } catch (Exception ioe) {
       LOG.warn("Problem reading orphan file " + hfile + ", skipping");
       continue;
-    } catch (NullPointerException ioe) {
-      LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");
-      continue;
     } finally {
       if (hf != null) {
         hf.close();
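BloomFilterUtil keeps its test hook and suppresses EI_EXPOSE_STATIC_REP2, which fires when a caller-supplied object is stored into static state; the justification openly defers a cleaner design to later work on TestCompoundBloomFilter. The hook pattern in isolation; the class name is ours:

    import java.util.Random;

    // Hypothetical holder showing BloomFilterUtil's test-hook shape.
    public final class HashSource {
      private static Random randomForTest;

      private HashSource() {
      }

      // Storing a caller-supplied object into static state is
      // EI_EXPOSE_STATIC_REP2; the suppression acknowledges the hook rather
      // than redesigning it here.
      @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "EI_EXPOSE_STATIC_REP2",
          justification = "test hook, cleaner design deferred")
      public static void setRandomForTest(Random random) {
        randomForTest = random; // null switches back to real hashing
      }
    }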
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
index be9fb23fa3c7..327009504607 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
@@ -504,8 +504,9 @@ public static Path findArchivedLog(Path path, Configuration conf) throws IOExcep
    * @param conf configuration
    * @return WAL Reader instance
    */
-  public static org.apache.hadoop.hbase.wal.WAL.Reader openReader(Path path, Configuration conf)
-    throws IOException {
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "DCN_NULLPOINTER_EXCEPTION",
+      justification = "HDFS-4380")
+  public static WAL.Reader openReader(Path path, Configuration conf) throws IOException {
     long retryInterval = 2000; // 2 sec
     int maxAttempts = 30;
     int attempt = 0;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/SingleProcessHBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/SingleProcessHBaseCluster.java
index 5e72464ecb55..92db27fdbe3f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/SingleProcessHBaseCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/SingleProcessHBaseCluster.java
@@ -210,8 +210,6 @@ public void run() {
       try {
         LOG.info("Hook closing fs=" + this.fs);
         this.fs.close();
-      } catch (NullPointerException npe) {
-        LOG.debug("Need to fix these: " + npe.toString());
       } catch (IOException e) {
         LOG.warn("Running hook", e);
       }
diff --git a/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/MiniHBaseCluster.java b/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
index 8e66c94e33b1..4a98bb31e164 100644
--- a/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
+++ b/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
@@ -216,8 +216,6 @@ public void run() {
       try {
         LOG.info("Hook closing fs=" + this.fs);
         this.fs.close();
-      } catch (NullPointerException npe) {
-        LOG.debug("Need to fix these: " + npe.toString());
       } catch (IOException e) {
         LOG.warn("Running hook", e);
       }
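Both mini-cluster shutdown hooks lose their "Need to fix these" NPE catch, leaving IOException as the only failure treated as routine during close; anything else now fails loudly. The hook, reduced to a standalone sketch; the wrapper class is ours:

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.slf4j.Logger;

    // Sketch of the slimmed shutdown hook used by the mini-cluster classes.
    final class FsCloseHook implements Runnable {
      private final FileSystem fs;
      private final Logger log;

      FsCloseHook(FileSystem fs, Logger log) {
        this.fs = fs;
        this.log = log;
      }

      @Override
      public void run() {
        try {
          log.info("Hook closing fs=" + fs);
          fs.close();
        } catch (IOException e) {
          log.warn("Running hook", e);
        }
      }
    }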