Skip to content

Commit 1b29c9b

Browse files
authored
HADOOP-17138. Fix spotbugs warnings surfaced after upgrade to 4.0.6. (#2155)
1 parent d23cc9d commit 1b29c9b

File tree

9 files changed

+50
-27
lines changed

9 files changed

+50
-27
lines changed

hadoop-cloud-storage-project/hadoop-cos/dev-support/findbugs-exclude.xml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,8 @@
1616
-->
1717
<FindBugsFilter>
1818
<Match>
19-
<Class name="org.apache.hadoop.fs.cosn.CosNInputStream.ReadBuffer"/>
19+
<Class name="org.apache.hadoop.fs.cosn.CosNInputStream$ReadBuffer"/>
2020
<Method name="getBuffer"/>
21-
<Bug pattern="EI_EXPOSE_REP"/>
21+
<Bug pattern="EI_EXPOSE_REP"/>
2222
</Match>
2323
</FindBugsFilter>

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3714,7 +3714,7 @@ void incrUserConnections(String user) {
37143714
if (count == null) {
37153715
count = 1;
37163716
} else {
3717-
count++;
3717+
count = count + 1;
37183718
}
37193719
userToConnectionsMap.put(user, count);
37203720
}
@@ -3726,7 +3726,7 @@ void decrUserConnections(String user) {
37263726
if (count == null) {
37273727
return;
37283728
} else {
3729-
count--;
3729+
count = count - 1;
37303730
}
37313731
if (count == 0) {
37323732
userToConnectionsMap.remove(user);

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/DatasetVolumeChecker.java

Lines changed: 21 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -354,23 +354,29 @@ private class ResultHandler
354354
}
355355

356356
@Override
357-
public void onSuccess(@Nonnull VolumeCheckResult result) {
358-
switch(result) {
359-
case HEALTHY:
360-
case DEGRADED:
361-
LOG.debug("Volume {} is {}.", reference.getVolume(), result);
362-
markHealthy();
363-
break;
364-
case FAILED:
365-
LOG.warn("Volume {} detected as being unhealthy",
357+
public void onSuccess(VolumeCheckResult result) {
358+
if (result == null) {
359+
LOG.error("Unexpected health check result null for volume {}",
366360
reference.getVolume());
367-
markFailed();
368-
break;
369-
default:
370-
LOG.error("Unexpected health check result {} for volume {}",
371-
result, reference.getVolume());
372361
markHealthy();
373-
break;
362+
} else {
363+
switch(result) {
364+
case HEALTHY:
365+
case DEGRADED:
366+
LOG.debug("Volume {} is {}.", reference.getVolume(), result);
367+
markHealthy();
368+
break;
369+
case FAILED:
370+
LOG.warn("Volume {} detected as being unhealthy",
371+
reference.getVolume());
372+
markFailed();
373+
break;
374+
default:
375+
LOG.error("Unexpected health check result {} for volume {}",
376+
result, reference.getVolume());
377+
markHealthy();
378+
break;
379+
}
374380
}
375381
cleanup();
376382
}

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/ThrottledAsyncChecker.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -166,7 +166,7 @@ private void addResultCachingCallback(
166166
Checkable<K, V> target, ListenableFuture<V> lf) {
167167
Futures.addCallback(lf, new FutureCallback<V>() {
168168
@Override
169-
public void onSuccess(@Nullable V result) {
169+
public void onSuccess(V result) {
170170
synchronized (ThrottledAsyncChecker.this) {
171171
checksInProgress.remove(target);
172172
completedChecks.put(target, new LastCheckResult<>(

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1238,7 +1238,7 @@ private void incrOpCount(FSEditLogOpCodes opCode,
12381238
holder = new Holder<Integer>(1);
12391239
opCounts.put(opCode, holder);
12401240
} else {
1241-
holder.held++;
1241+
holder.held = holder.held + 1;
12421242
}
12431243
counter.increment();
12441244
}

hadoop-mapreduce-project/dev-support/findbugs-exclude.xml

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -533,5 +533,17 @@
533533
<Class name="org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage$1" />
534534
<Bug pattern="SE_BAD_FIELD_INNER_CLASS" />
535535
</Match>
536-
536+
537+
<!--
538+
HADOOP-17138: Suppress warnings about unchecked Nullable
539+
since the method catches NullPointerException then registerError.
540+
-->
541+
<Match>
542+
<Or>
543+
<Class name="org.apache.hadoop.mapred.LocatedFileStatusFetcher$ProcessInputDirCallback" />
544+
<Class name="org.apache.hadoop.mapred.LocatedFileStatusFetcher$ProcessInitialInputPathCallback" />
545+
</Or>
546+
<Method name="onSuccess" />
547+
<Bug pattern="NP_PARAMETER_MUST_BE_NONNULL_BUT_MARKED_AS_NULLABLE" />
548+
</Match>
537549
</FindBugsFilter>

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -813,7 +813,7 @@ private void increaseQueueAppNum(String queue) throws YarnException {
813813
if (appNum == null) {
814814
appNum = 1;
815815
} else {
816-
appNum++;
816+
appNum = appNum + 1;
817817
}
818818

819819
queueAppNumMap.put(queueName, appNum);

hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -705,4 +705,10 @@
705705
<Method name="getDevices" />
706706
<Bug pattern="DMI_HARDCODED_ABSOLUTE_FILENAME" />
707707
</Match>
708+
709+
<!-- Suppress warning about anonymous class for mocking. -->
710+
<Match>
711+
<Class name="~org\.apache\.hadoop\.yarn\.server\.timelineservice\.reader\.TestTimelineReaderWebServicesHBaseStorage.*" />
712+
<Bug pattern="UMAC_UNCALLABLE_METHOD_OF_ANONYMOUS_CLASS" />
713+
</Match>
708714
</FindBugsFilter>

hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestTimelineReaderHBaseDown.java

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -181,14 +181,13 @@ private static void waitForHBaseDown(HBaseTimelineReaderImpl htr) throws
181181
}
182182
}
183183

184-
private static void checkQuery(HBaseTimelineReaderImpl htr) throws
185-
IOException {
184+
private static Set<TimelineEntity> checkQuery(HBaseTimelineReaderImpl htr)
185+
throws IOException {
186186
TimelineReaderContext context =
187187
new TimelineReaderContext(YarnConfiguration.DEFAULT_RM_CLUSTER_ID,
188188
null, null, null, null, TimelineEntityType
189189
.YARN_FLOW_ACTIVITY.toString(), null, null);
190-
Set<TimelineEntity> entities = htr.getEntities(context, MONITOR_FILTERS,
191-
DATA_TO_RETRIEVE);
190+
return htr.getEntities(context, MONITOR_FILTERS, DATA_TO_RETRIEVE);
192191
}
193192

194193
private static void configure(HBaseTestingUtility util) {

0 commit comments

Comments (0)