Skip to content

Commit 1f71c4a

Browse files
amahussein authored and jeagles committed
HADOOP-17099. Replace Guava Predicate with Java8+ Predicate
Signed-off-by: Jonathan Eagles <jeagles@gmail.com>
1 parent 98fcffe commit 1f71c4a

File tree

10 files changed

+91
-161
lines changed

10 files changed

+91
-161
lines changed

hadoop-build-tools/src/main/resources/checkstyle/checkstyle.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -123,7 +123,7 @@
123123
<property name="regexp" value="true"/>
124124
<property name="illegalPkgs" value="^sun\.[^.]+"/>
125125
<property name="illegalClasses"
126-
value="^com\.google\.common\.base\.(Optional|Function), ^com\.google\.common\.collect\.(ImmutableListMultimap)"/>
126+
value="^com\.google\.common\.base\.(Optional|Function|Predicate), ^com\.google\.common\.collect\.(ImmutableListMultimap)"/>
127127
</module>
128128
<module name="RedundantImport"/>
129129
<module name="UnusedImports"/>

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/MetricsRecords.java

Lines changed: 16 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,8 @@
1818

1919
package org.apache.hadoop.metrics2.impl;
2020

21-
import com.google.common.base.Predicate;
22-
import com.google.common.collect.Iterables;
21+
import java.util.function.Predicate;
22+
import java.util.stream.StreamSupport;
2323
import org.apache.hadoop.metrics2.AbstractMetric;
2424
import org.apache.hadoop.metrics2.MetricsRecord;
2525
import org.apache.hadoop.metrics2.MetricsTag;
@@ -65,16 +65,22 @@ public static void assertMetricNotNull(MetricsRecord record,
6565
resourceLimitMetric);
6666
}
6767

68-
private static MetricsTag getFirstTagByName(MetricsRecord record, String name) {
69-
return Iterables.getFirst(Iterables.filter(record.tags(),
70-
new MetricsTagPredicate(name)), null);
68+
private static MetricsTag getFirstTagByName(MetricsRecord record,
69+
String name) {
70+
if (record.tags() == null) {
71+
return null;
72+
}
73+
return record.tags().stream().filter(
74+
new MetricsTagPredicate(name)).findFirst().orElse(null);
7175
}
7276

7377
private static AbstractMetric getFirstMetricByName(
7478
MetricsRecord record, String name) {
75-
return Iterables.getFirst(
76-
Iterables.filter(record.metrics(), new AbstractMetricPredicate(name)),
77-
null);
79+
if (record.metrics() == null) {
80+
return null;
81+
}
82+
return StreamSupport.stream(record.metrics().spliterator(), false)
83+
.filter(new AbstractMetricPredicate(name)).findFirst().orElse(null);
7884
}
7985

8086
private static class MetricsTagPredicate implements Predicate<MetricsTag> {
@@ -86,7 +92,7 @@ public MetricsTagPredicate(String tagName) {
8692
}
8793

8894
@Override
89-
public boolean apply(MetricsTag input) {
95+
public boolean test(MetricsTag input) {
9096
return input.name().equals(tagName);
9197
}
9298
}
@@ -101,7 +107,7 @@ public AbstractMetricPredicate(
101107
}
102108

103109
@Override
104-
public boolean apply(AbstractMetric input) {
110+
public boolean test(AbstractMetric input) {
105111
return input.name().equals(metricName);
106112
}
107113
}

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java

Lines changed: 10 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -23,9 +23,7 @@
2323
import java.util.*;
2424
import java.util.concurrent.*;
2525
import java.util.concurrent.atomic.*;
26-
27-
import javax.annotation.Nullable;
28-
26+
import java.util.stream.StreamSupport;
2927
import org.junit.Test;
3028
import org.junit.runner.RunWith;
3129

@@ -38,7 +36,6 @@
3836
import static org.junit.Assert.*;
3937
import static org.mockito.Mockito.*;
4038

41-
import com.google.common.base.Predicate;
4239
import com.google.common.base.Supplier;
4340
import com.google.common.collect.Iterables;
4441

@@ -59,7 +56,6 @@
5956
import org.apache.hadoop.metrics2.lib.MutableCounterLong;
6057
import org.apache.hadoop.metrics2.lib.MutableRate;
6158
import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
62-
import org.apache.hadoop.util.StringUtils;
6359
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
6460
import org.slf4j.Logger;
6561
import org.slf4j.LoggerFactory;
@@ -246,13 +242,9 @@ public void run() {
246242
for (Thread t : threads)
247243
t.join();
248244
assertEquals(0L, ms.droppedPubAll.value());
249-
assertTrue(StringUtils.join("\n", Arrays.asList(results)),
250-
Iterables.all(Arrays.asList(results), new Predicate<String>() {
251-
@Override
252-
public boolean apply(@Nullable String input) {
253-
return input.equalsIgnoreCase("Passed");
254-
}
255-
}));
245+
assertTrue(String.join("\n", Arrays.asList(results)),
246+
Arrays.asList(results).stream().allMatch(
247+
input -> input.equalsIgnoreCase("Passed")));
256248
ms.stop();
257249
ms.shutdown();
258250
}
@@ -482,14 +474,12 @@ public Object answer(InvocationOnMock invocation) throws Throwable {
482474
ms.onTimerEvent();
483475
verify(dataSink, timeout(500).times(2)).putMetrics(r1.capture());
484476
List<MetricsRecord> mr = r1.getAllValues();
485-
Number qSize = Iterables.find(mr.get(1).metrics(),
486-
new Predicate<AbstractMetric>() {
487-
@Override
488-
public boolean apply(@Nullable AbstractMetric input) {
489-
assert input != null;
490-
return input.name().equals("Sink_slowSinkQsize");
491-
}
492-
}).value();
477+
Number qSize = StreamSupport.stream(mr.get(1).metrics().spliterator(),
478+
false).filter(
479+
input -> {
480+
assert input != null;
481+
return input.name().equals("Sink_slowSinkQsize");
482+
}).findFirst().get().value();
493483
assertEquals(1, qSize);
494484
} finally {
495485
proceedSignal.countDown();

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/CombinedHostFileManager.java

Lines changed: 26 additions & 52 deletions
Original file line numberDiff line numberDiff line change
@@ -21,9 +21,8 @@
2121
import com.google.common.collect.HashMultimap;
2222
import com.google.common.collect.Multimap;
2323
import com.google.common.collect.UnmodifiableIterator;
24-
import com.google.common.collect.Iterables;
25-
import com.google.common.collect.Collections2;
2624

25+
import java.util.stream.Collectors;
2726
import org.slf4j.Logger;
2827
import org.slf4j.LoggerFactory;
2928
import org.apache.hadoop.conf.Configuration;
@@ -40,7 +39,7 @@
4039
import java.util.Iterator;
4140
import java.util.Map;
4241

43-
import com.google.common.base.Predicate;
42+
4443

4544
import org.apache.hadoop.hdfs.util.CombinedHostsFileReader;
4645

@@ -82,37 +81,26 @@ synchronized void add(InetAddress addr,
8281
// If the includes list is empty, act as if everything is in the
8382
// includes list.
8483
synchronized boolean isIncluded(final InetSocketAddress address) {
85-
return emptyInServiceNodeLists || Iterables.any(
86-
allDNs.get(address.getAddress()),
87-
new Predicate<DatanodeAdminProperties>() {
88-
public boolean apply(DatanodeAdminProperties input) {
89-
return input.getPort() == 0 ||
90-
input.getPort() == address.getPort();
91-
}
92-
});
84+
return emptyInServiceNodeLists || allDNs.get(address.getAddress())
85+
.stream().anyMatch(
86+
input -> input.getPort() == 0 ||
87+
input.getPort() == address.getPort());
9388
}
9489

9590
synchronized boolean isExcluded(final InetSocketAddress address) {
96-
return Iterables.any(allDNs.get(address.getAddress()),
97-
new Predicate<DatanodeAdminProperties>() {
98-
public boolean apply(DatanodeAdminProperties input) {
99-
return input.getAdminState().equals(
100-
AdminStates.DECOMMISSIONED) &&
101-
(input.getPort() == 0 ||
102-
input.getPort() == address.getPort());
103-
}
104-
});
91+
return allDNs.get(address.getAddress()).stream().anyMatch(
92+
input -> input.getAdminState().equals(
93+
AdminStates.DECOMMISSIONED) &&
94+
(input.getPort() == 0 ||
95+
input.getPort() == address.getPort()));
10596
}
10697

10798
synchronized String getUpgradeDomain(final InetSocketAddress address) {
108-
Iterable<DatanodeAdminProperties> datanode = Iterables.filter(
109-
allDNs.get(address.getAddress()),
110-
new Predicate<DatanodeAdminProperties>() {
111-
public boolean apply(DatanodeAdminProperties input) {
112-
return (input.getPort() == 0 ||
113-
input.getPort() == address.getPort());
114-
}
115-
});
99+
Iterable<DatanodeAdminProperties> datanode =
100+
allDNs.get(address.getAddress()).stream().filter(
101+
input -> (input.getPort() == 0 ||
102+
input.getPort() == address.getPort())).collect(
103+
Collectors.toList());
116104
return datanode.iterator().hasNext() ?
117105
datanode.iterator().next().getUpgradeDomain() : null;
118106
}
@@ -127,36 +115,22 @@ public Iterator<InetSocketAddress> iterator() {
127115
}
128116

129117
Iterable<InetSocketAddress> getExcludes() {
130-
return new Iterable<InetSocketAddress>() {
131-
@Override
132-
public Iterator<InetSocketAddress> iterator() {
133-
return new HostIterator(
134-
Collections2.filter(allDNs.entries(),
135-
new Predicate<java.util.Map.Entry<InetAddress,
136-
DatanodeAdminProperties>>() {
137-
public boolean apply(java.util.Map.Entry<InetAddress,
138-
DatanodeAdminProperties> entry) {
139-
return entry.getValue().getAdminState().equals(
140-
AdminStates.DECOMMISSIONED);
141-
}
142-
}
143-
));
144-
}
145-
};
118+
return () -> new HostIterator(
119+
allDNs.entries().stream().filter(
120+
entry -> entry.getValue().getAdminState().equals(
121+
AdminStates.DECOMMISSIONED)).collect(
122+
Collectors.toList()));
146123
}
147124

148125
synchronized long getMaintenanceExpireTimeInMS(
149126
final InetSocketAddress address) {
150-
Iterable<DatanodeAdminProperties> datanode = Iterables.filter(
151-
allDNs.get(address.getAddress()),
152-
new Predicate<DatanodeAdminProperties>() {
153-
public boolean apply(DatanodeAdminProperties input) {
154-
return input.getAdminState().equals(
127+
Iterable<DatanodeAdminProperties> datanode =
128+
allDNs.get(address.getAddress()).stream().filter(
129+
input -> input.getAdminState().equals(
155130
AdminStates.IN_MAINTENANCE) &&
156131
(input.getPort() == 0 ||
157-
input.getPort() == address.getPort());
158-
}
159-
});
132+
input.getPort() == address.getPort())).collect(
133+
Collectors.toList());
160134
// if DN isn't set to maintenance state, ignore MaintenanceExpireTimeInMS
161135
// set in the config.
162136
return datanode.iterator().hasNext() ?

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeResourceChecker.java

Lines changed: 10 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
import java.util.Collection;
2525
import java.util.HashMap;
2626
import java.util.Map;
27-
27+
import java.util.stream.Collectors;
2828
import org.slf4j.Logger;
2929
import org.slf4j.LoggerFactory;
3030
import org.apache.hadoop.classification.InterfaceAudience;
@@ -34,8 +34,6 @@
3434
import org.apache.hadoop.hdfs.server.common.Util;
3535

3636
import com.google.common.annotations.VisibleForTesting;
37-
import com.google.common.collect.Collections2;
38-
import com.google.common.base.Predicate;
3937

4038
/**
4139
*
@@ -116,18 +114,15 @@ public NameNodeResourceChecker(Configuration conf) throws IOException {
116114

117115
Collection<URI> extraCheckedVolumes = Util.stringCollectionAsURIs(conf
118116
.getTrimmedStringCollection(DFSConfigKeys.DFS_NAMENODE_CHECKED_VOLUMES_KEY));
119-
120-
Collection<URI> localEditDirs = Collections2.filter(
121-
FSNamesystem.getNamespaceEditsDirs(conf),
122-
new Predicate<URI>() {
123-
@Override
124-
public boolean apply(URI input) {
125-
if (input.getScheme().equals(NNStorage.LOCAL_URI_SCHEME)) {
126-
return true;
127-
}
128-
return false;
129-
}
130-
});
117+
118+
Collection<URI> localEditDirs =
119+
FSNamesystem.getNamespaceEditsDirs(conf).stream().filter(
120+
input -> {
121+
if (input.getScheme().equals(NNStorage.LOCAL_URI_SCHEME)) {
122+
return true;
123+
}
124+
return false;
125+
}).collect(Collectors.toList());
131126

132127
// Add all the local edits dirs, marking some as required if they are
133128
// configured as such.

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/Snapshot.java

Lines changed: 5 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
import java.util.Arrays;
2525
import java.util.Comparator;
2626
import java.util.Date;
27-
27+
import java.util.stream.Collectors;
2828
import org.apache.hadoop.classification.InterfaceAudience;
2929
import org.apache.hadoop.fs.Path;
3030
import org.apache.hadoop.hdfs.DFSUtil;
@@ -38,9 +38,6 @@
3838
import org.apache.hadoop.hdfs.server.namenode.XAttrFeature;
3939
import org.apache.hadoop.hdfs.util.ReadOnlyList;
4040

41-
import com.google.common.base.Predicate;
42-
import com.google.common.collect.Iterables;
43-
import com.google.common.collect.Lists;
4441
import org.apache.hadoop.security.AccessControlException;
4542

4643
/** Snapshot of a sub-tree in the namesystem. */
@@ -149,20 +146,14 @@ static Snapshot read(DataInput in, FSImageFormat.Loader loader)
149146
static public class Root extends INodeDirectory {
150147
Root(INodeDirectory other) {
151148
// Always preserve ACL, XAttr.
152-
super(other, false, Lists.newArrayList(
153-
Iterables.filter(Arrays.asList(other.getFeatures()), new Predicate<Feature>() {
154-
155-
@Override
156-
public boolean apply(Feature input) {
157-
if (AclFeature.class.isInstance(input)
149+
super(other, false, Arrays.asList(other.getFeatures()).stream().filter(
150+
input -> {
151+
if (AclFeature.class.isInstance(input)
158152
|| XAttrFeature.class.isInstance(input)) {
159153
return true;
160154
}
161155
return false;
162-
}
163-
164-
}))
165-
.toArray(new Feature[0]));
156+
}).collect(Collectors.toList()).toArray(new Feature[0]));
166157
}
167158

168159
@Override

hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java

Lines changed: 3 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,6 @@
7474
import org.slf4j.Logger;
7575
import org.slf4j.LoggerFactory;
7676
import com.google.common.annotations.VisibleForTesting;
77-
import com.google.common.base.Predicate;
7877
import com.google.common.collect.Iterables;
7978
import com.google.common.collect.Sets;
8079

@@ -355,14 +354,9 @@ private Set<File> getFileCandidates(Set<File> candidates,
355354
: this.logAggregationContext.getRolledLogsExcludePattern(),
356355
candidates, true);
357356

358-
Iterable<File> mask =
359-
Iterables.filter(candidates, new Predicate<File>() {
360-
@Override
361-
public boolean apply(File next) {
362-
return !alreadyUploadedLogFiles
363-
.contains(getLogFileMetaData(next));
364-
}
365-
});
357+
Iterable<File> mask = Iterables.filter(candidates, (input) ->
358+
!alreadyUploadedLogFiles
359+
.contains(getLogFileMetaData(input)));
366360
return Sets.newHashSet(mask);
367361
}
368362

0 commit comments

Comments (0)