Skip to content

Commit 98fcffe

Browse files
amahussein authored and jeagles committed
HADOOP-17101. Replace Guava Function with Java8+ Function
Signed-off-by: Jonathan Eagles <jeagles@gmail.com>
1 parent 80046d1 commit 98fcffe

File tree

7 files changed

+44
-71
lines changed

7 files changed

+44
-71
lines changed

hadoop-build-tools/src/main/resources/checkstyle/checkstyle.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -123,7 +123,7 @@
123123
<property name="regexp" value="true"/>
124124
<property name="illegalPkgs" value="^sun\.[^.]+"/>
125125
<property name="illegalClasses"
126-
value="^com\.google\.common\.base\.(Optional)"/>
126+
value="^com\.google\.common\.base\.(Optional|Function), ^com\.google\.common\.collect\.(ImmutableListMultimap)"/>
127127
</module>
128128
<module name="RedundantImport"/>
129129
<module name="UnusedImports"/>

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HostSet.java

Lines changed: 12 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -17,15 +17,12 @@
1717
*/
1818
package org.apache.hadoop.hdfs.server.blockmanagement;
1919

20-
import com.google.common.base.Function;
21-
import com.google.common.base.Joiner;
20+
2221
import com.google.common.base.Preconditions;
2322
import com.google.common.collect.HashMultimap;
24-
import com.google.common.collect.Iterators;
2523
import com.google.common.collect.Multimap;
2624
import com.google.common.collect.UnmodifiableIterator;
2725

28-
import javax.annotation.Nullable;
2926
import java.net.InetAddress;
3027
import java.net.InetSocketAddress;
3128
import java.util.Collection;
@@ -101,14 +98,16 @@ public InetSocketAddress next() {
10198
@Override
10299
public String toString() {
103100
StringBuilder sb = new StringBuilder("HostSet(");
104-
Joiner.on(",").appendTo(sb, Iterators.transform(iterator(),
105-
new Function<InetSocketAddress, String>() {
106-
@Override
107-
public String apply(@Nullable InetSocketAddress addr) {
108-
assert addr != null;
109-
return addr.getAddress().getHostAddress() + ":" + addr.getPort();
110-
}
111-
}));
112-
return sb.append(")").toString();
101+
Iterator<InetSocketAddress> iter = iterator();
102+
String sep = "";
103+
while (iter.hasNext()) {
104+
InetSocketAddress addr = iter.next();
105+
sb.append(sep);
106+
sb.append(addr.getAddress().getHostAddress());
107+
sb.append(':');
108+
sb.append(addr.getPort());
109+
sep = ",";
110+
}
111+
return sb.append(')').toString();
113112
}
114113
}

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -24,8 +24,10 @@
2424
import java.util.Collection;
2525
import java.util.Collections;
2626
import java.util.Comparator;
27+
import java.util.HashMap;
2728
import java.util.LinkedList;
2829
import java.util.List;
30+
import java.util.Map;
2931
import java.util.PriorityQueue;
3032
import java.util.SortedSet;
3133
import java.util.concurrent.CopyOnWriteArrayList;
@@ -38,13 +40,9 @@
3840
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
3941
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
4042
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
41-
4243
import com.google.common.annotations.VisibleForTesting;
4344
import com.google.common.base.Preconditions;
44-
import com.google.common.collect.ImmutableList;
45-
import com.google.common.collect.ImmutableListMultimap;
4645
import com.google.common.collect.Lists;
47-
import com.google.common.collect.Multimaps;
4846
import com.google.common.collect.Sets;
4947

5048
/**
@@ -634,7 +632,7 @@ public void apply(JournalAndStream jas) throws IOException {
634632
*/
635633
public synchronized RemoteEditLogManifest getEditLogManifest(long fromTxId) {
636634
// Collect RemoteEditLogs available from each FileJournalManager
637-
List<RemoteEditLog> allLogs = Lists.newArrayList();
635+
List<RemoteEditLog> allLogs = new ArrayList<>();
638636
for (JournalAndStream j : journals) {
639637
if (j.getManager() instanceof FileJournalManager) {
640638
FileJournalManager fjm = (FileJournalManager)j.getManager();
@@ -645,15 +643,17 @@ public synchronized RemoteEditLogManifest getEditLogManifest(long fromTxId) {
645643
}
646644
}
647645
}
648-
649646
// Group logs by their starting txid
650-
ImmutableListMultimap<Long, RemoteEditLog> logsByStartTxId =
651-
Multimaps.index(allLogs, RemoteEditLog.GET_START_TXID);
647+
final Map<Long, List<RemoteEditLog>> logsByStartTxId = new HashMap<>();
648+
allLogs.forEach(input -> {
649+
long key = RemoteEditLog.GET_START_TXID.apply(input);
650+
logsByStartTxId.computeIfAbsent(key, k-> new ArrayList<>()).add(input);
651+
});
652652
long curStartTxId = fromTxId;
653-
654-
List<RemoteEditLog> logs = Lists.newArrayList();
653+
List<RemoteEditLog> logs = new ArrayList<>();
655654
while (true) {
656-
ImmutableList<RemoteEditLog> logGroup = logsByStartTxId.get(curStartTxId);
655+
List<RemoteEditLog> logGroup =
656+
logsByStartTxId.getOrDefault(curStartTxId, Collections.emptyList());
657657
if (logGroup.isEmpty()) {
658658
// we have a gap in logs - for example because we recovered some old
659659
// storage directory with ancient logs. Clear out any logs we've

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/RemoteEditLog.java

Lines changed: 4 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,8 @@
1717
*/
1818
package org.apache.hadoop.hdfs.server.protocol;
1919

20-
import com.google.common.base.Function;
2120
import com.google.common.collect.ComparisonChain;
21+
import java.util.function.Function;
2222
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
2323

2424
public class RemoteEditLog implements Comparable<RemoteEditLog> {
@@ -82,16 +82,13 @@ public int hashCode() {
8282
}
8383

8484
/**
85-
* Guava <code>Function</code> which applies {@link #getStartTxId()}
85+
* Java <code>Function</code> which applies {@link #getStartTxId()}
8686
*/
8787
public static final Function<RemoteEditLog, Long> GET_START_TXID =
88-
new Function<RemoteEditLog, Long>() {
89-
@Override
90-
public Long apply(RemoteEditLog log) {
88+
log -> {
9189
if (null == log) {
9290
return HdfsServerConstants.INVALID_TXID;
9391
}
9492
return log.getStartTxId();
95-
}
96-
};
93+
};
9794
}

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java

Lines changed: 5 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -37,9 +37,7 @@
3737
import java.util.concurrent.TimeoutException;
3838
import java.util.concurrent.atomic.LongAccumulator;
3939

40-
import com.google.common.base.Function;
4140
import com.google.common.base.Joiner;
42-
import com.google.common.collect.Iterables;
4341

4442
import org.slf4j.Logger;
4543
import org.slf4j.LoggerFactory;
@@ -304,15 +302,11 @@ public static void setFailoverConfigurations(Configuration conf, String logicalN
304302
public static <P extends FailoverProxyProvider<?>> void
305303
setFailoverConfigurations(Configuration conf, String logicalName,
306304
List<InetSocketAddress> nnAddresses, Class<P> classFPP) {
307-
setFailoverConfigurations(conf, logicalName,
308-
Iterables.transform(nnAddresses, new Function<InetSocketAddress, String>() {
309-
310-
// transform the inet address to a simple string
311-
@Override
312-
public String apply(InetSocketAddress addr) {
313-
return "hdfs://" + addr.getHostName() + ":" + addr.getPort();
314-
}
315-
}), classFPP);
305+
final List<String> addresses = new ArrayList();
306+
nnAddresses.forEach(
307+
addr -> addresses.add(
308+
"hdfs://" + addr.getHostName() + ":" + addr.getPort()));
309+
setFailoverConfigurations(conf, logicalName, addresses, classFPP);
316310
}
317311

318312
public static <P extends FailoverProxyProvider<?>>

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java

Lines changed: 9 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -23,8 +23,7 @@
2323
import java.util.Collection;
2424
import java.util.List;
2525
import java.util.Set;
26-
27-
import javax.annotation.Nullable;
26+
import java.util.stream.Collectors;
2827

2928
import org.junit.Assert;
3029

@@ -55,8 +54,6 @@
5554
import org.slf4j.Logger;
5655
import org.slf4j.LoggerFactory;
5756

58-
import com.google.common.base.Function;
59-
import com.google.common.collect.Iterables;
6057
import com.google.common.collect.Lists;
6158
import com.google.common.collect.Sets;
6259

@@ -403,13 +400,10 @@ public static void verifyFileStatuses(List<Path> expectedPaths,
403400
List<FileStatus> fetchedStatuses, final FileSystem localFs) {
404401
Assert.assertEquals(expectedPaths.size(), fetchedStatuses.size());
405402

406-
Iterable<Path> fqExpectedPaths = Iterables.transform(expectedPaths,
407-
new Function<Path, Path>() {
408-
@Override
409-
public Path apply(Path input) {
410-
return localFs.makeQualified(input);
411-
}
412-
});
403+
Iterable<Path> fqExpectedPaths =
404+
expectedPaths.stream().map(
405+
input -> localFs.makeQualified(input)).collect(Collectors.toList());
406+
413407

414408
Set<Path> expectedPathSet = Sets.newHashSet(fqExpectedPaths);
415409
for (FileStatus fileStatus : fetchedStatuses) {
@@ -424,13 +418,10 @@ public Path apply(Path input) {
424418

425419

426420
private void verifySplits(List<String> expected, List<InputSplit> splits) {
427-
Iterable<String> pathsFromSplits = Iterables.transform(splits,
428-
new Function<InputSplit, String>() {
429-
@Override
430-
public String apply(@Nullable InputSplit input) {
431-
return ((FileSplit) input).getPath().toString();
432-
}
433-
});
421+
Iterable<String> pathsFromSplits =
422+
splits.stream().map(
423+
input-> ((FileSplit) input).getPath().toString())
424+
.collect(Collectors.toList());
434425

435426
Set<String> expectedSet = Sets.newHashSet(expected);
436427
for (String splitPathString : pathsFromSplits) {

hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java

Lines changed: 2 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,6 @@
2222
import java.util.HashSet;
2323
import java.util.List;
2424
import java.util.Set;
25-
2625
import org.apache.commons.lang3.Range;
2726
import org.apache.hadoop.classification.InterfaceAudience.Private;
2827
import org.apache.hadoop.classification.InterfaceStability.Unstable;
@@ -35,8 +34,6 @@
3534
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto;
3635
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProtoOrBuilder;
3736

38-
import com.google.common.base.Function;
39-
import com.google.common.collect.Iterables;
4037
import org.apache.hadoop.thirdparty.protobuf.TextFormat;
4138

4239
@Private
@@ -88,13 +85,8 @@ private void mergeLocalToBuilder() {
8885
}
8986
if (applicationStates != null && !applicationStates.isEmpty()) {
9087
builder.clearApplicationStates();
91-
builder.addAllApplicationStates(Iterables.transform(applicationStates,
92-
new Function<YarnApplicationState, YarnApplicationStateProto>() {
93-
@Override
94-
public YarnApplicationStateProto apply(YarnApplicationState input) {
95-
return ProtoUtils.convertToProtoFormat(input);
96-
}
97-
}));
88+
applicationStates.forEach(input ->
89+
builder.addApplicationStates(ProtoUtils.convertToProtoFormat(input)));
9890
}
9991
if (applicationTags != null && !applicationTags.isEmpty()) {
10092
builder.clearApplicationTags();

0 commit comments

Comments (0)