Skip to content

Commit

Permalink
HBASE-28775 Change the output of DatanodeInfo in the log to the hostname of the datanode (#6148)
Browse files Browse the repository at this point in the history

Co-authored-by: wangxin <wangxin9702@gmail.com>
Signed-off-by: Duo Zhang <zhangduo@apache.org>
Signed-off-by: Nihal Jain <nihaljain@apache.org>
Reviewed-by: Vineet Kumar Maheshwari <vineet.4008@gmail.com>
Reviewed-by: guluo <lupeng_nwpu@qq.com>
  • Loading branch information
1458451310 and wangxin authored Sep 6, 2024
1 parent b19ee00 commit 241bbaf
Show file tree
Hide file tree
Showing 2 changed files with 21 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -35,13 +35,15 @@
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CryptoProtocolVersion;
import org.apache.hadoop.crypto.Encryptor;
Expand Down Expand Up @@ -473,8 +475,10 @@ private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem d
Set<DatanodeInfo> toExcludeNodes =
new HashSet<>(excludeDatanodeManager.getExcludeDNs().keySet());
for (int retry = 0;; retry++) {
LOG.debug("When create output stream for {}, exclude list is {}, retry={}", src,
toExcludeNodes, retry);
if (LOG.isDebugEnabled()) {
LOG.debug("When create output stream for {}, exclude list is {}, retry={}", src,
getDataNodeInfo(toExcludeNodes), retry);
}
HdfsFileStatus stat;
try {
stat = FILE_CREATOR.create(namenode, src,
Expand Down Expand Up @@ -620,4 +624,15 @@ static void sleepIgnoreInterrupt(int retry) {
} catch (InterruptedException e) {
}
}

/**
 * Formats a collection of datanodes for logging as
 * {@code [(hostName/infoAddr:infoPort),...]}; an empty collection renders as {@code []}.
 */
public static String getDataNodeInfo(Collection<DatanodeInfo> datanodeInfos) {
  if (datanodeInfos.isEmpty()) {
    return "[]";
  }
  // Plain concatenation reads more clearly than a StringBuilder chain here;
  // the compiler generates equivalent code for a single-expression concat.
  return datanodeInfos.stream()
    .map(dn -> "(" + dn.getHostName() + "/" + dn.getInfoAddr() + ":" + dn.getInfoPort() + ")")
    .collect(Collectors.joining(",", "[", "]"));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,7 @@
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import org.apache.commons.lang3.mutable.MutableLong;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
Expand All @@ -85,6 +86,7 @@
import org.apache.hadoop.hbase.client.ConnectionUtils;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper;
import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.ipc.ServerCall;
Expand Down Expand Up @@ -1105,7 +1107,8 @@ private Map<byte[], List<byte[]>> rollWriterInternal(boolean force) throws IOExc
tellListenersAboutPostLogRoll(oldPath, newPath);
if (LOG.isDebugEnabled()) {
LOG.debug("Create new " + implClassName + " writer with pipeline: "
+ Arrays.toString(getPipeline()));
+ FanOutOneBlockAsyncDFSOutputHelper
.getDataNodeInfo(Arrays.stream(getPipeline()).collect(Collectors.toList())));
}
// We got a new writer, so reset the slow sync count
lastTimeCheckSlowSync = EnvironmentEdgeManager.currentTime();
Expand Down

0 comments on commit 241bbaf

Please sign in to comment.