Commit 85a3532

HDFS-15970. Print network topology on the web (#2896)
1 parent e66d7d9 commit 85a3532
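
This commit adds a NetworkTopologyServlet that prints the NameNode's view of the network topology at /topology, in plain-text or JSON form depending on the request's Accept header, and links the new page from the NameNode web UI (dfshealth.html and explorer.html).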

5 files changed, +395 -2 lines changed

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java

Lines changed: 4 additions & 2 deletions
@@ -166,7 +166,7 @@ void start() throws IOException {
 
     httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn);
     httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
-    setupServlets(httpServer, conf);
+    setupServlets(httpServer);
     httpServer.start();
 
     int connIdx = 0;
@@ -243,7 +243,7 @@ void setAliasMap(InMemoryAliasMap aliasMap) {
     httpServer.setAttribute(ALIASMAP_ATTRIBUTE_KEY, aliasMap);
   }
 
-  private static void setupServlets(HttpServer2 httpServer, Configuration conf) {
+  private static void setupServlets(HttpServer2 httpServer) {
     httpServer.addInternalServlet("startupProgress",
         StartupProgressServlet.PATH_SPEC, StartupProgressServlet.class);
     httpServer.addInternalServlet("fsck", "/fsck", FsckServlet.class,
@@ -253,6 +253,8 @@ private static void setupServlets(HttpServer2 httpServer, Configuration conf) {
     httpServer.addInternalServlet(IsNameNodeActiveServlet.SERVLET_NAME,
         IsNameNodeActiveServlet.PATH_SPEC,
         IsNameNodeActiveServlet.class);
+    httpServer.addInternalServlet("topology",
+        NetworkTopologyServlet.PATH_SPEC, NetworkTopologyServlet.class);
   }
 
   static FSImage getFsImageFromContext(ServletContext context) {

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java

Lines changed: 187 additions & 0 deletions
@@ -0,0 +1,187 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.namenode;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.net.Node;
import org.apache.hadoop.net.NodeBase;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.util.StringUtils;

import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.HttpHeaders;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;

/**
 * A servlet to print out the network topology.
 */
@InterfaceAudience.Private
public class NetworkTopologyServlet extends DfsServlet {

  public static final String PATH_SPEC = "/topology";

  protected static final String FORMAT_JSON = "json";
  protected static final String FORMAT_TEXT = "text";

  @Override
  public void doGet(HttpServletRequest request, HttpServletResponse response)
      throws IOException {
    final ServletContext context = getServletContext();

    String format = parseAcceptHeader(request);
    if (FORMAT_TEXT.equals(format)) {
      response.setContentType("text/plain; charset=UTF-8");
    } else if (FORMAT_JSON.equals(format)) {
      response.setContentType("application/json; charset=UTF-8");
    }

    NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
    BlockManager bm = nn.getNamesystem().getBlockManager();
    List<Node> leaves = bm.getDatanodeManager().getNetworkTopology()
        .getLeaves(NodeBase.ROOT);

    try (PrintStream out = new PrintStream(
        response.getOutputStream(), false, "UTF-8")) {
      printTopology(out, leaves, format);
    } catch (Throwable t) {
      String errMsg = "Print network topology failed. "
          + StringUtils.stringifyException(t);
      response.sendError(HttpServletResponse.SC_GONE, errMsg);
      throw new IOException(errMsg);
    } finally {
      response.getOutputStream().close();
    }
  }

  /**
   * Display each rack and the nodes assigned to that rack, as determined
   * by the NameNode, in a hierarchical manner.  The nodes and racks are
   * sorted alphabetically.
   *
   * @param stream print stream
   * @param leaves leaves nodes under base scope
   * @param format the response format
   */
  public void printTopology(PrintStream stream, List<Node> leaves,
      String format) throws BadFormatException, IOException {
    if (leaves.isEmpty()) {
      stream.print("No DataNodes");
      return;
    }

    // Build a map of rack -> nodes
    Map<String, TreeSet<String>> tree = new HashMap<>();
    for(Node dni : leaves) {
      String location = dni.getNetworkLocation();
      String name = dni.getName();

      tree.putIfAbsent(location, new TreeSet<>());
      tree.get(location).add(name);
    }

    // Sort the racks (and nodes) alphabetically, display in order
    ArrayList<String> racks = new ArrayList<>(tree.keySet());
    Collections.sort(racks);

    if (FORMAT_JSON.equals(format)) {
      printJsonFormat(stream, tree, racks);
    } else if (FORMAT_TEXT.equals(format)) {
      printTextFormat(stream, tree, racks);
    } else {
      throw new BadFormatException("Bad format: " + format);
    }
  }

  private void printJsonFormat(PrintStream stream, Map<String,
      TreeSet<String>> tree, ArrayList<String> racks) throws IOException {
    JsonFactory dumpFactory = new JsonFactory();
    JsonGenerator dumpGenerator = dumpFactory.createGenerator(stream);
    dumpGenerator.writeStartArray();

    for(String r : racks) {
      dumpGenerator.writeStartObject();
      dumpGenerator.writeFieldName(r);
      TreeSet<String> nodes = tree.get(r);
      dumpGenerator.writeStartArray();

      for(String n : nodes) {
        dumpGenerator.writeStartObject();
        dumpGenerator.writeStringField("ip", n);
        String hostname = NetUtils.getHostNameOfIP(n);
        if(hostname != null) {
          dumpGenerator.writeStringField("hostname", hostname);
        }
        dumpGenerator.writeEndObject();
      }
      dumpGenerator.writeEndArray();
      dumpGenerator.writeEndObject();
    }
    dumpGenerator.writeEndArray();
    dumpGenerator.flush();

    if (!dumpGenerator.isClosed()) {
      dumpGenerator.close();
    }
  }

  private void printTextFormat(PrintStream stream, Map<String,
      TreeSet<String>> tree, ArrayList<String> racks) {
    for(String r : racks) {
      stream.println("Rack: " + r);
      TreeSet<String> nodes = tree.get(r);

      for(String n : nodes) {
        stream.print("  " + n);
        String hostname = NetUtils.getHostNameOfIP(n);
        if(hostname != null) {
          stream.print(" (" + hostname + ")");
        }
        stream.println();
      }
      stream.println();
    }
  }

  @VisibleForTesting
  static String parseAcceptHeader(HttpServletRequest request) {
    String format = request.getHeader(HttpHeaders.ACCEPT);
    return format != null && format.contains(FORMAT_JSON) ?
        FORMAT_JSON : FORMAT_TEXT;
  }

  public static class BadFormatException extends Exception {
    private static final long serialVersionUID = 1L;

    public BadFormatException(String msg) {
      super(msg);
    }
  }
}
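
As a usage illustration (not part of the commit): a minimal client sketch that queries the new endpoint, assuming a NameNode web UI reachable at http://localhost:9870 (the default HTTP port; adjust host and port for your cluster). The Accept header drives parseAcceptHeader above: any value containing "json" selects JSON output, everything else falls back to plain text. The JSON body, as produced by printJsonFormat, is an array of rack objects, each mapping the rack path to an array of node entries with an "ip" field and, when reverse lookup succeeds, a "hostname" field.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

/** Minimal sketch of a client for the /topology endpoint (illustrative only). */
public class TopologyClientSketch {
  public static void main(String[] args) throws Exception {
    // Assumption: NameNode HTTP address; 9870 is the default dfs.namenode.http-address port.
    URL url = new URL("http://localhost:9870/topology");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    // An Accept value containing "json" selects the JSON format; anything else yields text.
    conn.setRequestProperty("Accept", "application/json");
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = in.readLine()) != null) {
        // Expected shape, with illustrative values:
        // [{"/default-rack":[{"ip":"127.0.0.1:9866","hostname":"localhost"}]}]
        System.out.println(line);
      }
    } finally {
      conn.disconnect();
    }
  }
}

Requesting the same URL without "json" in the Accept header (for example from a browser) returns the plain-text rendering from printTextFormat, one "Rack:" block per rack with its nodes indented beneath it.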

hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html

Lines changed: 1 addition & 0 deletions
@@ -52,6 +52,7 @@
       <li><a href="jmx">Metrics</a></li>
       <li><a href="conf">Configuration</a></li>
       <li><a href="stacks">Process Thread Dump</a></li>
+      <li><a href="topology">Network Topology</a></li>
     </ul>
   </li>
 </ul>

hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html

Lines changed: 1 addition & 0 deletions
@@ -48,6 +48,7 @@
       <li><a href="jmx">Metrics</a></li>
       <li><a href="conf">Configuration</a></li>
       <li><a href="stacks">Process Thread Dump</a></li>
+      <li><a href="topology">Network Topology</a></li>
     </ul>
   </li>
 </ul>
