
Commit f35d716

Author: Ritesh Garg (committed)
Merge branch 'RestartingTest' into HDFS-17299-Passing
2 parents: 0db222c + 21eeb9a

2 files changed: +6 −11 lines


hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DataStreamer.java

Lines changed: 4 additions & 7 deletions
@@ -87,6 +87,7 @@
 import org.apache.hadoop.thirdparty.com.google.common.cache.LoadingCache;
 import org.apache.hadoop.thirdparty.com.google.common.cache.RemovalListener;
 import org.apache.hadoop.thirdparty.com.google.common.cache.RemovalNotification;
+import org.apache.hadoop.thirdparty.com.google.common.collect.Iterables;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -414,10 +415,6 @@ synchronized void markFirstNodeIfNotMarked() {
     }
 
     synchronized void adjustState4RestartingNode() {
-      if (restartingNodeIndex == -1) {
-        return;
-      }
-
       // Just took care of a node error while waiting for a node restart
       if (restartingNodeIndex >= 0) {
         // If the error came from a node further away than the restarting
@@ -1816,7 +1813,7 @@ DatanodeInfo[] getExcludedNodes() {
    * Must get block ID and the IDs of the destinations from the namenode.
    * Returns the list of target datanodes.
    */
-  protected LocatedBlock setupPipelineForCreate() throws IOException {
+  protected void setupPipelineForCreate() throws IOException {
     LocatedBlock lb;
     DatanodeInfo[] nodes;
     StorageType[] nextStorageTypes;
@@ -1849,7 +1846,8 @@ protected LocatedBlock setupPipelineForCreate() throws IOException {
         dfsClient.namenode.abandonBlock(block.getCurrentBlock(),
             stat.getFileId(), src, dfsClient.clientName);
         block.setCurrentBlock(null);
-        final DatanodeInfo badNode = nodes[errorState.getBadNodeIndex()];
+        final DatanodeInfo badNode =
+            errorState.getBadNodeIndex() == -1 ? Iterables.getLast(failed) : nodes[errorState.getBadNodeIndex()];
         LOG.warn("Excluding datanode " + badNode);
         excludedNodes.put(badNode, badNode);
         setPipeline(null, null, null);
@@ -1859,7 +1857,6 @@ protected LocatedBlock setupPipelineForCreate() throws IOException {
     if (!success) {
       throw new IOException("Unable to create new block.");
     }
-    return lb;
   }
 
   // connects to the first datanode in the pipeline
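
Taken together, the DataStreamer changes drop the early return in adjustState4RestartingNode(), make setupPipelineForCreate() return void, and, when a freshly allocated block is abandoned, pick the datanode to exclude from the tail of the streamer's failed-node list whenever no bad-node index was recorded (index == -1), via the shaded Guava Iterables.getLast. Below is a minimal, standalone sketch of that selection logic only; the String stand-ins and the pickBadNode helper are illustrative, not Hadoop's DatanodeInfo or DataStreamer API.

// Standalone sketch of the bad-node selection in the hunk above.
// Plain Strings stand in for DatanodeInfo; pickBadNode is a hypothetical helper.
import java.util.List;

public class BadNodeSelectionSketch {

  // Mirrors the patch's ternary: fall back to the most recently failed node
  // when no bad-node index was recorded (index == -1).
  static String pickBadNode(int badNodeIndex, String[] nodes, List<String> failed) {
    return badNodeIndex == -1
        ? failed.get(failed.size() - 1)   // Iterables.getLast(failed) in the patch
        : nodes[badNodeIndex];
  }

  public static void main(String[] args) {
    String[] pipeline = {"dn1:9866", "dn2:9866", "dn3:9866"};
    List<String> failed = List.of("dn2:9866");

    // Error index known: exclude the indexed node.
    System.out.println(pickBadNode(0, pipeline, failed));   // dn1:9866
    // No index recorded (e.g. restart handling): exclude the last failed node.
    System.out.println(pickBadNode(-1, pipeline, failed));  // dn2:9866
  }
}

The effect, presumably, is that the abandon-block path can still hand the exclusion cache a concrete datanode even when the failure was detected outside the normal bad-node-index bookkeeping.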

hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/StripedDataStreamer.java

Lines changed: 2 additions & 4 deletions
@@ -90,7 +90,7 @@ private LocatedBlock getFollowingBlock() throws IOException {
   }
 
   @Override
-  protected LocatedBlock setupPipelineForCreate() throws IOException {
+  protected void setupPipelineForCreate() throws IOException {
     boolean success;
     LocatedBlock lb = getFollowingBlock();
     block.setCurrentBlock(lb.getBlock());
@@ -101,7 +101,6 @@ protected LocatedBlock setupPipelineForCreate() throws IOException {
     DatanodeInfo[] nodes = lb.getLocations();
     StorageType[] storageTypes = lb.getStorageTypes();
     String[] storageIDs = lb.getStorageIDs();
-    setPipeline(lb);
     // Connect to the DataNode. If fail the internal error state will be set.
     success = createBlockOutputStream(nodes, storageTypes, storageIDs, 0L,
         false);
@@ -111,10 +110,9 @@ protected LocatedBlock setupPipelineForCreate() throws IOException {
       final DatanodeInfo badNode = nodes[getErrorState().getBadNodeIndex()];
       LOG.warn("Excluding datanode " + badNode);
       excludedNodes.put(badNode, badNode);
-      setPipeline(null, null, null);
       throw new IOException("Unable to create new block." + this);
     }
-    return lb;
+    setPipeline(lb);
   }
 
   @VisibleForTesting
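
In the striped streamer the same method now commits the pipeline only after the DataNode connection succeeds: setPipeline(lb) moves past the success check, so the failure path no longer needs the setPipeline(null, null, null) rollback, and the method no longer returns the LocatedBlock. A rough standalone sketch of that ordering follows; PipelineState, connect, and setupPipeline are made-up stand-ins, not the real StripedDataStreamer members.

// Sketch of "connect first, commit pipeline state on success" from the hunk above.
import java.io.IOException;

public class DeferredPipelineSketch {

  static final class PipelineState {
    String[] nodes;                          // null until a pipeline is committed
    void commit(String[] n) { this.nodes = n; }
  }

  // Before the change, state was committed up front and rolled back on failure.
  // After the change (shown here), nothing is committed unless the connect works.
  static void setupPipeline(PipelineState state, String[] nodes) throws IOException {
    boolean success = connect(nodes);        // createBlockOutputStream(...) in the patch
    if (!success) {
      throw new IOException("Unable to create new block.");
    }
    state.commit(nodes);                     // setPipeline(lb) moved to the end
  }

  static boolean connect(String[] nodes) {
    return nodes.length > 0;                 // placeholder for the real DataNode handshake
  }

  public static void main(String[] args) throws IOException {
    PipelineState state = new PipelineState();
    setupPipeline(state, new String[] {"dn1:9866", "dn2:9866"});
    System.out.println(state.nodes.length);  // 2
  }
}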
