Commit b96dc48

Author: lgh (committed)
MAPREDUCE-7469 NNBench createControlFiles should use thread pool to improve performance.
1 parent 2f1e155, commit b96dc48

File tree (2 files changed: +64 -2 lines)

  • hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs
      • NNBench.java
      • TestNNBench.java

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBench.java

Lines changed: 44 additions & 2 deletions
@@ -26,9 +26,15 @@
 import java.io.InputStreamReader;
 import java.io.PrintStream;
 import java.text.SimpleDateFormat;
+import java.util.ArrayList;
 import java.util.Date;
 import java.util.Iterator;
+import java.util.List;
 import java.util.StringTokenizer;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
@@ -55,6 +61,7 @@
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.util.concurrent.HadoopExecutors;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -105,6 +112,8 @@ public class NNBench extends Configured implements Tool {
   private static final String OP_RENAME = "rename";
   private static final String OP_DELETE = "delete";
   private static final int MAX_OPERATION_EXCEPTIONS = 1000;
+  private ExecutorService executorService =
+      HadoopExecutors.newFixedThreadPool(2 * Runtime.getRuntime().availableProcessors());
 
   // To display in the format that matches the NN and DN log format
   // Example: 2007-10-26 00:01:19,853
@@ -134,27 +143,60 @@ private void cleanupBeforeTestrun() throws IOException {
    *
    * @throws IOException on error
    */
-  private void createControlFiles() throws IOException {
+  private void createControlFiles() throws ExecutionException, InterruptedException {
     LOG.info("Creating " + numberOfMaps + " control files");
 
+    List<Future<Void>> list = new ArrayList<>();
     for (int i = 0; i < numberOfMaps; i++) {
       String strFileName = "NNBench_Controlfile_" + i;
       Path filePath = new Path(new Path(baseDir, CONTROL_DIR_NAME),
           strFileName);
 
+      Future<Void> future = executorService.submit(new createControlFile(strFileName, filePath, i));
+      list.add(future);
+    }
+
+    for (int i = 0; i < list.size(); i++) {
+      try {
+        list.get(i).get();
+      } catch (InterruptedException | ExecutionException e) {
+        LOG.error("Creating control files Error.");
+        throw e;
+      }
+    }
+
+    executorService.shutdown();
+  }
+
+  private class createControlFile implements Callable<Void> {
+    String strFileName;
+    Path filePath;
+    int order;
+
+    public createControlFile(String strFileName, Path filePath, int order) {
+      this.strFileName = strFileName;
+      this.filePath = filePath;
+      this.order = order;
+    }
+
+    @Override
+    public Void call() throws Exception {
       SequenceFile.Writer writer = null;
       try {
         writer = SequenceFile.createWriter(getConf(), Writer.file(filePath),
             Writer.keyClass(Text.class), Writer.valueClass(LongWritable.class),
             Writer.compression(CompressionType.NONE));
-        writer.append(new Text(strFileName), new LongWritable(i));
+        writer.append(new Text(strFileName), new LongWritable(order));
       } finally {
         if (writer != null) {
          writer.close();
        }
      }
+      return null;
     }
+
   }
+
   /**
    * Display version
    */
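
For readers skimming the hunk above, the change boils down to the standard submit-and-await idiom: submit one Callable per control file to a bounded pool, keep the returned Futures, call get() on each so any writer failure surfaces before the MapReduce job starts, then shut the pool down. The sketch below is a minimal, self-contained illustration of that idiom, not the committed code: it substitutes the plain JDK Executors factory for HadoopExecutors and a hypothetical logging task for the SequenceFile writer, and the class name and numberOfMaps value are illustrative.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class ControlFilePoolSketch {
  public static void main(String[] args) throws InterruptedException, ExecutionException {
    int numberOfMaps = 5; // stand-in for NNBench's -maps value
    ExecutorService pool =
        Executors.newFixedThreadPool(2 * Runtime.getRuntime().availableProcessors());

    // One Callable<Void> per control file, mirroring the inner createControlFile class.
    List<Future<Void>> futures = new ArrayList<>();
    for (int i = 0; i < numberOfMaps; i++) {
      final int order = i;
      Callable<Void> task = () -> {
        // In NNBench this is where SequenceFile.createWriter(...) writes
        // "NNBench_Controlfile_<order>"; here we only simulate the work.
        System.out.println("creating control file " + order + " on "
            + Thread.currentThread().getName());
        return null;
      };
      futures.add(pool.submit(task));
    }

    // Await every task; get() rethrows a task failure as ExecutionException,
    // so one failed writer fails the whole createControlFiles step.
    for (Future<Void> f : futures) {
      f.get();
    }
    pool.shutdown();
  }
}

The committed patch sizes its pool the same way (2 * availableProcessors) but obtains it from HadoopExecutors.newFixedThreadPool, as shown in the import and field added above.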

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java

Lines changed: 20 additions & 0 deletions
@@ -25,6 +25,7 @@
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.HadoopTestCase;
 import org.apache.hadoop.mapred.JobConf;
@@ -37,6 +38,8 @@ public class TestNNBench extends HadoopTestCase {
   private static final String BASE_DIR =
       new File(System.getProperty("test.build.data", "build/test/data"),
           "NNBench").getAbsolutePath();
+  private static final String CONTROL_DIR_NAME = "control";
+
 
   public TestNNBench() throws IOException {
     super(LOCAL_MR, LOCAL_FS, 1, 1);
@@ -74,6 +77,15 @@ public void testNNBenchCreateAndRename() throws Exception {
         getFileSystem().exists(renamedPath));
   }
 
+  @Test(timeout = 30000)
+  public void testNNBenchCreateControlFilesWithPool() throws Exception {
+    runNNBench(createJobConf(), "create_write", BASE_DIR, "5");
+    Path path = new Path(BASE_DIR, CONTROL_DIR_NAME);
+
+    FileStatus[] fileStatuses = getFileSystem().listStatus(path);
+    assertEquals(5, fileStatuses.length);
+  }
+
   @Test(timeout = 30000)
   public void testNNBenchCrossCluster() throws Exception {
     MiniDFSCluster dfsCluster = new MiniDFSCluster.Builder(new JobConf())
@@ -97,6 +109,14 @@ private void runNNBench(Configuration conf, String operation, String baseDir)
     assertEquals(0, ToolRunner.run(conf, new NNBench(), genArgs));
   }
 
+  private void runNNBench(Configuration conf, String operation, String baseDir, String numMaps)
+      throws Exception {
+    String[] genArgs = {"-operation", operation, "-baseDir", baseDir,
+        "-startTime", "" + (Time.now() / 1000 + 3), "-blockSize", "1024", "-maps", numMaps};
+
+    assertEquals(0, ToolRunner.run(conf, new NNBench(), genArgs));
+  }
+
   private void runNNBench(Configuration conf, String operation)
       throws Exception {
     runNNBench(conf, operation, BASE_DIR);
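
The added test only counts entries under <baseDir>/control; each entry is a SequenceFile holding a single (Text fileName, LongWritable order) record, as written by the patched createControlFiles. As a rough aid for inspecting those files, here is a minimal read-back sketch; the class name and the path argument are illustrative and not part of the commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class ReadControlFileSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // e.g. <baseDir>/control/NNBench_Controlfile_0, passed on the command line
    Path controlFile = new Path(args[0]);

    try (SequenceFile.Reader reader =
             new SequenceFile.Reader(conf, SequenceFile.Reader.file(controlFile))) {
      Text fileName = new Text();
      LongWritable order = new LongWritable();
      // Each control file written by the patch contains exactly one record.
      while (reader.next(fileName, order)) {
        System.out.println(fileName + " -> " + order.get());
      }
    }
  }
}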
