Skip to content

Commit 6a96c3b

Browse files
author
jinxing
committed
fix log
1 parent 2b89166 commit 6a96c3b

File tree

2 files changed

+24
-24
lines changed

2 files changed

+24
-24
lines changed

core/src/main/java/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriter.java

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -210,19 +210,19 @@ public void write(Iterator<Product2<K, V>> records) throws IOException {
210210
} else {
211211
lenDistribution[8]++;
212212
}
213-
String[] ranges = {"[0, 1k)", "[1k, 10k)", "[10k, 100k)", "[100k, 1m)", "[1m, 10m)",
214-
"[10m, 100m)", "[100m, 1g)", "[1g, 10g)", ">10g"};
215-
String[] rangesAndDistribute = new String[9];
216-
for (int j = 0; j < 9; j++) {
217-
rangesAndDistribute[j] = ranges[j] + ":" + lenDistribution[j];
218-
}
219-
logger.debug("For task {}.{} in stage {} (TID {}), the block sizes in MapStatus are " +
220-
"inaccurate (average is {}, {} blocks underestimated, size of underestimated is {})," +
221-
" distribution is {}.", taskContext.partitionId(), taskContext.attemptNumber(),
222-
taskContext.stageId(), taskContext.taskAttemptId(), hc.getAvgSize(),
223-
underestimatedBlocksNum, underestimatedBlocksSize,
224-
String.join(", ", rangesAndDistribute));
225213
}
214+
String[] ranges = {"[0, 1k)", "[1k, 10k)", "[10k, 100k)", "[100k, 1m)", "[1m, 10m)",
215+
"[10m, 100m)", "[100m, 1g)", "[1g, 10g)", ">10g"};
216+
String[] rangesAndDistribute = new String[9];
217+
for (int j = 0; j < 9; j++) {
218+
rangesAndDistribute[j] = ranges[j] + ":" + lenDistribution[j];
219+
}
220+
logger.debug("For task {}.{} in stage {} (TID {}), the block sizes in MapStatus are " +
221+
"inaccurate (average is {}, {} blocks underestimated, size of underestimated is {})," +
222+
" distribution is {}.", taskContext.partitionId(), taskContext.attemptNumber(),
223+
taskContext.stageId(), taskContext.taskAttemptId(), hc.getAvgSize(),
224+
underestimatedBlocksNum, underestimatedBlocksSize,
225+
String.join(", ", rangesAndDistribute));
226226
}
227227
}
228228
}

core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -267,19 +267,19 @@ void closeAndWriteOutput() throws IOException {
267267
} else {
268268
lenDistribution[8]++;
269269
}
270-
String[] ranges = {"[0, 1k)", "[1k, 10k)", "[10k, 100k)", "[100k, 1m)", "[1m, 10m)",
271-
"[10m, 100m)", "[100m, 1g)", "[1g, 10g)", ">10g"};
272-
String[] rangesAndDistribute = new String[9];
273-
for (int j = 0; j < 9; j++) {
274-
rangesAndDistribute[j] = ranges[j] + ":" + lenDistribution[j];
275-
}
276-
logger.debug("For task {}.{} in stage {} (TID {}), the block sizes in MapStatus are " +
277-
"inaccurate (average is {}, {} blocks underestimated, size of underestimated is {})," +
278-
" distribution is {}.", taskContext.partitionId(), taskContext.attemptNumber(),
279-
taskContext.stageId(), taskContext.taskAttemptId(), hc.getAvgSize(),
280-
underestimatedBlocksNum, underestimatedBlocksSize,
281-
String.join(", ", rangesAndDistribute));
282270
}
271+
String[] ranges = {"[0, 1k)", "[1k, 10k)", "[10k, 100k)", "[100k, 1m)", "[1m, 10m)",
272+
"[10m, 100m)", "[100m, 1g)", "[1g, 10g)", ">10g"};
273+
String[] rangesAndDistribute = new String[9];
274+
for (int j = 0; j < 9; j++) {
275+
rangesAndDistribute[j] = ranges[j] + ":" + lenDistribution[j];
276+
}
277+
logger.debug("For task {}.{} in stage {} (TID {}), the block sizes in MapStatus are " +
278+
"inaccurate (average is {}, {} blocks underestimated, size of underestimated is {})," +
279+
" distribution is {}.", taskContext.partitionId(), taskContext.attemptNumber(),
280+
taskContext.stageId(), taskContext.taskAttemptId(), hc.getAvgSize(),
281+
underestimatedBlocksNum, underestimatedBlocksSize,
282+
String.join(", ", rangesAndDistribute));
283283
}
284284
}
285285
}

0 commit comments

Comments (0)