[7.x] Do not throw exceptions resulting from persisting datafeed timing stats. (#49044) #49050

Merged · 1 commit · Nov 13, 2019
@@ -117,7 +117,13 @@ public Integer getMaxEmptySearches() {
     }
 
     public void finishReportingTimingStats() {
-        timingStatsReporter.finishReporting();
+        try {
+            timingStatsReporter.finishReporting();
+        } catch (Exception e) {
+            // We don't want the exception to propagate out of this method as it can leave the datafeed in the "stopping" state forever.
+            // Since persisting datafeed timing stats is not critical, we just log a warning here.
+            LOGGER.warn("[{}] Datafeed timing stats could not be reported due to: {}", jobId, e);
+        }
     }
 
     Long runLookBack(long startTime, Long endTime) throws Exception {
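The pattern in the new catch block is easy to demonstrate in isolation. Below is a minimal, standalone sketch (not the Elasticsearch classes; it uses java.util.logging and hypothetical stand-in names) of the same idea: the non-critical reporting call is wrapped so a persistence failure is logged as a warning instead of propagating to the caller.

import java.util.logging.Level;
import java.util.logging.Logger;

public class FinishReportingSketch {

    private static final Logger LOGGER = Logger.getLogger(FinishReportingSketch.class.getName());

    // Stand-in for the real timing stats reporter; here it always fails,
    // simulating e.g. a rejected persistence request during node shutdown.
    static void finishReporting() {
        throw new RuntimeException("simulated failure while persisting timing stats");
    }

    // Same shape as the patched method: swallow the failure, log a warning, return normally.
    static void finishReportingTimingStats(String jobId) {
        try {
            finishReporting();
        } catch (Exception e) {
            LOGGER.log(Level.WARNING, "[" + jobId + "] timing stats could not be reported", e);
        }
    }

    public static void main(String[] args) {
        finishReportingTimingStats("my-job"); // logs a warning; the caller never sees the exception
    }
}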
@@ -361,7 +361,7 @@ public void stop(String source, TimeValue timeout, Exception e, boolean autoClos
                 acquired = datafeedJobLock.tryLock(timeout.millis(), TimeUnit.MILLISECONDS);
             } catch (InterruptedException e1) {
                 Thread.currentThread().interrupt();
-            } finally {
+            } finally { // It is crucial that none of the calls this "finally" block makes throws an exception for minor problems.
                 logger.info("[{}] stopping datafeed [{}] for job [{}], acquired [{}]...", source, datafeedId,
                     datafeedJob.getJobId(), acquired);
                 runningDatafeedsOnThisNode.remove(allocationId);
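The comment added to this "finally" block captures why the first change matters: in Java, an exception thrown by one statement in a finally block abandons the statements after it, so a failure while reporting timing stats during shutdown would skip the remaining cleanup (for example removing the datafeed from runningDatafeedsOnThisNode) and could leave the datafeed in the "stopping" state forever. A small illustrative sketch of that behaviour, using hypothetical stand-in method names:

public class FinallyOrderSketch {

    static void reportTimingStats()    { throw new RuntimeException("persist failed"); }
    static void removeFromRunningMap() { System.out.println("removed from running map"); }
    static void completeStop()         { System.out.println("stop completed"); }

    public static void main(String[] args) {
        try {
            try {
                System.out.println("stopping datafeed...");
            } finally {
                reportTimingStats();    // throws: the two calls below never execute
                removeFromRunningMap();
                completeStop();
            }
        } catch (RuntimeException e) {
            System.out.println("cleanup aborted early: " + e.getMessage());
        }
    }
}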
@@ -12,6 +12,7 @@
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -60,6 +61,7 @@
 import static org.mockito.Matchers.eq;
 import static org.mockito.Matchers.same;
 import static org.mockito.Mockito.atMost;
+import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.times;
@@ -454,6 +456,15 @@ public void testFlushAnalysisProblemIsConflict() {
         assertThat(analysisProblemException.shouldStop, is(true));
     }
 
+    public void testFinishReportingTimingStats() {
+        doThrow(new EsRejectedExecutionException()).when(timingStatsReporter).finishReporting();
+
+        long frequencyMs = 100;
+        long queryDelayMs = 1000;
+        DatafeedJob datafeedJob = createDatafeedJob(frequencyMs, queryDelayMs, 1000, -1, randomBoolean());
+        datafeedJob.finishReportingTimingStats();
+    }
+
     private DatafeedJob createDatafeedJob(long frequencyMs, long queryDelayMs, long latestFinalBucketEndTimeMs,
                                           long latestRecordTimeMs, boolean haveSeenDataPreviously) {
         Supplier<Long> currentTimeSupplier = () -> currentTime;
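The new test uses Mockito's doThrow(...).when(mock).method() form, which is how a void method such as finishReporting() is stubbed to throw (when(...).thenThrow(...) cannot be used for void return types). Note the test has no explicit assertion: it passes as long as finishReportingTimingStats() swallows the stubbed EsRejectedExecutionException instead of rethrowing it. Below is a self-contained sketch of the stubbing pattern, using a hypothetical Reporter interface rather than the real reporter class:

import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;

public class DoThrowSketch {

    // Hypothetical collaborator with a void method, standing in for the timing stats reporter.
    interface Reporter {
        void finishReporting();
    }

    public static void main(String[] args) {
        Reporter reporter = mock(Reporter.class);
        // Stub the void method to throw when invoked.
        doThrow(new RuntimeException("rejected execution")).when(reporter).finishReporting();

        try {
            reporter.finishReporting();
        } catch (RuntimeException e) {
            System.out.println("mock threw as stubbed: " + e.getMessage());
        }
    }
}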