Deprecate Rounding#round
This deprecates `Rounding#round` and `Rounding#nextRoundingValue` in
favor of calling

```
Rounding.Prepared prepared = rounding.prepare(min, max);
...
prepared.round(val)
```

because it is always going to be faster to prepare once. There are
going to be some cases where we won't know what to prepare *for*, and
in those cases you can call `prepareForUnknown` and still be faster
than calling the deprecated methods over and over again.
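
As a rough illustration of the preferred pattern (a sketch only, not code
from this commit; the unit, bounds, and names here are made up for the
example):

```
import java.time.ZoneOffset;
import org.elasticsearch.common.Rounding;

class PreparedRoundingSketch {
    // minMillis, maxMillis, and values are placeholders for whatever the
    // caller actually knows about its data.
    static void roundMany(long minMillis, long maxMillis, long[] values) {
        Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH)
            .timeZone(ZoneOffset.UTC)
            .build();

        // Prepare once for the known range, then round every value against
        // the same prepared instance.
        Rounding.Prepared prepared = rounding.prepare(minMillis, maxMillis);
        for (long value : values) {
            long bucketKey = prepared.round(value);
            long nextBucketKey = prepared.nextRoundingValue(bucketKey);
            // ... use bucketKey / nextBucketKey ...
        }
    }

    // When the range is not known up front, preparing once for an unknown
    // range is still faster than the deprecated per-call path.
    static Rounding.Prepared prepareWithoutBounds(Rounding rounding) {
        return rounding.prepareForUnknown();
    }
}
```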

Ultimately, this is important because it doesn't look like there is an
easy way to cache `Rounding.Prepared` or any of its precursors like
`LocalTimeOffset.Lookup`. Instead, we can just build it at most once per
request.
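
For instance (a hedged sketch, loosely modeled on the
DateHistogramGroupSource change below; `PerRequestRounder` is an
illustrative name, not a class in this commit), the prepared rounding can
simply be built once when the request-scoped object is constructed:

```
import org.elasticsearch.common.Rounding;

// Illustrative only: a request-scoped holder that prepares its rounding
// exactly once at construction and reuses it for every value it rounds.
class PerRequestRounder {
    private final Rounding.Prepared prepared;

    PerRequestRounder(Rounding rounding) {
        // The bounds are unknown at construction time, so prepare for an
        // unknown range, as DateHistogramGroupSource does below.
        this.prepared = rounding.prepareForUnknown();
    }

    long roundToBucketKey(long utcMillis) {
        return prepared.round(utcMillis);
    }
}
```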

Relates to elastic#56124
nik9000 committed Jun 8, 2020
1 parent 3ea2e64 commit 6e0e773
Showing 7 changed files with 48 additions and 38 deletions.
8 changes: 4 additions & 4 deletions server/src/main/java/org/elasticsearch/common/Rounding.java
@@ -240,9 +240,9 @@ public interface Prepared {

/**
* Rounds the given value.
* <p>
* Prefer {@link #prepare(long, long)} if rounding many values.
* @deprecated Prefer {@link #prepare} and then {@link Prepared#round(long)}
*/
@Deprecated
public final long round(long utcMillis) {
return prepare(utcMillis, utcMillis).round(utcMillis);
}
@@ -252,9 +252,9 @@ public final long round(long utcMillis) {
* {@link #round(long)}, returns the next rounding value. For
* example, with interval based rounding, if the interval is
* 3, {@code nextRoundValue(6) = 9}.
* <p>
* Prefer {@link #prepare(long, long)} if rounding many values.
* @deprecated Prefer {@link #prepare} and then {@link Prepared#nextRoundingValue(long)}
*/
@Deprecated
public final long nextRoundingValue(long utcMillis) {
return prepare(utcMillis, utcMillis).nextRoundingValue(utcMillis);
}
@@ -35,16 +35,16 @@ public class RoundingDuelTests extends ESTestCase {
public void testDuellingImplementations() {
org.elasticsearch.common.Rounding.DateTimeUnit randomDateTimeUnit =
randomFrom(org.elasticsearch.common.Rounding.DateTimeUnit.values());
org.elasticsearch.common.Rounding rounding;
org.elasticsearch.common.Rounding.Prepared rounding;
Rounding roundingJoda;

if (randomBoolean()) {
rounding = org.elasticsearch.common.Rounding.builder(randomDateTimeUnit).timeZone(ZoneOffset.UTC).build();
rounding = org.elasticsearch.common.Rounding.builder(randomDateTimeUnit).timeZone(ZoneOffset.UTC).build().prepareForUnknown();
DateTimeUnit dateTimeUnit = DateTimeUnit.resolve(randomDateTimeUnit.getId());
roundingJoda = Rounding.builder(dateTimeUnit).timeZone(DateTimeZone.UTC).build();
} else {
TimeValue interval = timeValue();
rounding = org.elasticsearch.common.Rounding.builder(interval).timeZone(ZoneOffset.UTC).build();
rounding = org.elasticsearch.common.Rounding.builder(interval).timeZone(ZoneOffset.UTC).build().prepareForUnknown();
roundingJoda = Rounding.builder(interval).timeZone(DateTimeZone.UTC).build();
}

@@ -157,6 +157,7 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAut

int roundingIndex = reduced.getBucketInfo().roundingIdx;
RoundingInfo roundingInfo = AutoDateHistogramAggregationBuilder.buildRoundings(null, null)[roundingIndex];
Rounding.Prepared prepared = roundingInfo.rounding.prepare(lowest, highest);

long normalizedDuration = (highest - lowest) / roundingInfo.getRoughEstimateDurationMillis();
int innerIntervalIndex = 0;
@@ -185,7 +186,7 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAut
} else {
do {
innerIntervalToUse = roundingInfo.innerIntervals[innerIntervalIndex];
int bucketCountAtInterval = getBucketCount(lowest, highest, roundingInfo.rounding, innerIntervalToUse);
int bucketCountAtInterval = getBucketCount(lowest, highest, prepared, innerIntervalToUse);
if (bucketCountAtInterval == reduced.getBuckets().size()) {
break;
}
@@ -199,11 +200,11 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAut
assertThat(reduced.getInterval().toString(), equalTo(innerIntervalToUse + roundingInfo.unitAbbreviation));
Map<Instant, Long> expectedCounts = new TreeMap<>();
if (totalBucketConut > 0) {
long keyForBucket = roundingInfo.rounding.round(lowest);
while (keyForBucket <= roundingInfo.rounding.round(highest)) {
long keyForBucket = prepared.round(lowest);
while (keyForBucket <= prepared.round(highest)) {
long nextKey = keyForBucket;
for (int i = 0; i < innerIntervalToUse; i++) {
nextKey = roundingInfo.rounding.nextRoundingValue(nextKey);
nextKey = prepared.nextRoundingValue(nextKey);
}
Instant key = Instant.ofEpochMilli(keyForBucket);
expectedCounts.put(key, 0L);
@@ -214,7 +215,7 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAut

for (InternalAutoDateHistogram histogram : inputs) {
for (Histogram.Bucket bucket : histogram.getBuckets()) {
long roundedBucketKey = roundingInfo.rounding.round(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli());
long roundedBucketKey = prepared.round(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli());
long docCount = bucket.getDocCount();
if (roundedBucketKey >= keyForBucket && roundedBucketKey < nextKey) {
expectedCounts.compute(key,
@@ -227,8 +228,8 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAut

// If there is only a single bucket, and we haven't added it above, add a bucket with no documents.
// this step is necessary because of the roundedBucketKey < keyForBucket + intervalInMillis above.
if (roundingInfo.rounding.round(lowest) == roundingInfo.rounding.round(highest) && expectedCounts.isEmpty()) {
expectedCounts.put(Instant.ofEpochMilli(roundingInfo.rounding.round(lowest)), 0L);
if (prepared.round(lowest) == prepared.round(highest) && expectedCounts.isEmpty()) {
expectedCounts.put(Instant.ofEpochMilli(prepared.round(lowest)), 0L);
}
}

@@ -249,12 +250,12 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAut
assertThat(reduced.getInterval(), equalTo(expectedInterval));
}

private int getBucketCount(long min, long max, Rounding rounding, int interval) {
private int getBucketCount(long min, long max, Rounding.Prepared prepared, int interval) {
int bucketCount = 0;
long key = rounding.round(min);
long key = prepared.round(min);
while (key < max) {
for (int i = 0; i < interval; i++) {
key = rounding.nextRoundingValue(key);
key = prepared.nextRoundingValue(key);
}
bucketCount++;
}
@@ -58,7 +58,8 @@ public void setUp() throws Exception {
long interval = randomIntBetween(1, 3);
intervalMillis = randomFrom(timeValueSeconds(interval), timeValueMinutes(interval), timeValueHours(interval)).getMillis();
Rounding rounding = Rounding.builder(TimeValue.timeValueMillis(intervalMillis)).build();
baseMillis = rounding.round(System.currentTimeMillis());
long now = System.currentTimeMillis();
baseMillis = rounding.prepare(now, now).round(now);
if (randomBoolean()) {
minDocCount = randomIntBetween(1, 10);
emptyBucketInfo = null;
@@ -108,8 +109,12 @@ protected void assertReduced(InternalDateHistogram reduced, List<InternalDateHis
long minBound = -1;
long maxBound = -1;
if (emptyBucketInfo.bounds != null) {
minBound = emptyBucketInfo.rounding.round(emptyBucketInfo.bounds.getMin());
maxBound = emptyBucketInfo.rounding.round(emptyBucketInfo.bounds.getMax());
Rounding.Prepared prepared = emptyBucketInfo.rounding.prepare(
emptyBucketInfo.bounds.getMin(),
emptyBucketInfo.bounds.getMax()
);
minBound = prepared.round(emptyBucketInfo.bounds.getMin());
maxBound = prepared.round(emptyBucketInfo.bounds.getMax());
if (expectedCounts.isEmpty() && minBound <= maxBound) {
expectedCounts.put(minBound, 0L);
}
@@ -304,7 +304,7 @@ public String getTimeZone() {
/**
* Create the rounding for this date histogram
*/
public Rounding createRounding() {
public Rounding.Prepared createRounding() {
return createRounding(interval.toString(), timeZone);
}

@@ -363,7 +363,7 @@ public static DateHistogramGroupConfig fromXContent(final XContentParser parser)
return PARSER.parse(parser, null);
}

private static Rounding createRounding(final String expr, final String timeZone) {
private static Rounding.Prepared createRounding(final String expr, final String timeZone) {
Rounding.DateTimeUnit timeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(expr);
final Rounding.Builder rounding;
if (timeUnit != null) {
@@ -372,6 +372,6 @@ private static Rounding createRounding(final String expr, final String timeZone)
rounding = new Rounding.Builder(TimeValue.parseTimeValue(expr, "createRounding"));
}
rounding.timeZone(ZoneId.of(timeZone, ZoneId.SHORT_IDS));
return rounding.build();
return rounding.build().prepareForUnknown();
}
}
@@ -209,13 +209,27 @@ private void writeInterval(Interval interval, StreamOutput out) throws IOExcepti

private final Interval interval;
private final ZoneId timeZone;
private Rounding rounding;
private final Rounding.Prepared rounding;

public DateHistogramGroupSource(String field, ScriptConfig scriptConfig, Interval interval, ZoneId timeZone) {
super(field, scriptConfig);
this.interval = interval;
this.timeZone = timeZone;
rounding = buildRounding();
}

public DateHistogramGroupSource(StreamInput in) throws IOException {
super(in);
this.interval = readInterval(in);
this.timeZone = in.readOptionalZoneId();
// Format was optional in 7.2.x, removed in 7.3+
if (in.getVersion().before(Version.V_7_3_0)) {
in.readOptionalString();
}
rounding = buildRounding();
}

private Rounding.Prepared buildRounding() {
Rounding.DateTimeUnit timeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.toString());
final Rounding.Builder roundingBuilder;
if (timeUnit != null) {
@@ -227,17 +241,7 @@ public DateHistogramGroupSource(String field, ScriptConfig scriptConfig, Interva
if (timeZone != null) {
roundingBuilder.timeZone(timeZone);
}
this.rounding = roundingBuilder.build();
}

public DateHistogramGroupSource(StreamInput in) throws IOException {
super(in);
this.interval = readInterval(in);
this.timeZone = in.readOptionalZoneId();
// Format was optional in 7.2.x, removed in 7.3+
if (in.getVersion().before(Version.V_7_3_0)) {
in.readOptionalString();
}
return roundingBuilder.build().prepareForUnknown();
}

private static ConstructingObjectParser<DateHistogramGroupSource, Void> createParser(boolean lenient) {
@@ -296,7 +300,7 @@ public ZoneId getTimeZone() {
return timeZone;
}

public Rounding getRounding() {
Rounding.Prepared getRounding() {
return rounding;
}

@@ -274,7 +274,7 @@ public void testSimpleDateHistoWithDelay() throws Exception {
asMap("the_histo", now)
)
);
final Rounding rounding = dateHistoConfig.createRounding();
final Rounding.Prepared rounding = dateHistoConfig.createRounding();
executeTestCase(dataset, job, now, (resp) -> {
assertThat(resp.size(), equalTo(3));
IndexRequest request = resp.get(0);
@@ -338,7 +338,7 @@ public void testSimpleDateHistoWithOverlappingDelay() throws Exception {
asMap("the_histo", now)
)
);
final Rounding rounding = dateHistoConfig.createRounding();
final Rounding.Prepared rounding = dateHistoConfig.createRounding();
executeTestCase(dataset, job, now, (resp) -> {
assertThat(resp.size(), equalTo(2));
IndexRequest request = resp.get(0);