Skip to content

Commit

Permalink
Add extra round trip to aggs tests
Browse files Browse the repository at this point in the history
This tries to automatically detect problems serializing aggregation
results by round tripping the results in our usual `AggregatorTestCase`.
It's "free" testing in that we already have the tests written and we'll
get round trip testing "on the side". But it's kind of sneaky because we
aren't *trying* to test serialization here. So the failures can be
surprising. But surprising test failures are better than bugs. At least
that is what I tell myself so I can sleep at night.

Closes elastic#73680
  • Loading branch information
nik9000 committed Oct 21, 2021
1 parent 7c0dbe6 commit 2f48d3e
Show file tree
Hide file tree
Showing 14 changed files with 188 additions and 58 deletions.
20 changes: 20 additions & 0 deletions server/src/main/java/org/elasticsearch/search/DocValueFormat.java
Original file line number Diff line number Diff line change
Expand Up @@ -317,6 +317,26 @@ public double parseDouble(String value, boolean roundUp, LongSupplier now) {
/**
 * Human-readable description of this format, including the formatter,
 * time zone, and resolution it was built with. Used in error messages
 * and debugging output.
 */
public String toString() {
return "DocValueFormat.DateTime(" + formatter + ", " + timeZone + ", " + resolution + ")";
}

@Override
public boolean equals(Object o) {
// Fast path: same instance is always equal.
if (this == o) {
return true;
}
// Exact class match (not instanceof) keeps equals symmetric with subclasses.
if (o == null || getClass() != o.getClass()) {
return false;
}
DateTime that = (DateTime) o;
// Two DateTime formats are equal only when all four configuration
// fields match; keep this list in sync with hashCode() below.
return formatter.equals(that.formatter)
&& timeZone.equals(that.timeZone)
&& resolution == that.resolution
&& formatSortValues == that.formatSortValues;
}

@Override
public int hashCode() {
// Hash over the same four fields compared in equals(Object) so that
// equal instances always produce equal hash codes.
return Objects.hash(formatter, timeZone, resolution, formatSortValues);
}
}

DocValueFormat GEOHASH = GeoHashDocValueFormat.INSTANCE;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -134,15 +134,14 @@ public boolean equals(Object o) {
}

Bucket<?> that = (Bucket<?>) o;
return bucketOrd == that.bucketOrd
&& Double.compare(that.score, score) == 0
return Double.compare(that.score, score) == 0
&& Objects.equals(aggregations, that.aggregations)
&& Objects.equals(format, that.format);
}

@Override
public int hashCode() {
return Objects.hash(getClass(), bucketOrd, aggregations, score, format);
return Objects.hash(getClass(), aggregations, score, format);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -142,17 +142,23 @@ public boolean equals(Object obj) {
return false;
}
Bucket<?> that = (Bucket<?>) obj;
// No need to take format and showDocCountError, they are attributes
// of the parent terms aggregation object that are only copied here
// for serialization purposes
if (showDocCountError && docCountError != that.docCountError) {
/*
* docCountError doesn't matter if not showing it and
* serialization sets it to -1 no matter what it was
* before.
*/
return false;
}
return Objects.equals(docCount, that.docCount)
&& Objects.equals(docCountError, that.docCountError)
&& Objects.equals(showDocCountError, that.showDocCountError)
&& Objects.equals(format, that.format)
&& Objects.equals(aggregations, that.aggregations);
}

@Override
public int hashCode() {
return Objects.hash(getClass(), docCount, docCountError, aggregations);
return Objects.hash(getClass(), docCount, format, showDocCountError, showDocCountError ? docCountError : -1, aggregations);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th

@Override
public int hashCode() {
return Objects.hash(super.hashCode(), counts.hashCode(0));
return Objects.hash(super.hashCode(), counts == null ? 0 : counts.hashCode(0));
}

@Override
Expand All @@ -111,6 +111,9 @@ public boolean equals(Object obj) {
if (super.equals(obj) == false) return false;

InternalCardinality other = (InternalCardinality) obj;
if (counts == null) {
return other.counts == null;
}
return counts.equals(0, other.counts, 0);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -206,7 +206,6 @@ public boolean equals(Object obj) {
ScoreDoc otherDoc = other.topDocs.topDocs.scoreDocs[d];
if (thisDoc.doc != otherDoc.doc) return false;
if (Double.compare(thisDoc.score, otherDoc.score) != 0) return false;
if (thisDoc.shardIndex != otherDoc.shardIndex) return false;
if (thisDoc instanceof FieldDoc) {
if (false == (otherDoc instanceof FieldDoc)) return false;
FieldDoc thisFieldDoc = (FieldDoc) thisDoc;
Expand All @@ -231,7 +230,6 @@ public int hashCode() {
ScoreDoc doc = topDocs.topDocs.scoreDocs[d];
hashCode = 31 * hashCode + doc.doc;
hashCode = 31 * hashCode + Float.floatToIntBits(doc.score);
hashCode = 31 * hashCode + doc.shardIndex;
if (doc instanceof FieldDoc) {
FieldDoc fieldDoc = (FieldDoc) doc;
hashCode = 31 * hashCode + Arrays.hashCode(fieldDoc.fields);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ protected InternalAutoDateHistogram createTestInstance(String name, Map<String,
randomLongBetween(0, utcMillis("2050-01-01")),
roundingInfos,
roundingIndex,
randomNumericDocValueFormat()
randomDateDocValueFormat()
);
}

Expand Down Expand Up @@ -111,7 +111,7 @@ protected List<InternalAutoDateHistogram> randomResultsToReduce(String name, int
long startingDate = randomLongBetween(0, utcMillis("2050-01-01"));
RoundingInfo[] roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null, null);
int roundingIndex = between(0, roundingInfos.length - 1);
DocValueFormat format = randomNumericDocValueFormat();
DocValueFormat format = randomDateDocValueFormat();
List<InternalAutoDateHistogram> result = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
long thisResultStart = startingDate;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,13 @@
import org.elasticsearch.common.Rounding.DateTimeUnit;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.DateFieldMapper.Resolution;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;

import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.HashMap;
Expand All @@ -41,7 +43,7 @@ public class InternalDateHistogramTests extends InternalMultiBucketAggregationTe
public void setUp() throws Exception {
super.setUp();
keyed = randomBoolean();
format = randomNumericDocValueFormat();
format = randomDateDocValueFormat();
// in order for reduction to work properly (and be realistic) we need to use the same interval, minDocCount, emptyBucketInfo
// and base in all randomly created aggs as part of the same test run. This is particularly important when minDocCount is
// set to 0 as empty buckets need to be added to fill the holes.
Expand Down Expand Up @@ -69,6 +71,26 @@ public void setUp() throws Exception {

@Override
protected InternalDateHistogram createTestInstance(String name, Map<String, Object> metadata, InternalAggregations aggregations) {
// Delegate to the private overload using the randomized date format
// chosen in setUp(), shared across all instances of a test run.
return createTestInstance(name, metadata, aggregations, format);
}

@Override
protected InternalDateHistogram createTestInstanceForXContent(String name, Map<String, Object> metadata, InternalAggregations subAggs) {
// We have to force a format that won't throw away precision and cause duplicate fields
// (the randomized format from setUp() may round keys, so a round trip
// through xContent could collide buckets). Millisecond resolution in
// UTC with the default formatter is lossless for the generated keys.
DocValueFormat xContentCompatibleFormat = new DocValueFormat.DateTime(
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
ZoneOffset.UTC,
Resolution.MILLISECONDS
);
return createTestInstance(name, metadata, subAggs, xContentCompatibleFormat);
}

private InternalDateHistogram createTestInstance(
String name,
Map<String, Object> metadata,
InternalAggregations aggregations,
DocValueFormat format
) {
int nbBuckets = randomNumberOfBuckets();
List<InternalDateHistogram.Bucket> buckets = new ArrayList<>(nbBuckets);
// avoid having different random instance start from exactly the same base
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ public class InternalDateRangeTests extends InternalRangeTestCase<InternalDateRa
@Override
public void setUp() throws Exception {
super.setUp();
format = randomNumericDocValueFormat();
format = randomDateDocValueFormat();

Function<ZonedDateTime, ZonedDateTime> interval = randomFrom(
dateTime -> dateTime.plusSeconds(1),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -433,11 +433,11 @@ public void testUnmappedWithBadMissingField() {
public void testSubAggCollectsFromSingleBucketIfOneRange() throws IOException {
RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("test").field(NUMBER_FIELD_NAME)
.addRange(0d, 10d)
.subAggregation(aggCardinality("c"));
.subAggregation(aggCardinalityUpperBound("c"));

simpleTestCase(aggregationBuilder, new MatchAllDocsQuery(), range -> {
List<? extends InternalRange.Bucket> ranges = range.getBuckets();
InternalAggCardinality pc = ranges.get(0).getAggregations().get("c");
InternalAggCardinalityUpperBound pc = ranges.get(0).getAggregations().get("c");
assertThat(pc.cardinality(), equalTo(CardinalityUpperBound.ONE));
});
}
Expand All @@ -446,11 +446,11 @@ public void testSubAggCollectsFromManyBucketsIfManyRanges() throws IOException {
RangeAggregationBuilder aggregationBuilder = new RangeAggregationBuilder("test").field(NUMBER_FIELD_NAME)
.addRange(0d, 10d)
.addRange(10d, 100d)
.subAggregation(aggCardinality("c"));
.subAggregation(aggCardinalityUpperBound("c"));

simpleTestCase(aggregationBuilder, new MatchAllDocsQuery(), range -> {
List<? extends InternalRange.Bucket> ranges = range.getBuckets();
InternalAggCardinality pc = ranges.get(0).getAggregations().get("c");
InternalAggCardinalityUpperBound pc = ranges.get(0).getAggregations().get("c");
assertThat(pc.cardinality().map(i -> i), equalTo(2));
pc = ranges.get(1).getAggregations().get("c");
assertThat(pc.cardinality().map(i -> i), equalTo(2));
Expand Down
Loading

0 comments on commit 2f48d3e

Please sign in to comment.