Add shuffling xContent to aggregation tests
cbuescher committed Apr 5, 2016
1 parent b86d098 commit b01e3f0
Showing 12 changed files with 82 additions and 37 deletions.
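In short: the aggregation xContent round-trip tests now re-serialize each randomly generated builder and shuffle the order of the object keys before parsing it back, so any parser that silently depends on a fixed field order fails the test. The production-side changes below (sorting keyed filters, holding script fields in a Set, building the buckets_path array from a TreeMap) all serve to keep builder equality and serialization independent of the order in which things arrive. As a rough standalone sketch of what shuffling key order means (this is not the actual shuffleXContent helper from the test framework, only an illustration of the idea):

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Illustration only: re-emit a parsed object with its keys in random order so that
// parsers relying on a fixed field order can be caught by tests.
public final class ShuffleKeysSketch {

    @SuppressWarnings("unchecked")
    public static Map<String, Object> shuffleKeys(Map<String, Object> original) {
        List<String> keys = new ArrayList<>(original.keySet());
        Collections.shuffle(keys);
        Map<String, Object> shuffled = new LinkedHashMap<>();
        for (String key : keys) {
            Object value = original.get(key);
            if (value instanceof Map) {
                // recurse so nested objects (e.g. sub-aggregations) are shuffled as well
                value = shuffleKeys((Map<String, Object>) value);
            }
            shuffled.put(key, value);
        }
        return shuffled;
    }
}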
@@ -57,7 +57,7 @@ public class FiltersAggregator extends BucketsAggregator {
public static final ParseField OTHER_BUCKET_FIELD = new ParseField("other_bucket");
public static final ParseField OTHER_BUCKET_KEY_FIELD = new ParseField("other_bucket_key");

public static class KeyedFilter implements Writeable<KeyedFilter>, ToXContent {
public static class KeyedFilter implements Writeable<KeyedFilter>, ToXContent, Comparable<KeyedFilter> {

static final KeyedFilter PROTOTYPE = new KeyedFilter("", EmptyQueryBuilder.PROTOTYPE);
private final String key;
@@ -122,6 +122,11 @@ public boolean equals(Object obj) {
return Objects.equals(key, other.key)
&& Objects.equals(filter, other.filter);
}

@Override
public int compareTo(KeyedFilter o) {
return this.key.compareTo(o.key);
}
}

private final String[] keys;
@@ -25,14 +25,15 @@
import org.elasticsearch.index.query.EmptyQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.AggregatorBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter;
import org.elasticsearch.search.aggregations.support.AggregationContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

@@ -57,6 +58,8 @@ public FiltersAggregatorBuilder(String name, KeyedFilter... filters) {

private FiltersAggregatorBuilder(String name, List<KeyedFilter> filters) {
super(name, InternalFilters.TYPE);
// internally we want to have a fixed order of filters, regardless of the order of the filters in the request
Collections.sort(filters);
this.filters = filters;
this.keyed = true;
}
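With KeyedFilter now comparable by key, the builder sorts the filters once in its constructor, so two requests that list the same filters in a different order produce equal builders and identical xContent. A tiny standalone sketch of that invariant, using plain strings as a stand-in for the real classes:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Sketch: normalizing the order at construction time makes equality order-independent.
final class SortedKeysSketch {
    private final List<String> keys;

    SortedKeysSketch(String... keys) {
        List<String> copy = Arrays.asList(keys.clone());
        Collections.sort(copy); // fixed internal order, regardless of the request order
        this.keys = copy;
    }

    @Override
    public boolean equals(Object obj) {
        return obj instanceof SortedKeysSketch && keys.equals(((SortedKeysSketch) obj).keys);
    }

    @Override
    public int hashCode() {
        return keys.hashCode();
    }

    public static void main(String[] args) {
        // both orderings collapse to the same canonical form
        System.out.println(new SortedKeysSketch("tag2", "tag1").equals(new SortedKeysSketch("tag1", "tag2"))); // true
    }
}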
@@ -42,8 +42,10 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;

public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregatorBuilder> {

@@ -58,7 +60,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
private HighlightBuilder highlightBuilder;
private List<String> fieldNames;
private List<String> fieldDataFields;
private List<ScriptField> scriptFields;
private Set<ScriptField> scriptFields;
private FetchSourceContext fetchSourceContext;

public TopHitsAggregatorBuilder(String name) {
@@ -378,7 +380,7 @@ public TopHitsAggregatorBuilder scriptField(String name, Script script, boolean
throw new IllegalArgumentException("scriptField [script] must not be null: [" + name + "]");
}
if (scriptFields == null) {
scriptFields = new ArrayList<>();
scriptFields = new HashSet<>();
}
scriptFields.add(new ScriptField(name, script, ignoreFailure));
return this;
@@ -389,7 +391,7 @@ public TopHitsAggregatorBuilder scriptFields(List<ScriptField> scriptFields) {
throw new IllegalArgumentException("[scriptFields] must not be null: [" + name + "]");
}
if (this.scriptFields == null) {
this.scriptFields = new ArrayList<>();
this.scriptFields = new HashSet<>();
}
this.scriptFields.addAll(scriptFields);
return this;
@@ -398,7 +400,7 @@ public TopHitsAggregatorBuilder scriptFields(List<ScriptField> scriptFields) {
/**
* Gets the script fields.
*/
public List<ScriptField> scriptFields() {
public Set<ScriptField> scriptFields() {
return scriptFields;
}

@@ -541,7 +543,7 @@ protected TopHitsAggregatorBuilder doReadFrom(String name, StreamInput in) throw
factory.highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new);
if (in.readBoolean()) {
int size = in.readVInt();
List<ScriptField> scriptFields = new ArrayList<>(size);
Set<ScriptField> scriptFields = new HashSet<>(size);
for (int i = 0; i < size; i++) {
scriptFields.add(ScriptField.PROTOTYPE.readFrom(in));
}
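The scriptFields collection moves from a List to a Set (carried through to TopHitsAggregatorFactory in the next file), so the builder's equality, hashCode and serialized form no longer depend on the order in which script fields were added or parsed back; this presumes ScriptField provides value-based equals/hashCode. The difference in a nutshell, with plain strings for brevity:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Sketch: set equality ignores insertion order, list equality does not.
public final class OrderInsensitiveEqualitySketch {
    public static void main(String[] args) {
        List<String> listA = Arrays.asList("field_1", "field_2");
        List<String> listB = Arrays.asList("field_2", "field_1");
        System.out.println(listA.equals(listB)); // false: order matters for lists

        Set<String> setA = new HashSet<>(listA);
        Set<String> setB = new HashSet<>(listB);
        System.out.println(setA.equals(setB));   // true: sets compare by membership only
    }
}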
@@ -42,6 +42,7 @@
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregatorFactory> {

@@ -54,12 +55,12 @@ public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregato
private final HighlightBuilder highlightBuilder;
private final List<String> fieldNames;
private final List<String> fieldDataFields;
private final List<ScriptField> scriptFields;
private final Set<ScriptField> scriptFields;
private final FetchSourceContext fetchSourceContext;

public TopHitsAggregatorFactory(String name, Type type, int from, int size, boolean explain, boolean version, boolean trackScores,
List<SortBuilder<?>> sorts, HighlightBuilder highlightBuilder, List<String> fieldNames, List<String> fieldDataFields,
List<ScriptField> scriptFields, FetchSourceContext fetchSourceContext, AggregationContext context, AggregatorFactory<?> parent,
Set<ScriptField> scriptFields, FetchSourceContext fetchSourceContext, AggregationContext context, AggregatorFactory<?> parent,
AggregatorFactories.Builder subFactories, Map<String, Object> metaData) throws IOException {
super(name, type, context, parent, subFactories, metaData);
this.from = from;
@@ -39,9 +39,9 @@
public abstract class PipelineAggregatorBuilder<PAB extends PipelineAggregatorBuilder<PAB>> extends ToXContentToBytes
implements NamedWriteable<PipelineAggregatorBuilder<PAB>>, ToXContent {

protected String name;
protected String type;
protected String[] bucketsPaths;
protected final String name;
protected final String type;
protected final String[] bucketsPaths;
protected Map<String, Object> metaData;

/**
@@ -27,6 +27,7 @@
import org.elasticsearch.script.Script.ScriptField;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
@@ -24,18 +24,19 @@
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.Script.ScriptField;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.TreeMap;

public class BucketScriptPipelineAggregatorBuilder extends PipelineAggregatorBuilder<BucketScriptPipelineAggregatorBuilder> {

@@ -48,7 +49,8 @@ public class BucketScriptPipelineAggregatorBui
private GapPolicy gapPolicy = GapPolicy.SKIP;

public BucketScriptPipelineAggregatorBuilder(String name, Map<String, String> bucketsPathsMap, Script script) {
super(name, BucketScriptPipelineAggregator.TYPE.name(), bucketsPathsMap.values().toArray(new String[bucketsPathsMap.size()]));
super(name, BucketScriptPipelineAggregator.TYPE.name(), new TreeMap<>(bucketsPathsMap).values()
.toArray(new String[bucketsPathsMap.size()]));
this.bucketsPathsMap = bucketsPathsMap;
this.script = script;
}
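Both script-based pipeline builders (this one and the bucket_selector variant below) now route bucketsPathsMap through a TreeMap before extracting the values, so the buckets_path array handed to the super constructor always comes out in key-sorted order even when the caller supplies a HashMap with unspecified iteration order. A small illustration, with hypothetical path names chosen only for the example:

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

// Sketch: a TreeMap view yields a deterministic, key-sorted value order.
public final class DeterministicBucketsPathsSketch {
    public static void main(String[] args) {
        Map<String, String> bucketsPathsMap = new HashMap<>();
        bucketsPathsMap.put("my_var2", "the_sum"); // hypothetical paths, illustration only
        bucketsPathsMap.put("my_var1", "the_avg");

        // HashMap iteration order is unspecified; sorting by key makes the array stable.
        String[] bucketsPaths = new TreeMap<>(bucketsPathsMap).values()
                .toArray(new String[bucketsPathsMap.size()]);
        System.out.println(Arrays.toString(bucketsPaths)); // always [the_avg, the_sum]
    }
}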
@@ -27,6 +27,7 @@
import org.elasticsearch.script.Script.ScriptField;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
@@ -24,17 +24,18 @@
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.Script.ScriptField;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptParser;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.TreeMap;

public class BucketSelectorPipelineAggregatorBuilder extends PipelineAggregatorBuilder<BucketSelectorPipelineAggregatorBuilder> {

@@ -43,10 +44,11 @@ public class BucketSelectorPipelineAggregatorB

private Script script;
private GapPolicy gapPolicy = GapPolicy.SKIP;
private Map<String, String> bucketsPathsMap;
private final Map<String, String> bucketsPathsMap;

public BucketSelectorPipelineAggregatorBuilder(String name, Map<String, String> bucketsPathsMap, Script script) {
super(name, BucketSelectorPipelineAggregator.TYPE.name(), bucketsPathsMap.values().toArray(new String[bucketsPathsMap.size()]));
super(name, BucketSelectorPipelineAggregator.TYPE.name(), new TreeMap<>(bucketsPathsMap).values()
.toArray(new String[bucketsPathsMap.size()]));
this.bucketsPathsMap = bucketsPathsMap;
this.script = script;
}
@@ -36,8 +36,11 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;
import org.elasticsearch.index.Index;
@@ -215,8 +218,13 @@ public static void afterClass() throws Exception {
public void testFromXContent() throws IOException {
AB testAgg = createTestAggregatorBuilder();
AggregatorFactories.Builder factoriesBuilder = AggregatorFactories.builder().addAggregator(testAgg);
String contentString = factoriesBuilder.toString();
XContentParser parser = XContentFactory.xContent(contentString).createParser(contentString);
XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
if (randomBoolean()) {
builder.prettyPrint();
}
factoriesBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
XContentBuilder shuffled = shuffleXContent(builder, Collections.emptySet());
XContentParser parser = XContentFactory.xContent(shuffled.bytes()).createParser(shuffled.bytes());
QueryParseContext parseContext = new QueryParseContext(queriesRegistry);
parseContext.reset(parser);
parseContext.parseFieldMatcher(parseFieldMatcher);
@@ -36,8 +36,11 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;
import org.elasticsearch.index.Index;
@@ -216,9 +219,14 @@ public static void afterClass() throws Exception {
public void testFromXContent() throws IOException {
AF testAgg = createTestAggregatorFactory();
AggregatorFactories.Builder factoriesBuilder = AggregatorFactories.builder().skipResolveOrder().addPipelineAggregator(testAgg);
String contentString = factoriesBuilder.toString();
logger.info("Content string: {}", contentString);
XContentParser parser = XContentFactory.xContent(contentString).createParser(contentString);
logger.info("Content string: {}", factoriesBuilder);
XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
if (randomBoolean()) {
builder.prettyPrint();
}
factoriesBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
XContentBuilder shuffled = shuffleXContent(builder, Collections.emptySet());
XContentParser parser = XContentFactory.xContent(shuffled.bytes()).createParser(shuffled.bytes());
QueryParseContext parseContext = new QueryParseContext(queriesRegistry);
parseContext.reset(parser);
parseContext.parseFieldMatcher(parseFieldMatcher);
@@ -112,7 +112,8 @@ public void setupSuiteScopeCluster() throws Exception {

public void testSimple() throws Exception {
SearchResponse response = client().prepareSearch("idx").addAggregation(
filters("tags", new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))))
filters("tags", randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")),
new KeyedFilter("tag2", termQuery("tag", "tag2")))))
.execute().actionGet();

assertSearchResponse(response);
@@ -137,7 +138,8 @@ public void testEmptyFilterDeclarations() throws Exception {
public void testEmptyFilterDeclarations() throws Exception {
QueryBuilder<?> emptyFilter = new BoolQueryBuilder();
SearchResponse response = client().prepareSearch("idx")
.addAggregation(filters("tags", new KeyedFilter("all", emptyFilter), new KeyedFilter("tag1", termQuery("tag", "tag1"))))
.addAggregation(filters("tags", randomOrder(new KeyedFilter("all", emptyFilter),
new KeyedFilter("tag1", termQuery("tag", "tag1")))))
.execute().actionGet();

assertSearchResponse(response);
@@ -154,8 +156,8 @@ public void testWithSubAggregation() throws Exception {

public void testWithSubAggregation() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(filters("tags", new KeyedFilter("tag1", termQuery("tag", "tag1")),
new KeyedFilter("tag2", termQuery("tag", "tag2"))).subAggregation(avg("avg_value").field("value")))
.addAggregation(filters("tags", randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")),
new KeyedFilter("tag2", termQuery("tag", "tag2")))).subAggregation(avg("avg_value").field("value")))
.execute().actionGet();

assertSearchResponse(response);
@@ -254,9 +256,9 @@ public void testWithContextBasedSubAggregation() throws Exception {
try {
client().prepareSearch("idx")
.addAggregation(
filters("tags", new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2")))
.subAggregation(avg("avg_value"))
)
filters("tags",
randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")),
new KeyedFilter("tag2", termQuery("tag", "tag2")))).subAggregation(avg("avg_value")))
.execute().actionGet();

fail("expected execution to fail - an attempt to have a context based numeric sub-aggregation, but there is not value source" +
@@ -314,8 +316,8 @@ public void testOtherBucket() throws Exception {

public void testOtherBucket() throws Exception {
SearchResponse response = client().prepareSearch("idx").addAggregation(
filters("tags", new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2")))
.otherBucket(true))
filters("tags", randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")),
new KeyedFilter("tag2", termQuery("tag", "tag2")))).otherBucket(true))
.execute().actionGet();

assertSearchResponse(response);
@@ -341,8 +343,8 @@ public void testOtherNamedBucket() throws Exception {

public void testOtherNamedBucket() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(filters("tags", new KeyedFilter("tag1", termQuery("tag", "tag1")),
new KeyedFilter("tag2", termQuery("tag", "tag2"))).otherBucket(true).otherBucketKey("foobar"))
.addAggregation(filters("tags", randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")),
new KeyedFilter("tag2", termQuery("tag", "tag2")))).otherBucket(true).otherBucketKey("foobar"))
.execute().actionGet();

assertSearchResponse(response);
@@ -397,8 +399,8 @@ public void testOtherWithSubAggregation() throws Exception {

public void testOtherWithSubAggregation() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(filters("tags", new KeyedFilter("tag1", termQuery("tag", "tag1")),
new KeyedFilter("tag2", termQuery("tag", "tag2"))).otherBucket(true)
.addAggregation(filters("tags", randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")),
new KeyedFilter("tag2", termQuery("tag", "tag2")))).otherBucket(true)
.subAggregation(avg("avg_value").field("value")))
.execute().actionGet();

@@ -485,4 +487,14 @@ public void testEmptyAggregationWithOtherBucket() throws Exception {
assertThat(other.getDocCount(), is(0L));
}

private static KeyedFilter[] randomOrder(KeyedFilter... filters) {
for (int i = 0; i < filters.length; i++) {
KeyedFilter tmp = filters[i];
int index = randomInt(filters.length - 1);
filters[i] = filters[index];
filters[index] = tmp;
}
return filters;
}

}
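The randomOrder helper above swaps every position with a randomly chosen partner; it is not a textbook Fisher-Yates shuffle and its permutations are not uniformly distributed, but for these tests all that matters is that the request-side order of the keyed filters varies between runs while the builder's internal sorting keeps the results deterministic. A self-contained version, with java.util.Random standing in for the test framework's randomInt purely to keep the snippet runnable:

import java.util.Arrays;
import java.util.Random;

// Standalone sketch of the randomOrder helper: in-place swap with a random partner.
public final class RandomOrderSketch {
    private static final Random RANDOM = new Random();

    static String[] randomOrder(String... items) {
        for (int i = 0; i < items.length; i++) {
            int index = RANDOM.nextInt(items.length); // stand-in for randomInt(items.length - 1)
            String tmp = items[i];
            items[i] = items[index];
            items[index] = tmp;
        }
        return items;
    }

    public static void main(String[] args) {
        System.out.println(Arrays.toString(randomOrder("tag1", "tag2", "tag3")));
    }
}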
