remove handling of 6.1.x versions
Relates to refactoring initiative elastic#41164.
talevy committed May 9, 2019
1 parent 70eb812 commit 319f53b
Showing 43 changed files with 98 additions and 322 deletions.
@@ -293,17 +293,9 @@ protected void doWriteTo(StreamOutput out) throws IOException {
} else {
out.writeBoolean(false);
}
if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
out.writeVInt(documents.size());
for (BytesReference document : documents) {
out.writeBytesReference(document);
}
} else {
if (documents.size() > 1) {
throw new IllegalArgumentException("Nodes prior to 6.1.0 cannot accept multiple documents");
}
BytesReference doc = documents.isEmpty() ? null : documents.iterator().next();
out.writeOptionalBytesReference(doc);
out.writeVInt(documents.size());
for (BytesReference document : documents) {
out.writeBytesReference(document);
}
if (documents.isEmpty() == false) {
out.writeEnum(documentXContentType);
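The hunk above collapses the write path to an unconditional count-plus-payload encoding. For context, a hedged sketch of the matching read side, which is not shown in this hunk; the class and method names are assumptions for illustration, while StreamInput's readVInt and readBytesReference calls are the real API used throughout Elasticsearch serialization.

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

class DocumentsWireFormatSketch {
    // Reads what the simplified doWriteTo above writes: a vInt count followed
    // by that many length-prefixed byte blobs.
    static List<BytesReference> readDocuments(StreamInput in) throws IOException {
        int count = in.readVInt();
        List<BytesReference> documents = new ArrayList<>(count);
        for (int i = 0; i < count; i++) {
            documents.add(in.readBytesReference());
        }
        return documents;
    }
}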
@@ -261,7 +261,7 @@ Query percolateQuery(String name, PercolateQuery.QueryStore queryStore, List<Byt
}
Query filter = null;
if (excludeNestedDocuments) {
filter = Queries.newNonNestedFilter(indexVersion);
filter = Queries.newNonNestedFilter();
}
return new PercolateQuery(name, queryStore, documents, candidateQuery, searcher, filter, verifiedMatchesQuery);
}
@@ -285,7 +285,7 @@ Tuple<BooleanQuery, Boolean> createCandidateQuery(IndexReader indexReader, Versi
}

BooleanQuery.Builder candidateQuery = new BooleanQuery.Builder();
if (canUseMinimumShouldMatchField && indexVersion.onOrAfter(Version.V_6_1_0)) {
if (canUseMinimumShouldMatchField) {
LongValuesSource valuesSource = LongValuesSource.fromIntField(minimumShouldMatchField.name());
for (BytesRef extractedTerm : extractedTerms) {
subQueries.add(new TermQuery(new Term(queryTermsField.name(), extractedTerm)));
@@ -471,9 +471,7 @@ void processQuery(Query query, ParseContext context) {
for (IndexableField field : fields) {
context.doc().add(field);
}
if (indexVersionCreated.onOrAfter(Version.V_6_1_0)) {
doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch));
}
doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch));
}

static Query parseQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, XContentParser parser) throws IOException {
@@ -30,7 +30,6 @@
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BitSetIterator;
import org.elasticsearch.Version;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.search.SearchHit;
@@ -73,7 +72,7 @@ static void innerHitsExecute(Query mainQuery,
for (PercolateQuery percolateQuery : percolateQueries) {
String fieldName = singlePercolateQuery ? FIELD_NAME_PREFIX : FIELD_NAME_PREFIX + "_" + percolateQuery.getName();
IndexSearcher percolatorIndexSearcher = percolateQuery.getPercolatorIndexSearcher();
Query nonNestedQuery = Queries.newNonNestedFilter(Version.CURRENT);
Query nonNestedQuery = Queries.newNonNestedFilter();
Weight weight = percolatorIndexSearcher.createWeight(percolatorIndexSearcher.rewrite(nonNestedQuery),
ScoreMode.COMPLETE_NO_SCORES, 1f);
Scorer s = weight.scorer(percolatorIndexSearcher.getIndexReader().leaves().get(0));
@@ -97,6 +97,7 @@
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.VersionUtils;
import org.junit.After;
import org.junit.Before;

@@ -593,7 +594,7 @@ public void testRangeQueries() throws Exception {
IndexSearcher shardSearcher = newSearcher(directoryReader);
shardSearcher.setQueryCache(null);

Version v = Version.V_6_1_0;
Version v = VersionUtils.randomIndexCompatibleVersion(random());
MemoryIndex memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new IntPoint("int_field", 3)), new WhitespaceAnalyzer());
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
Query query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")),
20 changes: 0 additions & 20 deletions server/src/main/java/org/elasticsearch/Version.java
@@ -52,16 +52,6 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final int V_6_0_1_ID = 6000199;
public static final Version V_6_0_1 =
new Version(V_6_0_1_ID, org.apache.lucene.util.Version.LUCENE_7_0_1);
public static final int V_6_1_0_ID = 6010099;
public static final Version V_6_1_0 = new Version(V_6_1_0_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
public static final int V_6_1_1_ID = 6010199;
public static final Version V_6_1_1 = new Version(V_6_1_1_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
public static final int V_6_1_2_ID = 6010299;
public static final Version V_6_1_2 = new Version(V_6_1_2_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
public static final int V_6_1_3_ID = 6010399;
public static final Version V_6_1_3 = new Version(V_6_1_3_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
public static final int V_6_1_4_ID = 6010499;
public static final Version V_6_1_4 = new Version(V_6_1_4_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
// The below version is missing from the 7.3 JAR
private static final org.apache.lucene.util.Version LUCENE_7_2_1 = org.apache.lucene.util.Version.fromBits(7, 2, 1);
public static final int V_6_2_0_ID = 6020099;
@@ -202,16 +192,6 @@ public static Version fromId(int id) {
return V_6_2_1;
case V_6_2_0_ID:
return V_6_2_0;
case V_6_1_4_ID:
return V_6_1_4;
case V_6_1_3_ID:
return V_6_1_3;
case V_6_1_2_ID:
return V_6_1_2;
case V_6_1_1_ID:
return V_6_1_1;
case V_6_1_0_ID:
return V_6_1_0;
case V_6_0_1_ID:
return V_6_0_1;
case V_6_0_0_ID:
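The deleted constants follow the id scheme visible in the ones that remain (6000199 for 6.0.1, 6020099 for 6.2.0): two digits each for major, minor, and revision, plus a two-digit suffix where 99 appears to mark a GA build. A small worked example; the helper and class names are illustrative, not part of Version.java.

class VersionIdSketch {
    // id = MMmmrr99 for GA releases, judging from the constants in this file.
    static int id(int major, int minor, int revision) {
        return major * 1_000_000 + minor * 10_000 + revision * 100 + 99;
    }

    public static void main(String[] args) {
        System.out.println(id(6, 1, 0)); // 6010099 == the removed V_6_1_0_ID
        System.out.println(id(6, 1, 4)); // 6010499 == the removed V_6_1_4_ID
        System.out.println(id(6, 2, 0)); // 6020099 == V_6_2_0_ID, still present
    }
}

Because each removed constant had a matching case arm in fromId(int), the switch cases for 6010099 through 6010499 are deleted along with them, as the second hunk above shows.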
@@ -31,7 +31,6 @@
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.TypeFieldMapper;
@@ -72,17 +71,9 @@ public static Query newNestedFilter() {

/**
* Creates a new non-nested docs query
* @param indexVersionCreated the index version created since newer indices can identify a parent field more efficiently
*/
public static Query newNonNestedFilter(Version indexVersionCreated) {
if (indexVersionCreated.onOrAfter(Version.V_6_1_0)) {
public static Query newNonNestedFilter() {
return new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME);
} else {
return new BooleanQuery.Builder()
.add(new MatchAllDocsQuery(), Occur.FILTER)
.add(newNestedFilter(), Occur.MUST_NOT)
.build();
}
}

public static BooleanQuery filtered(@Nullable Query query, @Nullable Query filter) {
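With the version gate gone, the non-nested filter is always the doc-values existence check. A minimal usage sketch mirroring the updated QueriesTests at the end of this diff; the wrapper class is illustrative, the Queries, SeqNoFieldMapper, and DocValuesFieldExistsQuery types are the real ones used in the hunks.

import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;

class NonNestedFilterSketch {
    static Query rootDocumentsOnly() {
        Query nonNested = Queries.newNonNestedFilter();
        // Equivalent to asking "does this document have a _primary_term doc value?",
        // which on 6.1+ indices is true only for root (non-nested) documents.
        assert nonNested.equals(new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME));
        return nonNested;
    }
}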
@@ -251,7 +251,7 @@ public IndexWarmer.TerminationHandle warmReader(final IndexShard indexShard, fin
}

if (hasNested) {
warmUp.add(Queries.newNonNestedFilter(indexSettings.getIndexVersionCreated()));
warmUp.add(Queries.newNonNestedFilter());
}

final CountDownLatch latch = new CountDownLatch(searcher.reader().leaves().size() * warmUp.size());
@@ -24,7 +24,6 @@
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
@@ -202,10 +201,7 @@ public void preParse(ParseContext context) {
}

@Override
public void postParse(ParseContext context) throws IOException {
if (context.indexSettings().getIndexVersionCreated().before(Version.V_6_1_0)) {
super.parse(context);
}
public void postParse(ParseContext context) {
}

@Override
@@ -28,7 +28,6 @@
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -255,15 +254,9 @@ public void postParse(ParseContext context) throws IOException {
// we share the parent docs fields to ensure good compression
SequenceIDFields seqID = context.seqID();
assert seqID != null;
final Version versionCreated = context.mapperService().getIndexSettings().getIndexVersionCreated();
final boolean includePrimaryTerm = versionCreated.before(Version.V_6_1_0);
for (Document doc : context.nonRootDocuments()) {
doc.add(seqID.seqNo);
doc.add(seqID.seqNoDocValue);
if (includePrimaryTerm) {
// primary terms are used to distinguish between parent and nested docs since 6.1.0
doc.add(seqID.primaryTerm);
}
}
}

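After this change nested documents never receive the primary term; only the root document does, and that asymmetry is exactly what lets Queries.newNonNestedFilter() above select root documents with a doc-values existence check. A hedged, condensed Lucene-level sketch of the resulting layout; the field names follow the usual _seq_no/_primary_term convention but are written out literally here only for illustration, and the real mapper adds further seq-no fields not shown.

import org.apache.lucene.document.Document;
import org.apache.lucene.document.NumericDocValuesField;

class SeqNoLayoutSketch {
    static Document rootDocument(long seqNo, long primaryTerm) {
        Document doc = new Document();
        doc.add(new NumericDocValuesField("_seq_no", seqNo));
        doc.add(new NumericDocValuesField("_primary_term", primaryTerm)); // root documents only
        return doc;
    }

    static Document nestedDocument(long seqNo) {
        Document doc = new Document();
        doc.add(new NumericDocValuesField("_seq_no", seqNo)); // no _primary_term here
        return doc;
    }
}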
@@ -140,7 +140,7 @@ public Query termsQuery(List<?> values, QueryShardContext context) {
.anyMatch(indexType::equals)) {
if (context.getMapperService().hasNested()) {
// type filters are expected not to match nested docs
return Queries.newNonNestedFilter(context.indexVersionCreated());
return Queries.newNonNestedFilter();
} else {
return new MatchAllDocsQuery();
}
@@ -19,14 +19,11 @@

package org.elasticsearch.index.query;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
@@ -148,10 +145,6 @@ public static Query newFilter(QueryShardContext context, String fieldPattern) {
fields = context.simpleMatchToIndexNames(fieldPattern);
}

if (context.indexVersionCreated().before(Version.V_6_1_0)) {
return newLegacyExistsQuery(context, fields);
}

if (fields.size() == 1) {
String field = fields.iterator().next();
return newFieldExistsQuery(context, field);
@@ -164,28 +157,6 @@ public static Query newFilter(QueryShardContext context, String fieldPattern) {
return new ConstantScoreQuery(boolFilterBuilder.build());
}

private static Query newLegacyExistsQuery(QueryShardContext context, Collection<String> fields) {
// We create TermsQuery directly here rather than using FieldNamesFieldType.termsQuery()
// so we don't end up with deprecation warnings
if (fields.size() == 1) {
Query filter = newLegacyExistsQuery(context, fields.iterator().next());
return new ConstantScoreQuery(filter);
}

BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder();
for (String field : fields) {
Query filter = newLegacyExistsQuery(context, field);
boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD);
}
return new ConstantScoreQuery(boolFilterBuilder.build());
}

private static Query newLegacyExistsQuery(QueryShardContext context, String field) {
MappedFieldType fieldType = context.fieldMapper(field);
String fieldName = fieldType != null ? fieldType.name() : field;
return new TermQuery(new Term(FieldNamesFieldMapper.NAME, fieldName));
}

private static Query newFieldExistsQuery(QueryShardContext context, String field) {
MappedFieldType fieldType = context.getMapperService().fullName(field);
if (fieldType == null) {
@@ -281,7 +281,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException {
Query innerQuery;
ObjectMapper objectMapper = context.nestedScope().getObjectMapper();
if (objectMapper == null) {
parentFilter = context.bitsetFilter(Queries.newNonNestedFilter(context.indexVersionCreated()));
parentFilter = context.bitsetFilter(Queries.newNonNestedFilter());
} else {
parentFilter = context.bitsetFilter(objectMapper.nestedTypeFilter());
}
@@ -388,7 +388,7 @@ public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException {
SearchHit hit = hits[i];
Query rawParentFilter;
if (parentObjectMapper == null) {
rawParentFilter = Queries.newNonNestedFilter(context.indexShard().indexSettings().getIndexVersionCreated());
rawParentFilter = Queries.newNonNestedFilter();
} else {
rawParentFilter = parentObjectMapper.nestedTypeFilter();
}
@@ -341,7 +341,7 @@
*/
private static BitSetProducer newParentDocBitSetProducer(Version indexVersionCreated) {
return context -> {
Query query = Queries.newNonNestedFilter(indexVersionCreated);
Query query = Queries.newNonNestedFilter();
final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context);
final IndexSearcher searcher = new IndexSearcher(topLevelContext);
searcher.setQueryCache(null);
@@ -278,7 +278,7 @@ public Query buildFilteredQuery(Query query) {
&& typeFilter == null // when a _type filter is set, it will automatically exclude nested docs
&& new NestedHelper(mapperService()).mightMatchNestedDocs(query)
&& (aliasFilter == null || new NestedHelper(mapperService()).mightMatchNestedDocs(aliasFilter))) {
filters.add(Queries.newNonNestedFilter(mapperService().getIndexSettings().getIndexVersionCreated()));
filters.add(Queries.newNonNestedFilter());
}

if (aliasFilter != null) {
@@ -66,7 +66,7 @@ public class NestedAggregator extends BucketsAggregator implements SingleBucketA
super(name, factories, context, parentAggregator, pipelineAggregators, metaData);

Query parentFilter = parentObjectMapper != null ? parentObjectMapper.nestedTypeFilter()
: Queries.newNonNestedFilter(context.mapperService().getIndexSettings().getIndexVersionCreated());
: Queries.newNonNestedFilter();
this.parentFilter = context.bitsetFilterCache().getBitSetProducer(parentFilter);
this.childFilter = childObjectMapper.nestedTypeFilter();
this.collectsFromSingleBucket = collectsFromSingleBucket;
@@ -54,7 +54,7 @@ public ReverseNestedAggregator(String name, AggregatorFactories factories, Objec
throws IOException {
super(name, factories, context, parent, pipelineAggregators, metaData);
if (objectMapper == null) {
parentFilter = Queries.newNonNestedFilter(context.mapperService().getIndexSettings().getIndexVersionCreated());
parentFilter = Queries.newNonNestedFilter();
} else {
parentFilter = objectMapper.nestedTypeFilter();
}
@@ -190,7 +190,7 @@ public void execute(SearchContext context) {
private int findRootDocumentIfNested(SearchContext context, LeafReaderContext subReaderContext, int subDocId) throws IOException {
if (context.mapperService().hasNested()) {
BitSet bits = context.bitsetFilterCache()
.getBitSetProducer(Queries.newNonNestedFilter(context.indexShard().indexSettings().getIndexVersionCreated()))
.getBitSetProducer(Queries.newNonNestedFilter())
.getBitSet(subReaderContext);
if (!bits.get(subDocId)) {
return bits.nextSetBit(subDocId);
@@ -363,7 +363,7 @@ private SearchHit.NestedIdentity getInternalNestedIdentity(SearchContext context
}
parentFilter = nestedParentObjectMapper.nestedTypeFilter();
} else {
parentFilter = Queries.newNonNestedFilter(context.indexShard().indexSettings().getIndexVersionCreated());
parentFilter = Queries.newNonNestedFilter();
}

Query childFilter = nestedObjectMapper.nestedTypeFilter();
@@ -185,7 +185,7 @@ protected static Nested resolveNested(QueryShardContext context, NestedSortBuild
final ObjectMapper objectMapper = context.nestedScope().getObjectMapper();
final Query parentQuery;
if (objectMapper == null) {
parentQuery = Queries.newNonNestedFilter(context.indexVersionCreated());
parentQuery = Queries.newNonNestedFilter();
} else {
parentQuery = objectMapper.nestedTypeFilter();
}
@@ -20,33 +20,21 @@
package org.elasticsearch.common.lucene.search;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;

public class QueriesTests extends ESTestCase {

public void testNonNestedQuery() {
for (Version version : VersionUtils.allVersions()) {
// This is a custom query that extends AutomatonQuery and want to make sure the equals method works
assertEquals(Queries.newNonNestedFilter(version), Queries.newNonNestedFilter(version));
assertEquals(Queries.newNonNestedFilter(version).hashCode(), Queries.newNonNestedFilter(version).hashCode());
if (version.onOrAfter(Version.V_6_1_0)) {
assertEquals(Queries.newNonNestedFilter(version), new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME));
} else {
assertEquals(Queries.newNonNestedFilter(version), new BooleanQuery.Builder()
.add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER)
.add(Queries.newNestedFilter(), BooleanClause.Occur.MUST_NOT)
.build());
}
}
// This is a custom query that extends AutomatonQuery and want to make sure the equals method works
assertEquals(Queries.newNonNestedFilter(), Queries.newNonNestedFilter());
assertEquals(Queries.newNonNestedFilter().hashCode(), Queries.newNonNestedFilter().hashCode());
assertEquals(Queries.newNonNestedFilter(), new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME));
}

public void testIsNegativeQuery() {