diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 61ff4a4ff3d0f..e8ef834a7c84a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -459,13 +459,15 @@ private static ParseContext nestedContext(ParseContext context, ObjectMapper map private static void parseObjectOrField(ParseContext context, Mapper mapper) throws IOException { if (mapper instanceof ObjectMapper) { parseObjectOrNested(context, (ObjectMapper) mapper); - } else { - FieldMapper fieldMapper = (FieldMapper)mapper; + } else if (mapper instanceof FieldMapper) { + FieldMapper fieldMapper = (FieldMapper) mapper; Mapper update = fieldMapper.parse(context); if (update != null) { context.addDynamicMapper(update); } parseCopyFields(context, fieldMapper.copyTo().copyToFields()); + } else { + throw new IllegalArgumentException("Cannot write to a field alias [" + mapper.name() + "]."); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java new file mode 100644 index 0000000000000..4e3c38e7c1ed2 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java @@ -0,0 +1,121 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.support.XContentMapValues; + +import java.io.IOException; +import java.util.Collections; +import java.util.Iterator; +import java.util.Map; + +public class FieldAliasMapper extends Mapper { + public static final String CONTENT_TYPE = "alias"; + + public static class Names { + public static final String PATH = "path"; + } + + private final String name; + private final String path; + + public FieldAliasMapper(String simpleName, + String name, + String path) { + super(simpleName); + this.name = name; + this.path = path; + } + + @Override + public String name() { + return name; + } + + public String path() { + return path; + } + + @Override + public Mapper merge(Mapper mergeWith) { + if (!(mergeWith instanceof FieldAliasMapper)) { + throw new IllegalArgumentException("Cannot merge a field alias [" + name() + + "] with a mapping that is not an alias [" + mergeWith.name() + "]."); + } + return mergeWith; + } + + @Override + public Mapper updateFieldType(Map fullNameToFieldType) { + return this; + } + + @Override + public Iterator iterator() { + return Collections.emptyIterator(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject(simpleName()) + .field("type", CONTENT_TYPE) + .field(Names.PATH, path) + .endObject(); + } + + public static class TypeParser implements Mapper.TypeParser { + @Override + public Mapper.Builder parse(String name, Map node, ParserContext parserContext) + throws MapperParsingException { + FieldAliasMapper.Builder builder = new FieldAliasMapper.Builder(name); + Object pathField = node.remove(Names.PATH); + String path = XContentMapValues.nodeStringValue(pathField, null); + if (path == null) { + throw new IllegalArgumentException("The [path] property must be specified."); + } + return builder.path(path); + } + } + + public static class Builder extends Mapper.Builder { + private String name; + private String path; + + protected Builder(String name) { + super(name); + this.name = name; + } + + public String name() { + return this.name; + } + + public Builder path(String path) { + this.path = path; + return this; + } + + public FieldAliasMapper build(BuilderContext context) { + String fullName = context.path().pathAsText(name); + return new FieldAliasMapper(name, fullName, path); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index 069468ddb7a25..76c698ed1a285 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -37,14 +37,18 @@ class FieldTypeLookup implements Iterable { /** Full field name to field type */ final CopyOnWriteHashMap fullNameToFieldType; + final CopyOnWriteHashMap aliasToFullName; /** Create a new empty instance. */ FieldTypeLookup() { fullNameToFieldType = new CopyOnWriteHashMap<>(); + aliasToFullName = new CopyOnWriteHashMap<>(); } - private FieldTypeLookup(CopyOnWriteHashMap fullName) { + private FieldTypeLookup(CopyOnWriteHashMap fullName, + CopyOnWriteHashMap aliasToFullName) { this.fullNameToFieldType = fullName; + this.aliasToFullName = aliasToFullName; } /** @@ -52,13 +56,21 @@ private FieldTypeLookup(CopyOnWriteHashMap fullName) { * from the provided fields. 
If a field already exists, the field type will be updated * to use the new mappers field type. */ + public FieldTypeLookup copyAndAddAll(String type, Collection fieldMappers) { + return copyAndAddAll(type, fieldMappers, new ArrayList<>()); + } + + public FieldTypeLookup copyAndAddAll(String type, + Collection fieldMappers, + Collection fieldAliasMappers) { Objects.requireNonNull(type, "type must not be null"); if (MapperService.DEFAULT_MAPPING.equals(type)) { throw new IllegalArgumentException("Default mappings should not be added to the lookup"); } CopyOnWriteHashMap fullName = this.fullNameToFieldType; + CopyOnWriteHashMap aliases = this.aliasToFullName; for (FieldMapper fieldMapper : fieldMappers) { MappedFieldType fieldType = fieldMapper.fieldType(); @@ -75,7 +87,14 @@ public FieldTypeLookup copyAndAddAll(String type, Collection fieldM } } } - return new FieldTypeLookup(fullName); + + for (FieldAliasMapper fieldAliasMapper : fieldAliasMappers) { + String aliasName = fieldAliasMapper.name(); + String fieldName = fieldAliasMapper.path(); + aliases = aliases.copyAndPut(aliasName, fieldName); + } + + return new FieldTypeLookup(fullName, aliases); } /** @@ -92,7 +111,10 @@ private void checkCompatibility(MappedFieldType existingFieldType, MappedFieldTy /** Returns the field for the given field */ public MappedFieldType get(String field) { - return fullNameToFieldType.get(field); + String resolvedField = aliasToFullName.get(field); + return resolvedField == null + ? fullNameToFieldType.get(field) + : fullNameToFieldType.get(resolvedField); } /** @@ -105,6 +127,11 @@ public Collection simpleMatchToFullName(String pattern) { fields.add(fieldType.name()); } } + for (String aliasName : aliasToFullName.keySet()) { + if (Regex.simpleMatch(pattern, aliasName)) { + fields.add(aliasName); + } + } return fields; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index a06288b67e3bd..b27c4ef39d3a9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -21,7 +21,6 @@ import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; @@ -395,15 +394,17 @@ private synchronized Map internalMerge(@Nullable Documen // check basic sanity of the new mapping List objectMappers = new ArrayList<>(); List fieldMappers = new ArrayList<>(); + List fieldAliasMappers = new ArrayList<>(); Collections.addAll(fieldMappers, newMapper.mapping().metadataMappers); - MapperUtils.collect(newMapper.mapping().root(), objectMappers, fieldMappers); + MapperUtils.collect(newMapper.mapping().root(), objectMappers, fieldMappers, fieldAliasMappers); checkFieldUniqueness(newMapper.type(), objectMappers, fieldMappers, fullPathObjectMappers, fieldTypes); checkObjectsCompatibility(objectMappers, fullPathObjectMappers); checkPartitionedIndexConstraints(newMapper); + // TODO: check aliases are valid here? 
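The fieldTypes.copyAndAddAll(...) call just below is what makes aliases visible at query time: each collected FieldAliasMapper contributes one aliasToFullName entry, and FieldTypeLookup.get consults that map first, otherwise treating the requested name as a concrete field. A minimal, self-contained sketch of that one-hop resolution, with plain HashMaps standing in for CopyOnWriteHashMap, String values standing in for MappedFieldType, and hypothetical field names:

```java
import java.util.HashMap;
import java.util.Map;

// Sketch of the indirection FieldTypeLookup gains in this patch: resolving an alias is a
// single extra map hop before the normal full-name lookup. Plain HashMaps stand in for
// CopyOnWriteHashMap, String values stand in for MappedFieldType, and the field names
// ("user_identifier" / "user_id") are hypothetical.
public class AliasLookupSketch {
    private final Map<String, String> fullNameToFieldType = new HashMap<>();
    private final Map<String, String> aliasToFullName = new HashMap<>();

    public String get(String field) {
        String resolved = aliasToFullName.get(field);
        return resolved == null
            ? fullNameToFieldType.get(field)
            : fullNameToFieldType.get(resolved);
    }

    public static void main(String[] args) {
        AliasLookupSketch lookup = new AliasLookupSketch();
        lookup.fullNameToFieldType.put("user_identifier", "keyword");
        lookup.aliasToFullName.put("user_id", "user_identifier");

        // Both names resolve to the same field type; unknown names still return null.
        System.out.println(lookup.get("user_identifier")); // keyword
        System.out.println(lookup.get("user_id"));         // keyword
        System.out.println(lookup.get("missing"));         // null
    }
}
```

The same alias map also backs the simpleMatchToFullName addition above, so wildcard field patterns match alias names as well as concrete field names.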
// update lookup data-structures // this will in particular make sure that the merged fields are compatible with other types - fieldTypes = fieldTypes.copyAndAddAll(newMapper.type(), fieldMappers); + fieldTypes = fieldTypes.copyAndAddAll(newMapper.type(), fieldMappers, fieldAliasMappers); for (ObjectMapper objectMapper : objectMappers) { if (fullPathObjectMappers == this.fullPathObjectMappers) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java index ad57d72b345ab..967e2f7153709 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java @@ -19,22 +19,32 @@ package org.elasticsearch.index.mapper; +import java.util.ArrayList; import java.util.Collection; enum MapperUtils { ; /** Split mapper and its descendants into object and field mappers. */ - public static void collect(Mapper mapper, Collection objectMappers, Collection fieldMappers) { + public static void collect(Mapper mapper, Collection objectMappers, + Collection fieldMappers) { + collect(mapper, objectMappers, fieldMappers, new ArrayList<>()); + } + + public static void collect(Mapper mapper, Collection objectMappers, + Collection fieldMappers, + Collection fieldAliasMappers) { if (mapper instanceof RootObjectMapper) { // root mapper isn't really an object mapper } else if (mapper instanceof ObjectMapper) { objectMappers.add((ObjectMapper)mapper); } else if (mapper instanceof FieldMapper) { fieldMappers.add((FieldMapper)mapper); + } else if (mapper instanceof FieldAliasMapper) { + fieldAliasMappers.add((FieldAliasMapper) mapper); } for (Mapper child : mapper) { - collect(child, objectMappers, fieldMappers); + collect(child, objectMappers, fieldMappers, fieldAliasMappers); } } } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java index 6c786763003c9..b8f94ff20e504 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; +import org.elasticsearch.index.mapper.FieldAliasMapper; import org.elasticsearch.index.mapper.BinaryFieldMapper; import org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.CompletionFieldMapper; @@ -111,6 +112,8 @@ private Map getMappers(List mapperPlugi mappers.put(ObjectMapper.NESTED_CONTENT_TYPE, new ObjectMapper.TypeParser()); mappers.put(CompletionFieldMapper.CONTENT_TYPE, new CompletionFieldMapper.TypeParser()); mappers.put(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser()); + mappers.put(FieldAliasMapper.CONTENT_TYPE, new FieldAliasMapper.TypeParser()); + if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { mappers.put(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser()); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 683f7bbde168f..8d96a51fbf00b 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -63,8 +63,6 @@ 
import java.util.Map; import java.util.Set; -import static org.elasticsearch.common.xcontent.XContentFactory.contentBuilder; - /** * Fetch phase of a search request, used to fetch the actual top matching documents to be returned to the client, identified * after reducing all of the matches returned by the query phase @@ -118,11 +116,12 @@ public void execute(SearchContext context) { if (context.getObjectMapper(fieldName) != null) { throw new IllegalArgumentException("field [" + fieldName + "] isn't a leaf field"); } + } else { + if (fieldNames == null) { + fieldNames = new HashSet<>(); + } + fieldNames.add(fieldType.name()); } - if (fieldNames == null) { - fieldNames = new HashSet<>(); - } - fieldNames.add(fieldName); } } boolean loadSource = context.sourceRequested(); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java index 22895807af69e..5dd05fd1e3b9c 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java @@ -36,7 +36,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.Field; @@ -71,9 +71,9 @@ public HighlightField highlight(HighlighterContext highlighterContext) { SearchContextHighlight.Field field = highlighterContext.field; SearchContext context = highlighterContext.context; FetchSubPhase.HitContext hitContext = highlighterContext.hitContext; - FieldMapper mapper = highlighterContext.mapper; + MappedFieldType fieldType = highlighterContext.fieldType; - if (canHighlight(mapper) == false) { + if (canHighlight(fieldType) == false) { throw new IllegalArgumentException("the field [" + highlighterContext.fieldName + "] should be indexed with term vector with position offsets to be used with fast vector highlighter"); } @@ -87,7 +87,7 @@ public HighlightField highlight(HighlighterContext highlighterContext) { HighlighterEntry cache = (HighlighterEntry) hitContext.cache().get(CACHE_KEY); try { - MapperHighlightEntry entry = cache.mappers.get(mapper); + FieldHighlightEntry entry = cache.entries.get(fieldType); if (entry == null) { FragListBuilder fragListBuilder; BaseFragmentsBuilder fragmentsBuilder; @@ -97,37 +97,37 @@ public HighlightField highlight(HighlighterContext highlighterContext) { if (field.fieldOptions().numberOfFragments() == 0) { fragListBuilder = new SingleFragListBuilder(); - if (!forceSource && mapper.fieldType().stored()) { - fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(), + if (!forceSource && fieldType.stored()) { + fragmentsBuilder = new SimpleFragmentsBuilder(fieldType, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); } else { - fragmentsBuilder = new SourceSimpleFragmentsBuilder(mapper, context, + fragmentsBuilder = new SourceSimpleFragmentsBuilder(fieldType, context, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); } } else { fragListBuilder = 
field.fieldOptions().fragmentOffset() == -1 ? new SimpleFragListBuilder() : new SimpleFragListBuilder(field.fieldOptions().fragmentOffset()); if (field.fieldOptions().scoreOrdered()) { - if (!forceSource && mapper.fieldType().stored()) { + if (!forceSource && fieldType.stored()) { fragmentsBuilder = new ScoreOrderFragmentsBuilder(field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); } else { - fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(mapper, context, + fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(fieldType, context, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); } } else { - if (!forceSource && mapper.fieldType().stored()) { - fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(), + if (!forceSource && fieldType.stored()) { + fragmentsBuilder = new SimpleFragmentsBuilder(fieldType, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); } else { fragmentsBuilder = - new SourceSimpleFragmentsBuilder(mapper, context, field.fieldOptions().preTags(), + new SourceSimpleFragmentsBuilder(fieldType, context, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner); } } } fragmentsBuilder.setDiscreteMultiValueHighlighting(termVectorMultiValue); - entry = new MapperHighlightEntry(); + entry = new FieldHighlightEntry(); if (field.fieldOptions().requireFieldMatch()) { /** * we use top level reader to rewrite the query against all readers, @@ -152,7 +152,7 @@ public HighlightField highlight(HighlighterContext highlighterContext) { cache.fvh = new org.apache.lucene.search.vectorhighlight.FastVectorHighlighter(); } CustomFieldQuery.highlightFilters.set(field.fieldOptions().highlightFilter()); - cache.mappers.put(mapper, entry); + cache.entries.put(fieldType, entry); } final FieldQuery fieldQuery; if (field.fieldOptions().requireFieldMatch()) { @@ -173,12 +173,12 @@ public HighlightField highlight(HighlighterContext highlighterContext) { // Only send matched fields if they were requested to save time. 
if (field.fieldOptions().matchedFields() != null && !field.fieldOptions().matchedFields().isEmpty()) { fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), - mapper.fieldType().name(), field.fieldOptions().matchedFields(), fragmentCharSize, + fieldType.name(), field.fieldOptions().matchedFields(), fragmentCharSize, numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); } else { fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), - mapper.fieldType().name(), fragmentCharSize, numberOfFragments, entry.fragListBuilder, + fieldType.name(), fragmentCharSize, numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); } @@ -193,7 +193,7 @@ public HighlightField highlight(HighlighterContext highlighterContext) { FieldFragList fieldFragList = new SimpleFieldFragList(-1 /*ignored*/); fieldFragList.add(0, noMatchSize, Collections.emptyList()); fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(), - mapper.fieldType().name(), fieldFragList, 1, field.fieldOptions().preTags(), + fieldType.name(), fieldFragList, 1, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); if (fragments != null && fragments.length > 0) { return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments)); @@ -209,9 +209,10 @@ public HighlightField highlight(HighlighterContext highlighterContext) { } @Override - public boolean canHighlight(FieldMapper fieldMapper) { - return fieldMapper.fieldType().storeTermVectors() && fieldMapper.fieldType().storeTermVectorOffsets() - && fieldMapper.fieldType().storeTermVectorPositions(); + public boolean canHighlight(MappedFieldType fieldType) { + return fieldType.storeTermVectors() + && fieldType.storeTermVectorOffsets() + && fieldType.storeTermVectorPositions(); } private static BoundaryScanner getBoundaryScanner(Field field) { @@ -244,7 +245,7 @@ private static BoundaryScanner getBoundaryScanner(Field field) { } } - private class MapperHighlightEntry { + private class FieldHighlightEntry { public FragListBuilder fragListBuilder; public FragmentsBuilder fragmentsBuilder; public FieldQuery noFieldMatchFieldQuery; @@ -253,6 +254,6 @@ private class MapperHighlightEntry { private class HighlighterEntry { public org.apache.lucene.search.vectorhighlight.FastVectorHighlighter fvh; - public Map mappers = new HashMap<>(); + public Map entries = new HashMap<>(); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java index 37971e6b48044..0b3204e193b07 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.analysis.CustomAnalyzer; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.TokenFilterFactory; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import java.util.Comparator; import java.util.List; @@ -47,10 +47,10 @@ private FragmentBuilderHelper() { * Fixes problems with broken analysis chains if positions 
and offsets are messed up that can lead to * {@link StringIndexOutOfBoundsException} in the {@link FastVectorHighlighter} */ - public static WeightedFragInfo fixWeightedFragInfo(FieldMapper mapper, Field[] values, WeightedFragInfo fragInfo) { + public static WeightedFragInfo fixWeightedFragInfo(MappedFieldType fieldType, Field[] values, WeightedFragInfo fragInfo) { assert fragInfo != null : "FragInfo must not be null"; - assert mapper.fieldType().name().equals(values[0].name()) : "Expected FieldMapper for field " + values[0].name(); - if (!fragInfo.getSubInfos().isEmpty() && containsBrokenAnalysis(mapper.fieldType().indexAnalyzer())) { + assert fieldType.name().equals(values[0].name()) : "Expected MappedFieldType for field " + values[0].name(); + if (!fragInfo.getSubInfos().isEmpty() && containsBrokenAnalysis(fieldType.indexAnalyzer())) { /* This is a special case where broken analysis like WDF is used for term-vector creation at index-time * which can potentially mess up the offsets. To prevent a SAIIOBException we need to resort * the fragments based on their offsets rather than using soley the positions as it is done in diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index 6b9121b8f7b71..c3e347f5abc76 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -24,18 +24,16 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; public class HighlightPhase extends AbstractComponent implements FetchSubPhase { @@ -71,8 +69,8 @@ public void hitExecute(SearchContext context, HitContext hitContext) { boolean fieldNameContainsWildcards = field.field().contains("*"); for (String fieldName : fieldNamesToHighlight) { - FieldMapper fieldMapper = getMapperForField(fieldName, context, hitContext); - if (fieldMapper == null) { + MappedFieldType fieldType = context.mapperService().fullName(fieldName); + if (fieldType == null) { continue; } @@ -85,8 +83,8 @@ public void hitExecute(SearchContext context, HitContext hitContext) { // If the field was explicitly given we assume that whoever issued the query knew // what they were doing and try to highlight anyway. 
if (fieldNameContainsWildcards) { - if (fieldMapper.fieldType().typeName().equals(TextFieldMapper.CONTENT_TYPE) == false && - fieldMapper.fieldType().typeName().equals(KeywordFieldMapper.CONTENT_TYPE) == false) { + if (fieldType.typeName().equals(TextFieldMapper.CONTENT_TYPE) == false && + fieldType.typeName().equals(KeywordFieldMapper.CONTENT_TYPE) == false) { continue; } } @@ -104,25 +102,22 @@ public void hitExecute(SearchContext context, HitContext hitContext) { if (highlightQuery == null) { highlightQuery = context.parsedQuery().query(); } - HighlighterContext highlighterContext = new HighlighterContext(fieldName, field, fieldMapper, context, - hitContext, highlightQuery); + HighlighterContext highlighterContext = new HighlighterContext(fieldType.name(), + field, fieldType, context, hitContext, highlightQuery); - if ((highlighter.canHighlight(fieldMapper) == false) && fieldNameContainsWildcards) { + if ((highlighter.canHighlight(fieldType) == false) && fieldNameContainsWildcards) { // if several fieldnames matched the wildcard then we want to skip those that we cannot highlight continue; } HighlightField highlightField = highlighter.highlight(highlighterContext); if (highlightField != null) { - highlightFields.put(highlightField.name(), highlightField); + // Note that we must make sure to use the original field name in the + // response, as this field could be an alias. + highlightFields.put(fieldName, + new HighlightField(fieldName, highlightField.fragments())); } } } hitContext.hit().highlightFields(highlightFields); } - - private FieldMapper getMapperForField(String fieldName, SearchContext searchContext, HitContext hitContext) { - DocumentMapper documentMapper = searchContext.mapperService().documentMapper(hitContext.hit().getType()); - // TODO: no need to lookup the doc mapper with unambiguous field names? just look at the mapper service - return documentMapper.mappers().smartNameFieldMapper(fieldName); - } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java index b241a686a248f..c1c42fb45a44a 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java @@ -22,7 +22,7 @@ import org.apache.lucene.search.highlight.Encoder; import org.apache.lucene.search.highlight.SimpleHTMLEncoder; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SourceLookup; @@ -46,15 +46,17 @@ private HighlightUtils() { /** * Load field values for highlighting. 
*/ - public static List loadFieldValues(SearchContextHighlight.Field field, FieldMapper mapper, SearchContext searchContext, - FetchSubPhase.HitContext hitContext) throws IOException { + public static List loadFieldValues(SearchContextHighlight.Field field, + MappedFieldType fieldType, + SearchContext searchContext, + FetchSubPhase.HitContext hitContext) throws IOException { //percolator needs to always load from source, thus it sets the global force source to true boolean forceSource = searchContext.highlight().forceSource(field); List textsToHighlight; - if (!forceSource && mapper.fieldType().stored()) { - CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(mapper.fieldType().name()), false); + if (!forceSource && fieldType.stored()) { + CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(fieldType.name()), false); hitContext.reader().document(hitContext.docId(), fieldVisitor); - textsToHighlight = fieldVisitor.fields().get(mapper.fieldType().name()); + textsToHighlight = fieldVisitor.fields().get(fieldType.name()); if (textsToHighlight == null) { // Can happen if the document doesn't have the field to highlight textsToHighlight = Collections.emptyList(); @@ -62,7 +64,7 @@ public static List loadFieldValues(SearchContextHighlight.Field field, F } else { SourceLookup sourceLookup = searchContext.lookup().source(); sourceLookup.setSegmentAndDocument(hitContext.readerContext(), hitContext.docId()); - textsToHighlight = sourceLookup.extractRawValues(mapper.fieldType().name()); + textsToHighlight = sourceLookup.extractRawValues(fieldType.name()); } assert textsToHighlight != null; return textsToHighlight; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/Highlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/Highlighter.java index ab76da6e726f1..7de8a9f9a9b29 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/Highlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/Highlighter.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.search.fetch.subphase.highlight; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; /** * Highlights a search result. 
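The hunk that follows is the plugin-facing part of this change: Highlighter.canHighlight now takes the resolved MappedFieldType instead of a FieldMapper, since a field alias has no FieldMapper of its own. For the fast vector highlighter that check reduces to term vectors with positions and offsets; the sketch below shows the same three flags on a plain Lucene FieldType, purely for illustration (it is not the MappedFieldType API used in the patch):

```java
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;

// Sketch of the per-field requirements FastVectorHighlighter.canHighlight checks above:
// term vectors with positions and offsets. Shown on a plain Lucene FieldType for
// illustration; in the patch the same flags are read from the resolved MappedFieldType,
// so it no longer matters whether the request named the concrete field or an alias.
public class FvhRequirementsSketch {
    public static void main(String[] args) {
        FieldType fieldType = new FieldType(TextField.TYPE_STORED);
        fieldType.setStoreTermVectors(true);
        fieldType.setStoreTermVectorPositions(true);
        fieldType.setStoreTermVectorOffsets(true);
        fieldType.freeze();

        boolean canHighlight = fieldType.storeTermVectors()
            && fieldType.storeTermVectorOffsets()
            && fieldType.storeTermVectorPositions();
        System.out.println("fvh can highlight: " + canHighlight); // true
    }
}
```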
@@ -27,5 +27,5 @@ public interface Highlighter { HighlightField highlight(HighlighterContext highlighterContext); - boolean canHighlight(FieldMapper fieldMapper); + boolean canHighlight(MappedFieldType fieldType); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterContext.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterContext.java index 8643ccb82ea56..3efa19539d142 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterContext.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.search.Query; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; @@ -27,16 +27,20 @@ public class HighlighterContext { public final String fieldName; public final SearchContextHighlight.Field field; - public final FieldMapper mapper; + public final MappedFieldType fieldType; public final SearchContext context; public final FetchSubPhase.HitContext hitContext; public final Query query; - public HighlighterContext(String fieldName, SearchContextHighlight.Field field, FieldMapper mapper, SearchContext context, - FetchSubPhase.HitContext hitContext, Query query) { + public HighlighterContext(String fieldName, + SearchContextHighlight.Field field, + MappedFieldType fieldType, + SearchContext context, + FetchSubPhase.HitContext hitContext, + Query query) { this.fieldName = fieldName; this.field = field; - this.mapper = mapper; + this.fieldType = fieldType; this.context = context; this.hitContext = hitContext; this.query = query; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java index 92fd2359aa941..1ac3f4789cb05 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java @@ -36,7 +36,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; @@ -59,22 +59,21 @@ public HighlightField highlight(HighlighterContext highlighterContext) { SearchContextHighlight.Field field = highlighterContext.field; SearchContext context = highlighterContext.context; FetchSubPhase.HitContext hitContext = highlighterContext.hitContext; - FieldMapper mapper = highlighterContext.mapper; + MappedFieldType fieldType = highlighterContext.fieldType; Encoder encoder = field.fieldOptions().encoder().equals("html") ? 
HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT; if (!hitContext.cache().containsKey(CACHE_KEY)) { - Map mappers = new HashMap<>(); - hitContext.cache().put(CACHE_KEY, mappers); + hitContext.cache().put(CACHE_KEY, new HashMap<>()); } @SuppressWarnings("unchecked") - Map cache = - (Map) hitContext.cache().get(CACHE_KEY); + Map cache = + (Map) hitContext.cache().get(CACHE_KEY); - org.apache.lucene.search.highlight.Highlighter entry = cache.get(mapper); + org.apache.lucene.search.highlight.Highlighter entry = cache.get(fieldType); if (entry == null) { QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query, - field.fieldOptions().requireFieldMatch() ? mapper.fieldType().name() : null); + field.fieldOptions().requireFieldMatch() ? fieldType.name() : null); queryScorer.setExpandMultiTermQuery(true); Fragmenter fragmenter; if (field.fieldOptions().numberOfFragments() == 0) { @@ -96,21 +95,21 @@ public HighlightField highlight(HighlighterContext highlighterContext) { // always highlight across all data entry.setMaxDocCharsToAnalyze(Integer.MAX_VALUE); - cache.put(mapper, entry); + cache.put(fieldType, entry); } // a HACK to make highlighter do highlighting, even though its using the single frag list builder int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? 1 : field.fieldOptions().numberOfFragments(); ArrayList fragsList = new ArrayList<>(); List textsToHighlight; - Analyzer analyzer = getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), mapper.fieldType()); + Analyzer analyzer = getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), fieldType); final int maxAnalyzedOffset = context.indexShard().indexSettings().getHighlightMaxAnalyzedOffset(); try { - textsToHighlight = HighlightUtils.loadFieldValues(field, mapper, context, hitContext); + textsToHighlight = HighlightUtils.loadFieldValues(field, fieldType, context, hitContext); for (Object textToHighlight : textsToHighlight) { - String text = convertFieldValue(mapper.fieldType(), textToHighlight); + String text = convertFieldValue(fieldType, textToHighlight); if (text.length() > maxAnalyzedOffset) { throw new IllegalArgumentException( "The length of [" + highlighterContext.fieldName + "] field of [" + hitContext.hit().getId() + @@ -121,7 +120,7 @@ public HighlightField highlight(HighlighterContext highlighterContext) { "with unified or fvh highlighter is recommended!"); } - try (TokenStream tokenStream = analyzer.tokenStream(mapper.fieldType().name(), text)) { + try (TokenStream tokenStream = analyzer.tokenStream(fieldType.name(), text)) { if (!tokenStream.hasAttribute(CharTermAttribute.class) || !tokenStream.hasAttribute(OffsetAttribute.class)) { // can't perform highlighting if the stream has no terms (binary token stream) or no offsets continue; @@ -178,7 +177,7 @@ public int compare(TextFragment o1, TextFragment o2) { String fieldContents = textsToHighlight.get(0).toString(); int end; try { - end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, mapper.fieldType().name(), fieldContents); + end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, fieldType.name(), fieldContents); } catch (Exception e) { throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } @@ -190,7 +189,7 @@ public int compare(TextFragment o1, TextFragment o2) { } @Override - public boolean canHighlight(FieldMapper fieldMapper) { + public boolean canHighlight(MappedFieldType fieldType) { 
return true; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SimpleFragmentsBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SimpleFragmentsBuilder.java index c1e74bd00bc11..fd3b069c1c0ae 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SimpleFragmentsBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SimpleFragmentsBuilder.java @@ -23,24 +23,27 @@ import org.apache.lucene.search.vectorhighlight.BoundaryScanner; import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; /** - * Direct Subclass of Lucene's org.apache.lucene.search.vectorhighlight.SimpleFragmentsBuilder - * that corrects offsets for broken analysis chains. + * Direct Subclass of Lucene's org.apache.lucene.search.vectorhighlight.SimpleFragmentsBuilder + * that corrects offsets for broken analysis chains. */ public class SimpleFragmentsBuilder extends org.apache.lucene.search.vectorhighlight.SimpleFragmentsBuilder { - protected final FieldMapper mapper; + protected final MappedFieldType fieldType; - public SimpleFragmentsBuilder(FieldMapper mapper, - String[] preTags, String[] postTags, BoundaryScanner boundaryScanner) { + public SimpleFragmentsBuilder(MappedFieldType fieldType, + String[] preTags, + String[] postTags, + BoundaryScanner boundaryScanner) { super(preTags, postTags, boundaryScanner); - this.mapper = mapper; + this.fieldType = fieldType; } - + @Override protected String makeFragment( StringBuilder buffer, int[] index, Field[] values, WeightedFragInfo fragInfo, String[] preTags, String[] postTags, Encoder encoder ){ - return super.makeFragment(buffer, index, values, FragmentBuilderHelper.fixWeightedFragInfo(mapper, values, fragInfo), - preTags, postTags, encoder); + WeightedFragInfo weightedFragInfo = FragmentBuilderHelper.fixWeightedFragInfo(fieldType, values, fragInfo); + return super.makeFragment(buffer, index, values, weightedFragInfo, preTags, postTags, encoder); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java index c4ac54606cbcf..4ce0c6a47a3b6 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceScoreOrderFragmentsBuilder.java @@ -26,7 +26,7 @@ import org.apache.lucene.search.vectorhighlight.BoundaryScanner; import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo; import org.apache.lucene.search.vectorhighlight.ScoreOrderFragmentsBuilder; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SourceLookup; @@ -35,14 +35,17 @@ public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder { - private final FieldMapper mapper; + private final MappedFieldType fieldType; private final SearchContext searchContext; - public SourceScoreOrderFragmentsBuilder(FieldMapper mapper, SearchContext searchContext, String[] preTags, String[] postTags, + public SourceScoreOrderFragmentsBuilder(MappedFieldType fieldType, + SearchContext 
searchContext, + String[] preTags, + String[] postTags, BoundaryScanner boundaryScanner) { super(preTags, postTags, boundaryScanner); - this.mapper = mapper; + this.fieldType = fieldType; this.searchContext = searchContext; } @@ -52,10 +55,10 @@ protected Field[] getFields(IndexReader reader, int docId, String fieldName) thr SourceLookup sourceLookup = searchContext.lookup().source(); sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId); - List values = sourceLookup.extractRawValues(mapper.fieldType().name()); + List values = sourceLookup.extractRawValues(fieldType.name()); Field[] fields = new Field[values.size()]; for (int i = 0; i < values.size(); i++) { - fields[i] = new Field(mapper.fieldType().name(), values.get(i).toString(), TextField.TYPE_NOT_STORED); + fields[i] = new Field(fieldType.name(), values.get(i).toString(), TextField.TYPE_NOT_STORED); } return fields; } @@ -63,7 +66,7 @@ protected Field[] getFields(IndexReader reader, int docId, String fieldName) thr @Override protected String makeFragment( StringBuilder buffer, int[] index, Field[] values, WeightedFragInfo fragInfo, String[] preTags, String[] postTags, Encoder encoder ){ - return super.makeFragment(buffer, index, values, FragmentBuilderHelper.fixWeightedFragInfo(mapper, values, fragInfo), - preTags, postTags, encoder); + WeightedFragInfo weightedFragInfo = FragmentBuilderHelper.fixWeightedFragInfo(fieldType, values, fragInfo); + return super.makeFragment(buffer, index, values, weightedFragInfo, preTags, postTags, encoder); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java index cd37863a67eea..a7eb65de3b6a4 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/SourceSimpleFragmentsBuilder.java @@ -23,7 +23,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.vectorhighlight.BoundaryScanner; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SourceLookup; @@ -34,9 +34,12 @@ public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder { private final SearchContext searchContext; - public SourceSimpleFragmentsBuilder(FieldMapper mapper, SearchContext searchContext, String[] preTags, String[] postTags, + public SourceSimpleFragmentsBuilder(MappedFieldType fieldType, + SearchContext searchContext, + String[] preTags, + String[] postTags, BoundaryScanner boundaryScanner) { - super(mapper, preTags, postTags, boundaryScanner); + super(fieldType, preTags, postTags, boundaryScanner); this.searchContext = searchContext; } @@ -48,13 +51,13 @@ protected Field[] getFields(IndexReader reader, int docId, String fieldName) thr SourceLookup sourceLookup = searchContext.lookup().source(); sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId); - List values = sourceLookup.extractRawValues(mapper.fieldType().name()); + List values = sourceLookup.extractRawValues(fieldType.name()); if (values.isEmpty()) { return EMPTY_FIELDS; } Field[] fields = new Field[values.size()]; for (int i = 0; i < values.size(); i++) { - fields[i] = new 
Field(mapper.fieldType().name(), values.get(i).toString(), TextField.TYPE_NOT_STORED); + fields[i] = new Field(fieldType.name(), values.get(i).toString(), TextField.TYPE_NOT_STORED); } return fields; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java index c7ada10fcf6fc..7f209ed0586e0 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java @@ -22,11 +22,11 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.highlight.Encoder; -import org.apache.lucene.search.uhighlight.Snippet; import org.apache.lucene.search.uhighlight.BoundedBreakIteratorScanner; import org.apache.lucene.search.uhighlight.CustomPassageFormatter; import org.apache.lucene.search.uhighlight.CustomSeparatorBreakIterator; import org.apache.lucene.search.uhighlight.CustomUnifiedHighlighter; +import org.apache.lucene.search.uhighlight.Snippet; import org.apache.lucene.search.uhighlight.UnifiedHighlighter.OffsetSource; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CollectionUtil; @@ -34,7 +34,6 @@ import org.elasticsearch.common.text.Text; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; @@ -52,13 +51,13 @@ public class UnifiedHighlighter implements Highlighter { @Override - public boolean canHighlight(FieldMapper fieldMapper) { + public boolean canHighlight(MappedFieldType fieldType) { return true; } @Override public HighlightField highlight(HighlighterContext highlighterContext) { - FieldMapper fieldMapper = highlighterContext.mapper; + MappedFieldType fieldType = highlighterContext.fieldType; SearchContextHighlight.Field field = highlighterContext.field; SearchContext context = highlighterContext.context; FetchSubPhase.HitContext hitContext = highlighterContext.hitContext; @@ -72,15 +71,15 @@ public HighlightField highlight(HighlighterContext highlighterContext) { try { final Analyzer analyzer = - getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), fieldMapper.fieldType()); - List fieldValues = HighlightUtils.loadFieldValues(field, fieldMapper, context, hitContext); + getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), fieldType); + List fieldValues = HighlightUtils.loadFieldValues(field, fieldType, context, hitContext); fieldValues = fieldValues.stream() - .map((s) -> convertFieldValue(fieldMapper.fieldType(), s)) + .map((s) -> convertFieldValue(fieldType, s)) .collect(Collectors.toList()); final IndexSearcher searcher = new IndexSearcher(hitContext.reader()); final CustomUnifiedHighlighter highlighter; final String fieldValue = mergeFieldValues(fieldValues, MULTIVAL_SEP_CHAR); - final OffsetSource offsetSource = getOffsetSource(fieldMapper.fieldType()); + final OffsetSource offsetSource = getOffsetSource(fieldType); if ((offsetSource == OffsetSource.ANALYSIS) && (fieldValue.length() > maxAnalyzedOffset)) { throw new IllegalArgumentException( "The length of [" + highlighterContext.fieldName 
+ "] field of [" + hitContext.hit().getId() + diff --git a/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java index dcdc669539f53..9199615868a13 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.BytesRefs; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -321,7 +320,7 @@ protected void populateCommonFields(MapperService mapperService, SuggestionSearc suggestionContext.setAnalyzer(luceneAnalyzer); } - suggestionContext.setField(field); + suggestionContext.setField(fieldType.name()); if (size != null) { suggestionContext.setSize(size); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighter.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighter.java index 0ec6d7cd83a5c..6ece69999f7fc 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighter.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighter.java @@ -19,11 +19,7 @@ package org.elasticsearch.search.fetch.subphase.highlight; import org.elasticsearch.common.text.Text; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; -import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; -import org.elasticsearch.search.fetch.subphase.highlight.HighlighterContext; -import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; +import org.elasticsearch.index.mapper.MappedFieldType; import java.util.ArrayList; import java.util.List; @@ -68,7 +64,7 @@ public HighlightField highlight(HighlighterContext highlighterContext) { } @Override - public boolean canHighlight(FieldMapper fieldMapper) { + public boolean canHighlight(MappedFieldType fieldType) { return true; } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 9011b0b8dd69c..f703f8bcdcf0d 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -130,6 +130,40 @@ public void testHighlightingWithStoredKeyword() throws IOException { assertHighlight(search, 0, "text", 0, equalTo("foo")); } + public void testHighlightingWithFieldAlias() throws IOException { + XContentBuilder mappings = jsonBuilder(); + mappings.startObject(); + mappings.startObject("type") + .startObject("properties") + .startObject("text") + .field("type", "text") + .field("analyzer", "keyword") + .field("index_options", "offsets") + .field("term_vector", "with_positions_offsets") + .endObject() + .startObject("field_alias") + .field("type", "alias") + .field("path", "text") + .endObject() + .endObject().endObject(); + mappings.endObject(); + assertAcked(prepareCreate("test") 
+ .addMapping("type", mappings)); + client().prepareIndex("test", "type", "1") + .setSource(jsonBuilder().startObject().field("text", "text").endObject()) + .get(); + refresh(); + for (String type : ALL_TYPES) { + HighlightBuilder builder = new HighlightBuilder() + .field(new Field("field_alias").highlighterType(type)) + .requireFieldMatch(randomBoolean()) + .forceSource(randomBoolean()); + SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("field_alias", "text"))) + .highlighter(builder).get(); + assertHighlight(search, 0, "field_alias", 0, equalTo("text")); + } + } + public void testHighlightingWithWildcardName() throws IOException { // test the kibana case with * as fieldname that will try highlight all fields including meta fields XContentBuilder mappings = jsonBuilder();