Allow metadata fields in the _source (#61590)
* Configurable metadata field mappers in the _source

* Changes to support metadata fields in _source
Added test testDocumentContainsAllowedMetadataField()

* Merged DocumentParserTests from master
Fixed broken tests

* Handle non-string values

* Allow metadata fields to parse values/objects/arrays/null

* Removed MetadataFieldMapper.isAllowedInSource() method

Delegated this functionality to MetadataFieldMapper.parse()

* Fixed bug that caused tests to break

* Cleanup parsing for existing metadata fields

* Cleanup parsing for existing metadata fields

* Remove doParse() method

* Fix broken test

* Lookup metadata mapper by name

Instead of a linear scan
csoulios authored Sep 18, 2020
1 parent f7a090a commit 55294e5
Showing 23 changed files with 168 additions and 184 deletions.
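
Taken together, the changes below stop DocumentParser from rejecting metadata field names it finds inside a document: getMapper() now consults Mapping.getMetadataMapper() for the field's full path, and MetadataFieldMapper gains a default parseCreateField() that still throws a MapperParsingException for mappers that do not opt in. As a rough sketch of what opting in could look like (not part of this commit; the class name, the _my_meta field name, and the Lucene StringField are assumptions), a custom metadata mapper would override that default:

// Hypothetical sketch only -- not part of this commit; builder/type-parser
// wiring is omitted. A metadata field mapper that opts in to being supplied
// inside _source by overriding the new default parseCreateField(), which
// otherwise rejects the field.
import java.io.IOException;

import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;

public class MyMetaFieldMapper extends MetadataFieldMapper {

    public static final String NAME = "_my_meta"; // assumed field name

    protected MyMetaFieldMapper(MappedFieldType fieldType) {
        super(fieldType);
    }

    @Override
    protected void parseCreateField(ParseContext context) throws IOException {
        // Read whatever value appears under "_my_meta" in the document source
        // and index it; textOrNull() and doc().add(...) mirror calls already
        // used by the built-in mappers in this diff.
        String value = context.parser().textOrNull();
        if (value != null) {
            context.doc().add(new StringField(NAME, value, Field.Store.NO));
        }
    }

    @Override
    protected String contentType() {
        return NAME;
    }
}
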
@@ -65,17 +65,6 @@ private RankFeatureMetaFieldMapper() {
super(RankFeatureMetaFieldType.INSTANCE);
}

@Override
public void preParse(ParseContext context) {}

@Override
protected void parseCreateField(ParseContext context) {
throw new AssertionError("Should never be called");
}

@Override
public void postParse(ParseContext context) {}

@Override
protected String contentType() {
return CONTENT_TYPE;
@@ -69,6 +69,7 @@ public void testDocumentParsingFailsOnMetaField() throws Exception {
BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(rfMetaField, 0).endObject());
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
mapper.parse(new SourceToParse("test", "1", bytes, XContentType.JSON)));
assertTrue(e.getMessage().contains("Field ["+ rfMetaField + "] is a metadata field and cannot be added inside a document."));
assertTrue(
e.getCause().getMessage().contains("Field ["+ rfMetaField + "] is a metadata field and cannot be added inside a document."));
}
}
@@ -79,23 +79,9 @@ public boolean enabled() {
return this.enabled.value();
}

@Override
public void preParse(ParseContext context) {
}

@Override
public void postParse(ParseContext context) throws IOException {
// we post parse it so we get the size stored, possibly compressed (source will be preParse)
super.parse(context);
}

@Override
public void parse(ParseContext context) {
// nothing to do here, we call the parent in postParse
}

@Override
protected void parseCreateField(ParseContext context) {
if (enabled.value() == false) {
return;
}
@@ -392,10 +392,7 @@ private static void innerParseObject(ParseContext context, ObjectMapper mapper,
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
paths = splitAndValidatePath(currentFieldName);
if (context.mapperService().isMetadataField(context.path().pathAsText(currentFieldName))) {
throw new MapperParsingException("Field [" + currentFieldName + "] is a metadata field and cannot be added inside"
+ " a document. Use the index API request parameters.");
} else if (containsDisabledObjectMapper(mapper, paths)) {
if (containsDisabledObjectMapper(mapper, paths)) {
parser.nextToken();
parser.skipChildren();
}
@@ -483,7 +480,7 @@ private static void parseObject(final ParseContext context, ObjectMapper mapper,
String[] paths) throws IOException {
assert currentFieldName != null;

Mapper objectMapper = getMapper(mapper, currentFieldName, paths);
Mapper objectMapper = getMapper(context, mapper, currentFieldName, paths);
if (objectMapper != null) {
context.path().add(currentFieldName);
parseObjectOrField(context, objectMapper);
@@ -520,7 +517,7 @@ private static void parseArray(ParseContext context, ObjectMapper parentMapper,
String[] paths) throws IOException {
String arrayFieldName = lastFieldName;

Mapper mapper = getMapper(parentMapper, lastFieldName, paths);
Mapper mapper = getMapper(context, parentMapper, lastFieldName, paths);
if (mapper != null) {
// There is a concrete mapper for this field already. Need to check if the mapper
// expects an array, if so we pass the context straight to the mapper and if not
@@ -597,7 +594,7 @@ private static void parseValue(final ParseContext context, ObjectMapper parentMa
throw new MapperParsingException("object mapping [" + parentMapper.name() + "] trying to serialize a value with"
+ " no field associated with it, current value [" + context.parser().textOrNull() + "]");
}
Mapper mapper = getMapper(parentMapper, currentFieldName, paths);
Mapper mapper = getMapper(context, parentMapper, currentFieldName, paths);
if (mapper != null) {
parseObjectOrField(context, mapper);
} else {
@@ -614,7 +611,7 @@ private static void parseValue(final ParseContext context, ObjectMapper parentMa
private static void parseNullValue(ParseContext context, ObjectMapper parentMapper, String lastFieldName,
String[] paths) throws IOException {
// we can only handle null values if we have mappings for them
Mapper mapper = getMapper(parentMapper, lastFieldName, paths);
Mapper mapper = getMapper(context, parentMapper, lastFieldName, paths);
if (mapper != null) {
// TODO: passing null to an object seems bogus?
parseObjectOrField(context, mapper);
@@ -882,9 +879,16 @@ private static ObjectMapper.Dynamic dynamicOrDefault(ObjectMapper parentMapper,
}

// looks up a child mapper, but takes into account field names that expand to objects
private static Mapper getMapper(ObjectMapper objectMapper, String fieldName, String[] subfields) {
private static Mapper getMapper(final ParseContext context, ObjectMapper objectMapper, String fieldName, String[] subfields) {
String fieldPath = context.path().pathAsText(fieldName);
// Check if mapper is a metadata mapper first
Mapper mapper = context.docMapper().mapping().getMetadataMapper(fieldPath);
if (mapper != null) {
return mapper;
}

for (int i = 0; i < subfields.length - 1; ++i) {
Mapper mapper = objectMapper.getMapper(subfields[i]);
mapper = objectMapper.getMapper(subfields[i]);
if (mapper == null || (mapper instanceof ObjectMapper) == false) {
return null;
}
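
Net effect of the DocumentParser changes above: the blanket MapperParsingException for metadata field names is gone from innerParseObject(), and getMapper() now takes the ParseContext so it can resolve the field's full path through context.docMapper().mapping().getMetadataMapper(fieldPath) before walking the object-mapper hierarchy. A metadata mapper found that way parses (or rejects) the value itself via its parseCreateField() implementation.
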
@@ -27,7 +27,6 @@
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.index.query.QueryShardContext;

import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
@@ -160,19 +159,6 @@ public FieldNamesFieldType fieldType() {
return (FieldNamesFieldType) super.fieldType();
}

@Override
public void preParse(ParseContext context) {
}

@Override
public void postParse(ParseContext context) {
}

@Override
protected void parseCreateField(ParseContext context) throws IOException {
// Adding values to the _field_names field is handled by the mappers for each field type
}

static Iterable<String> extractFieldNames(final String fullPath) {
return new Iterable<String>() {
@Override
@@ -257,11 +257,6 @@ private IdFieldMapper() {

@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
}

@Override
protected void parseCreateField(ParseContext context) throws IOException {
BytesRef id = Uid.encodeId(context.sourceToParse().id());
context.doc().add(new Field(NAME, id, Defaults.FIELD_TYPE));
}
@@ -82,22 +82,8 @@ private IgnoredFieldMapper() {
super(IgnoredFieldType.INSTANCE);
}

@Override
public void preParse(ParseContext context) throws IOException {
}

@Override
public void postParse(ParseContext context) throws IOException {
super.parse(context);
}

@Override
public void parse(ParseContext context) throws IOException {
// done in post-parse
}

@Override
protected void parseCreateField(ParseContext context) throws IOException {
for (String field : context.getIgnoredFields()) {
context.doc().add(new Field(NAME, field, Defaults.FIELD_TYPE));
}
@@ -27,7 +27,6 @@
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.util.Collections;
import java.util.function.Supplier;

@@ -73,12 +72,6 @@ public IndexFieldMapper() {
super(IndexFieldType.INSTANCE);
}

@Override
public void preParse(ParseContext context) throws IOException {}

@Override
protected void parseCreateField(ParseContext context) throws IOException {}

@Override
protected String contentType() {
return CONTENT_TYPE;
@@ -48,15 +48,18 @@ public final class Mapping implements ToXContentFragment {
final RootObjectMapper root;
final MetadataFieldMapper[] metadataMappers;
final Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap;
final Map<String, MetadataFieldMapper> metadataMappersByName;
final Map<String, Object> meta;

public Mapping(Version indexCreated, RootObjectMapper rootObjectMapper,
MetadataFieldMapper[] metadataMappers, Map<String, Object> meta) {
this.indexCreated = indexCreated;
this.metadataMappers = metadataMappers;
Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap = new HashMap<>();
Map<String, MetadataFieldMapper> metadataMappersByName = new HashMap<>();
for (MetadataFieldMapper metadataMapper : metadataMappers) {
metadataMappersMap.put(metadataMapper.getClass(), metadataMapper);
metadataMappersByName.put(metadataMapper.name(), metadataMapper);
}
this.root = rootObjectMapper;
// keep root mappers sorted for consistent serialization
@@ -67,6 +70,7 @@ public int compare(Mapper o1, Mapper o2) {
}
});
this.metadataMappersMap = unmodifiableMap(metadataMappersMap);
this.metadataMappersByName = unmodifiableMap(metadataMappersByName);
this.meta = meta;
}

@@ -136,6 +140,10 @@ public Mapping merge(Mapping mergeWith, MergeReason reason) {
return new Mapping(indexCreated, mergedRoot, mergedMetadataMappers.values().toArray(new MetadataFieldMapper[0]), mergedMeta);
}

public MetadataFieldMapper getMetadataMapper(String mapperName) {
return metadataMappersByName.get(mapperName);
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
root.toXContent(builder, params, new ToXContent() {
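
Mapping now keeps a second, name-keyed view of the metadata mappers (metadataMappersByName) alongside the existing class-keyed map and exposes it through getMetadataMapper(String). This gives DocumentParser a constant-time lookup by field name, replacing the linear scan mentioned in the commit message; like the class-keyed map, it is wrapped as an unmodifiable view.
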
@@ -153,10 +153,18 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params)
return builder.endObject();
}

@Override
protected void parseCreateField(ParseContext context) throws IOException {
throw new MapperParsingException("Field [" + name() + "] is a metadata field and cannot be added inside"
+ " a document. Use the index API request parameters.");
}

/**
* Called before {@link FieldMapper#parse(ParseContext)} on the {@link RootObjectMapper}.
*/
public abstract void preParse(ParseContext context) throws IOException;
public void preParse(ParseContext context) throws IOException {
// do nothing
}

/**
* Called after {@link FieldMapper#parse(ParseContext)} on the {@link RootObjectMapper}.
@@ -169,5 +177,4 @@ public void postParse(ParseContext context) throws IOException {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "].");
}

}
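
With parseCreateField() gaining a throwing default and preParse() becoming a concrete no-op rather than an abstract method, the built-in metadata mappers no longer need boilerplate overrides. The per-mapper diffs in this commit reflect that: empty or purely delegating preParse()/parse()/postParse() stubs are deleted, and mappers such as IdFieldMapper, RoutingFieldMapper, SeqNoFieldMapper and SourceFieldMapper fold their former parseCreateField() work directly into preParse().
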
@@ -31,7 +31,6 @@
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.query.QueryShardContext;

import java.io.IOException;
import java.util.Collections;

public class NestedPathFieldMapper extends MetadataFieldMapper {
@@ -93,24 +92,9 @@ private NestedPathFieldMapper(Settings settings) {
super(new NestedPathFieldType(settings));
}

@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
}

@Override
public void parse(ParseContext context) throws IOException {
// we parse in pre parse
}

@Override
protected void parseCreateField(ParseContext context) throws IOException {
}

@Override
protected String contentType() {
return NAME;
}


}
@@ -117,18 +117,6 @@ public boolean required() {

@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
}

@Override
public void parse(ParseContext context) throws IOException {
// no need ot parse here, we either get the routing in the sourceToParse
// or we don't have routing, if we get it in sourceToParse, we process it in preParse
// which will always be called
}

@Override
protected void parseCreateField(ParseContext context) throws IOException {
String routing = context.sourceToParse().routing();
if (routing != null) {
context.doc().add(new Field(fieldType().name(), routing, Defaults.FIELD_TYPE));
@@ -182,11 +182,6 @@ public SeqNoFieldMapper() {

@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
}

@Override
protected void parseCreateField(ParseContext context) throws IOException {
// see InternalEngine.innerIndex to see where the real version value is set
// also see ParsedDocument.updateSeqID (called by innerIndex)
SequenceIDFields seqID = SequenceIDFields.emptySeqID();
@@ -196,11 +191,6 @@ protected void parseCreateField(ParseContext context) throws IOException {
context.doc().add(seqID.primaryTerm);
}

@Override
public void parse(ParseContext context) throws IOException {
// fields are added in parseCreateField
}

@Override
public void postParse(ParseContext context) throws IOException {
// In the case of nested docs, let's fill nested docs with the original
@@ -156,16 +156,6 @@ public boolean isComplete() {

@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
}

@Override
public void parse(ParseContext context) throws IOException {
// nothing to do here, we will call it in pre parse
}

@Override
protected void parseCreateField(ParseContext context) throws IOException {
BytesReference originalSource = context.sourceToParse().source();
XContentType contentType = context.sourceToParse().getXContentType();
final BytesReference adaptedSource = applyFilters(originalSource, contentType);
@@ -168,21 +168,6 @@ private TypeFieldMapper() {
super(new TypeFieldType());
}

@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
}

@Override
public void parse(ParseContext context) throws IOException {
// we parse in pre parse
}

@Override
protected void parseCreateField(ParseContext context) throws IOException {

}

@Override
protected String contentType() {
return CONTENT_TYPE;