Add support for storing JSON fields. (#34942)
jtibshirani committed Mar 6, 2019
1 parent c0506a6 commit 2853b6a
Showing 4 changed files with 91 additions and 35 deletions.
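
With this commit, the 'json' field type accepts the standard 'store' mapping parameter (the Builder override that previously rejected it is removed below). As a quick orientation, here is a minimal sketch of a mapping that both indexes and stores a json field, mirroring the store_and_index case in the updated testEnableStore further down; the field name "headers" and the wrapper class are hypothetical, added only for illustration.

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class StoredJsonMappingSketch {
    public static void main(String[] args) throws Exception {
        // Build a mapping with a 'json' field that is both indexed and stored.
        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject()
            .startObject("type")
                .startObject("properties")
                    .startObject("headers")            // hypothetical field name
                        .field("type", "json")
                        .field("store", true)
                    .endObject()
                .endObject()
            .endObject()
        .endObject();
        System.out.println(Strings.toString(mapping));
    }
}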
@@ -20,6 +20,7 @@
package org.elasticsearch.index.mapper;

import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
@@ -28,11 +29,14 @@
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
@@ -71,6 +75,9 @@
*
* Note that \0 is a reserved separator character, and cannot be used in the keys of the JSON object
* (see {@link JsonFieldParser#SEPARATOR}).
*
* When 'store' is enabled, a single stored field is added containing the entire JSON object in
* pretty-printed format.
*/
public final class JsonFieldMapper extends FieldMapper {

@@ -139,12 +146,6 @@ public Builder copyTo(CopyTo copyTo) {
throw new UnsupportedOperationException("[copy_to] is not supported for [" + CONTENT_TYPE + "] fields.");
}

@Override
public Builder store(boolean store) {
throw new UnsupportedOperationException("[store] is not currently supported for [" +
CONTENT_TYPE + "] fields.");
}

@Override
public JsonFieldMapper build(BuilderContext context) {
setupFieldType(context);
@@ -377,7 +378,8 @@ private JsonFieldMapper(String simpleName,
assert fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) <= 0;

this.ignoreAbove = ignoreAbove;
this.fieldParser = new JsonFieldParser(fieldType.name(), keyedFieldName(), fieldType, ignoreAbove);
this.fieldParser = new JsonFieldParser(fieldType.name(), keyedFieldName(),
ignoreAbove, fieldType.nullValueAsString());
}

@Override
@@ -415,12 +417,36 @@ protected void parseCreateField(ParseContext context, List<IndexableField> field
return;
}

if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
fields.addAll(fieldParser.parse(context.parser()));
createFieldNamesField(context, fields);
} else {
if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored()) {
context.parser().skipChildren();
return;
}

BytesRef storedValue = null;
if (fieldType.stored()) {
XContentBuilder builder = XContentFactory.jsonBuilder()
.prettyPrint()
.copyCurrentStructure(context.parser());
storedValue = BytesReference.bytes(builder).toBytesRef();
fields.add(new StoredField(fieldType.name(), storedValue));
}

if (fieldType().indexOptions() != IndexOptions.NONE) {
XContentParser indexedFieldsParser = context.parser();

// If store is enabled, we've already consumed the content to produce the stored field. Here we
// 'reset' the parser, so that we can traverse the content again.
if (storedValue != null) {
indexedFieldsParser = JsonXContent.jsonXContent.createParser(context.parser().getXContentRegistry(),
context.parser().getDeprecationHandler(),
storedValue.bytes);
indexedFieldsParser.nextToken();
}

fields.addAll(fieldParser.parse(indexedFieldsParser));
}

createFieldNamesField(context, fields);
}

@Override
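The new parseCreateField logic above consumes the incoming content once to build the pretty-printed stored value, then, when the field is also indexed, creates a fresh JSON parser over those bytes so the same content can be traversed again. A minimal standalone sketch of that 'reset' step follows; the registry and deprecation-handler constants are assumptions made only to keep the example self-contained.

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class ReparseStoredJsonSketch {
    public static void main(String[] args) throws Exception {
        // Pretty-print a small object, as the mapper does when building the stored field.
        XContentBuilder builder = JsonXContent.contentBuilder()
            .prettyPrint()
            .startObject()
                .field("key", "value")
            .endObject();
        byte[] storedBytes = BytesReference.bytes(builder).toBytesRef().bytes;

        // 'Reset' by creating a new parser over the stored bytes and advancing to the
        // first token, mirroring the indexedFieldsParser handling in parseCreateField.
        XContentParser reparser = JsonXContent.jsonXContent.createParser(
            NamedXContentRegistry.EMPTY,
            DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
            storedBytes);
        System.out.println(reparser.nextToken()); // START_OBJECT
    }
}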
@@ -20,6 +20,7 @@
package org.elasticsearch.index.mapper;

import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -39,17 +40,17 @@ public class JsonFieldParser {
private final String rootFieldName;
private final String keyedFieldName;

private final MappedFieldType fieldType;
private final int ignoreAbove;
private final String nullValueAsString;

JsonFieldParser(String rootFieldName,
String keyedFieldName,
MappedFieldType fieldType,
int ignoreAbove) {
int ignoreAbove,
String nullValueAsString) {
this.rootFieldName = rootFieldName;
this.keyedFieldName = keyedFieldName;
this.fieldType = fieldType;
this.ignoreAbove = ignoreAbove;
this.nullValueAsString = nullValueAsString;
}

public List<IndexableField> parse(XContentParser parser) throws IOException {
@@ -111,9 +112,8 @@ private void parseFieldValue(XContentParser.Token token,
String value = parser.text();
addField(path, currentName, value, fields);
} else if (token == XContentParser.Token.VALUE_NULL) {
String value = fieldType.nullValueAsString();
if (value != null) {
addField(path, currentName, value, fields);
if (nullValueAsString != null) {
addField(path, currentName, nullValueAsString, fields);
}
} else {
// Note that we throw an exception here just to be safe. We don't actually expect to reach
@@ -137,8 +137,8 @@ private void addField(ContentPath path,
}
String keyedValue = createKeyedValue(key, value);

fields.add(new Field(rootFieldName, new BytesRef(value), fieldType));
fields.add(new Field(keyedFieldName, new BytesRef(keyedValue), fieldType));
fields.add(new StringField(rootFieldName, new BytesRef(value), Field.Store.NO));
fields.add(new StringField(keyedFieldName, new BytesRef(keyedValue), Field.Store.NO));
}

public static String createKeyedValue(String key, String value) {
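As the mapper javadoc notes, \0 is reserved as the separator for keyed values: addField above indexes each leaf twice, once into the root field as the bare value and once into the keyed field as the key and value joined by the separator (see createKeyedValue). A small illustrative helper follows; it is a stand-in for the real logic, not the actual implementation.

public class KeyedValueSketch {
    private static final char SEPARATOR = '\0';

    // Illustrative stand-in for JsonFieldParser#createKeyedValue.
    static String createKeyedValue(String key, String value) {
        return key + SEPARATOR + value;
    }

    public static void main(String[] args) {
        // For the leaf "key": "value", the root field "field" receives "value" and
        // the keyed field "field._keyed" receives "key\0value".
        String keyed = createKeyedValue("key", "value");
        System.out.println(keyed.replace(SEPARATOR, '|')); // key|value
    }
}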
@@ -27,6 +27,7 @@
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.JsonFieldMapper.RootJsonFieldType;
import org.elasticsearch.plugins.Plugin;
@@ -130,16 +131,51 @@ public void testEnableStore() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("properties")
.startObject("field")
.startObject("store_and_index")
.field("type", "json")
.field("store", true)
.endObject()
.startObject("store_only")
.field("type", "json")
.field("index", false)
.field("store", true)
.endObject()
.endObject()
.endObject()
.endObject());

expectThrows(UnsupportedOperationException.class, () ->
parser.parse("type", new CompressedXContent(mapping)));
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());

BytesReference doc = BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("store_only")
.field("key", "value")
.endObject()
.startObject("store_and_index")
.field("key", "value")
.endObject()
.endObject());
ParsedDocument parsedDoc = mapper.parse(new SourceToParse("test", "type", "1", doc, XContentType.JSON));

// We make sure to pretty-print here, since the field is always stored in pretty-printed format.
BytesReference storedValue = BytesReference.bytes(JsonXContent.contentBuilder()
.prettyPrint()
.startObject()
.field("key", "value")
.endObject());

IndexableField[] storeOnly = parsedDoc.rootDoc().getFields("store_only");
assertEquals(1, storeOnly.length);

assertTrue(storeOnly[0].fieldType().stored());
assertEquals(storedValue.toBytesRef(), storeOnly[0].binaryValue());

IndexableField[] storeAndIndex = parsedDoc.rootDoc().getFields("store_and_index");
assertEquals(2, storeAndIndex.length);

assertTrue(storeAndIndex[0].fieldType().stored());
assertEquals(storedValue.toBytesRef(), storeAndIndex[0].binaryValue());
assertFalse(storeAndIndex[1].fieldType().stored());
}

public void testIndexOptions() throws IOException {
@@ -41,10 +41,7 @@ public class JsonFieldParserTests extends ESTestCase {
@Before
public void setUp() throws Exception {
super.setUp();

MappedFieldType fieldType = new RootJsonFieldType();
fieldType.setName("field");
parser = new JsonFieldParser("field", "field._keyed", fieldType, Integer.MAX_VALUE);
parser = new JsonFieldParser("field", "field._keyed", Integer.MAX_VALUE, null);
}

public void testTextValues() throws Exception {
Expand Down Expand Up @@ -222,9 +219,9 @@ public void testIgnoreAbove() throws Exception {

RootJsonFieldType fieldType = new RootJsonFieldType();
fieldType.setName("field");
JsonFieldParser ignoreAboveParser = new JsonFieldParser("field", "field._keyed", fieldType, 10);
JsonFieldParser parserWithIgnoreAbove = new JsonFieldParser("field", "field._keyed", 10, null);

List<IndexableField> fields = ignoreAboveParser.parse(xContentParser);
List<IndexableField> fields = parserWithIgnoreAbove.parse(xContentParser);
assertEquals(0, fields.size());
}

@@ -236,13 +233,10 @@ public void testNullValues() throws Exception {
assertEquals(0, fields.size());

xContentParser = createXContentParser(input);
JsonFieldParser parserWithNullValue = new JsonFieldParser("field", "field._keyed",
Integer.MAX_VALUE, "placeholder");

RootJsonFieldType fieldType = new RootJsonFieldType();
fieldType.setName("field");
fieldType.setNullValue("placeholder");
JsonFieldParser nullValueParser = new JsonFieldParser("field", "field._keyed", fieldType, Integer.MAX_VALUE);

fields = nullValueParser.parse(xContentParser);
fields = parserWithNullValue.parse(xContentParser);
assertEquals(2, fields.size());

IndexableField field = fields.get(0);
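The reworked testNullValues above reflects the refactor in which the null_value placeholder is passed directly to the JsonFieldParser constructor instead of being read from the field type; a null leaf then produces the placeholder in both the root and keyed fields. The sketch below is under stated assumptions: it would need to live in org.elasticsearch.index.mapper to reach the package-private constructor, and it uses the String-accepting createParser overload.

package org.elasticsearch.index.mapper;

import java.io.IOException;
import java.util.List;

import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class NullValueSketch {
    public static void main(String[] args) throws IOException {
        // Parse a document whose only leaf is null.
        XContentParser xContentParser = JsonXContent.jsonXContent.createParser(
            NamedXContentRegistry.EMPTY,
            DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
            "{ \"key\": null }");
        xContentParser.nextToken();

        JsonFieldParser parserWithNullValue = new JsonFieldParser(
            "field", "field._keyed", Integer.MAX_VALUE, "placeholder");
        List<IndexableField> fields = parserWithNullValue.parse(xContentParser);

        // Expect two indexable fields: "field" -> "placeholder" and
        // "field._keyed" -> "key" + '\0' + "placeholder".
        System.out.println(fields.size()); // 2
    }
}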
