Fix AnalyzeAction response serialization (#44284)
Currently we lose information about whether a token list in an AnalyzeAction
response is null or an empty list, because we write a 0 value to the stream in
both cases and deserialize it to a null value on the receiving side. This change
fixes this by writing an additional flag indicating whether the value is null,
followed by the size of the list and its content.

Closes #44078
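
In wire-format terms, the old encoding wrote only a size, while the new one writes a presence flag first; Elasticsearch's StreamOutput#writeOptionalArray and StreamInput#readOptionalArray (used in the diff below) provide exactly this. A minimal sketch of the before/after encoding, using plain java.io streams rather than the Elasticsearch stream API:

// Sketch of the encoding change using plain java.io streams (the production
// code uses StreamOutput/StreamInput; this is only an illustration).
import java.io.*;
import java.util.*;

class OptionalListEncoding {
    // Before: only a size is written, so null and empty both serialize as 0
    // and always deserialize to null.
    static void writeOld(DataOutputStream out, List<String> tokens) throws IOException {
        out.writeInt(tokens == null ? 0 : tokens.size());
        if (tokens != null) {
            for (String token : tokens) {
                out.writeUTF(token);
            }
        }
    }

    // After: a presence flag comes first, then size and contents, so
    // null (flag=false) and empty (flag=true, size=0) remain distinct.
    static void writeNew(DataOutputStream out, List<String> tokens) throws IOException {
        out.writeBoolean(tokens != null);
        if (tokens != null) {
            out.writeInt(tokens.size());
            for (String token : tokens) {
                out.writeUTF(token);
            }
        }
    }

    static List<String> readNew(DataInputStream in) throws IOException {
        if (in.readBoolean() == false) {
            return null; // list was absent
        }
        int size = in.readInt();
        List<String> tokens = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            tokens.add(in.readUTF());
        }
        return tokens; // possibly empty, never null
    }
}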
Christoph Büscher committed Jul 14, 2019
1 parent 9ee7062 commit ef16bea
Showing 5 changed files with 207 additions and 84 deletions.
AnalyzeResponseTests.java (high-level REST client)
@@ -22,13 +22,10 @@
 import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
 import org.elasticsearch.client.AbstractResponseTestCase;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.RandomObjects;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 
 public class AnalyzeResponseTests extends AbstractResponseTestCase<AnalyzeAction.Response, AnalyzeResponse> {
 
@@ -37,7 +34,7 @@ protected AnalyzeAction.Response createServerTestInstance() {
         int tokenCount = randomIntBetween(1, 30);
         AnalyzeAction.AnalyzeToken[] tokens = new AnalyzeAction.AnalyzeToken[tokenCount];
         for (int i = 0; i < tokenCount; i++) {
-            tokens[i] = randomToken();
+            tokens[i] = RandomObjects.randomToken(random());
         }
         if (randomBoolean()) {
             AnalyzeAction.CharFilteredText[] charfilters = null;
@@ -62,45 +59,6 @@ protected AnalyzeAction.Response createServerTestInstance() {
         return new AnalyzeAction.Response(Arrays.asList(tokens), null);
     }
 
-    private AnalyzeAction.AnalyzeToken randomToken() {
-        String token = randomAlphaOfLengthBetween(1, 20);
-        int position = randomIntBetween(0, 1000);
-        int startOffset = randomIntBetween(0, 1000);
-        int endOffset = randomIntBetween(0, 1000);
-        int posLength = randomIntBetween(1, 5);
-        String type = randomAlphaOfLengthBetween(1, 20);
-        Map<String, Object> extras = new HashMap<>();
-        if (randomBoolean()) {
-            int entryCount = randomInt(6);
-            for (int i = 0; i < entryCount; i++) {
-                switch (randomInt(6)) {
-                    case 0:
-                    case 1:
-                    case 2:
-                    case 3:
-                        String key = randomAlphaOfLength(5);
-                        String value = randomAlphaOfLength(10);
-                        extras.put(key, value);
-                        break;
-                    case 4:
-                        String objkey = randomAlphaOfLength(5);
-                        Map<String, String> obj = new HashMap<>();
-                        obj.put(randomAlphaOfLength(5), randomAlphaOfLength(10));
-                        extras.put(objkey, obj);
-                        break;
-                    case 5:
-                        String listkey = randomAlphaOfLength(5);
-                        List<String> list = new ArrayList<>();
-                        list.add(randomAlphaOfLength(4));
-                        list.add(randomAlphaOfLength(6));
-                        extras.put(listkey, list);
-                        break;
-                }
-            }
-        }
-        return new AnalyzeAction.AnalyzeToken(token, position, startOffset, endOffset, posLength, type, extras);
-    }
-
     @Override
     protected AnalyzeResponse doParseToClientInstance(XContentParser parser) throws IOException {
         return AnalyzeResponse.fromXContent(parser);
AnalyzeAction.java
@@ -19,9 +19,10 @@
 
 package org.elasticsearch.action.admin.indices.analyze;
 
-import org.elasticsearch.action.ActionType;
+import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.support.single.shard.SingleShardRequest;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
@@ -292,22 +293,29 @@ public static class Response extends ActionResponse implements ToXContentObject
         private final List<AnalyzeToken> tokens;
 
         public Response(List<AnalyzeToken> tokens, DetailAnalyzeResponse detail) {
+            if (tokens == null && detail == null) {
+                throw new IllegalArgumentException("Neither token nor detail set on AnalysisAction.Response");
+            }
             this.tokens = tokens;
             this.detail = detail;
         }
 
         public Response(StreamInput in) throws IOException {
             super.readFrom(in);
-            int size = in.readVInt();
-            if (size > 0) {
-                tokens = new ArrayList<>(size);
-                for (int i = 0; i < size; i++) {
-                    tokens.add(new AnalyzeToken(in));
-                }
-            }
-            else {
-                tokens = null;
-            }
+            if (in.getVersion().onOrAfter(Version.V_7_3_0)) {
+                AnalyzeToken[] tokenArray = in.readOptionalArray(AnalyzeToken::new, AnalyzeToken[]::new);
+                tokens = tokenArray != null ? Arrays.asList(tokenArray) : null;
+            } else {
+                int size = in.readVInt();
+                if (size > 0) {
+                    tokens = new ArrayList<>(size);
+                    for (int i = 0; i < size; i++) {
+                        tokens.add(new AnalyzeToken(in));
+                    }
+                } else {
+                    tokens = null;
+                }
+            }
             detail = in.readOptionalWriteable(DetailAnalyzeResponse::new);
         }

@@ -347,21 +355,33 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
-            if (tokens != null) {
-                out.writeVInt(tokens.size());
-                for (AnalyzeToken token : tokens) {
-                    token.writeTo(out);
-                }
-            } else {
-                out.writeVInt(0);
-            }
+            if (out.getVersion().onOrAfter(Version.V_7_3_0)) {
+                AnalyzeToken[] tokenArray = null;
+                if (tokens != null) {
+                    tokenArray = tokens.toArray(new AnalyzeToken[0]);
+                }
+                out.writeOptionalArray(tokenArray);
+            } else {
+                if (tokens != null) {
+                    out.writeVInt(tokens.size());
+                    for (AnalyzeToken token : tokens) {
+                        token.writeTo(out);
+                    }
+                } else {
+                    out.writeVInt(0);
+                }
+            }
             out.writeOptionalWriteable(detail);
         }
 
         @Override
         public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
             Response that = (Response) o;
             return Objects.equals(detail, that.detail) &&
                 Objects.equals(tokens, that.tokens);
@@ -402,8 +422,12 @@ public static class AnalyzeToken implements Writeable, ToXContentObject {
 
         @Override
         public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
             AnalyzeToken that = (AnalyzeToken) o;
             return startOffset == that.startOffset &&
                 endOffset == that.endOffset &&
@@ -583,8 +607,12 @@ public AnalyzeTokenList[] tokenfilters() {
 
         @Override
         public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
             DetailAnalyzeResponse that = (DetailAnalyzeResponse) o;
             return customAnalyzer == that.customAnalyzer &&
                 Objects.equals(analyzer, that.analyzer) &&
@@ -670,8 +698,12 @@ public static class AnalyzeTokenList implements Writeable, ToXContentObject {
 
         @Override
        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
             AnalyzeTokenList that = (AnalyzeTokenList) o;
             return Objects.equals(name, that.name) &&
                 Arrays.equals(tokens, that.tokens);
@@ -691,16 +723,19 @@ public AnalyzeTokenList(String name, AnalyzeToken[] tokens) {
 
         AnalyzeTokenList(StreamInput in) throws IOException {
             name = in.readString();
-            int size = in.readVInt();
-            if (size > 0) {
-                tokens = new AnalyzeToken[size];
-                for (int i = 0; i < size; i++) {
-                    tokens[i] = new AnalyzeToken(in);
-                }
-            }
-            else {
-                tokens = null;
-            }
+            if (in.getVersion().onOrAfter(Version.V_7_3_0)) {
+                tokens = in.readOptionalArray(AnalyzeToken::new, AnalyzeToken[]::new);
+            } else {
+                int size = in.readVInt();
+                if (size > 0) {
+                    tokens = new AnalyzeToken[size];
+                    for (int i = 0; i < size; i++) {
+                        tokens[i] = new AnalyzeToken(in);
+                    }
+                } else {
+                    tokens = null;
+                }
+            }
         }
 
         public String getName() {
@@ -733,13 +768,17 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             out.writeString(name);
-            if (tokens != null) {
-                out.writeVInt(tokens.length);
-                for (AnalyzeToken token : tokens) {
-                    token.writeTo(out);
-                }
-            } else {
-                out.writeVInt(0);
-            }
+            if (out.getVersion().onOrAfter(Version.V_7_3_0)) {
+                out.writeOptionalArray(tokens);
+            } else {
+                if (tokens != null) {
+                    out.writeVInt(tokens.length);
+                    for (AnalyzeToken token : tokens) {
+                        token.writeTo(out);
+                    }
+                } else {
+                    out.writeVInt(0);
+                }
+            }
         }
     }
@@ -790,8 +829,12 @@ public void writeTo(StreamOutput out) throws IOException {
 
         @Override
         public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
             CharFilteredText that = (CharFilteredText) o;
             return Objects.equals(name, that.name) &&
                 Arrays.equals(texts, that.texts);
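
The Version.V_7_3_0 checks above keep mixed-version clusters working: the stream version reflects the node on the other side of the connection, so the new optional-array encoding is only used when both nodes are on 7.3.0 or later, and the legacy size-only encoding is kept for older peers. A sketch of the gating pattern, again with plain java.io and a hypothetical numeric version constant instead of Elasticsearch's Version/StreamOutput:

// Hypothetical sketch of version-gated serialization (not the Elasticsearch API).
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.List;

class VersionGateSketch {
    static final int V_7_3_0 = 7030099; // hypothetical numeric wire version

    static void writeTokens(DataOutputStream out, int streamVersion, List<String> tokens) throws IOException {
        if (streamVersion >= V_7_3_0) {
            // 7.3+ peers understand the presence flag, so null survives.
            out.writeBoolean(tokens != null);
            if (tokens != null) {
                out.writeInt(tokens.size());
                for (String token : tokens) {
                    out.writeUTF(token);
                }
            }
        } else {
            // Older peers expect the legacy size-only encoding; null and
            // empty both degrade to 0, as before this fix.
            out.writeInt(tokens == null ? 0 : tokens.size());
            if (tokens != null) {
                for (String token : tokens) {
                    out.writeUTF(token);
                }
            }
        }
    }
}

Note that writing the legacy format to old nodes means the null-versus-empty distinction is still lost on those connections; the fix only takes full effect once both sides are upgraded.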
AnalyzeResponseTests.java (server)
@@ -19,20 +19,25 @@
 
 package org.elasticsearch.action.admin.indices.analyze;
 
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction.AnalyzeToken;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
+import org.elasticsearch.test.RandomObjects;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.List;
 import java.util.Map;
 
 import static org.hamcrest.Matchers.equalTo;
 
-public class AnalyzeResponseTests extends ESTestCase {
+public class AnalyzeResponseTests extends AbstractWireSerializingTestCase<AnalyzeAction.Response> {
 
     @SuppressWarnings("unchecked")
     public void testNullResponseToXContent() throws IOException {
@@ -59,6 +64,64 @@ public void testNullResponseToXContent() throws IOException {
             assertThat(nullTokens.size(), equalTo(0));
             assertThat(name, equalTo(nameValue));
         }
     }
 
+    public void testConstructorArgs() {
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new AnalyzeAction.Response(null, null));
+        assertEquals("Neither token nor detail set on AnalysisAction.Response", ex.getMessage());
+    }
+
+    @Override
+    protected AnalyzeAction.Response createTestInstance() {
+        int tokenCount = randomIntBetween(0, 30);
+        AnalyzeAction.AnalyzeToken[] tokens = new AnalyzeAction.AnalyzeToken[tokenCount];
+        for (int i = 0; i < tokenCount; i++) {
+            tokens[i] = RandomObjects.randomToken(random());
+        }
+        if (randomBoolean()) {
+            AnalyzeAction.CharFilteredText[] charfilters = null;
+            AnalyzeAction.AnalyzeTokenList[] tokenfilters = null;
+            if (randomBoolean()) {
+                charfilters = new AnalyzeAction.CharFilteredText[]{
+                    new AnalyzeAction.CharFilteredText("my_charfilter", new String[]{"one two"})
+                };
+            }
+            if (randomBoolean()) {
+                tokenfilters = new AnalyzeAction.AnalyzeTokenList[]{
+                    new AnalyzeAction.AnalyzeTokenList("my_tokenfilter_1", tokens),
+                    new AnalyzeAction.AnalyzeTokenList("my_tokenfilter_2", tokens)
+                };
+            }
+            AnalyzeAction.DetailAnalyzeResponse dar = new AnalyzeAction.DetailAnalyzeResponse(
+                charfilters,
+                new AnalyzeAction.AnalyzeTokenList("my_tokenizer", tokens),
+                tokenfilters);
+            return new AnalyzeAction.Response(null, dar);
+        }
+        return new AnalyzeAction.Response(Arrays.asList(tokens), null);
+    }
+
+    /**
+     * Either add a token to the token list or change the detail's token list name
+     */
+    @Override
+    protected AnalyzeAction.Response mutateInstance(AnalyzeAction.Response instance) throws IOException {
+        if (instance.getTokens() != null) {
+            List<AnalyzeToken> extendedList = new ArrayList<>(instance.getTokens());
+            extendedList.add(RandomObjects.randomToken(random()));
+            return new AnalyzeAction.Response(extendedList, null);
+        } else {
+            AnalyzeToken[] tokens = instance.detail().tokenizer().getTokens();
+            return new AnalyzeAction.Response(null, new AnalyzeAction.DetailAnalyzeResponse(
+                instance.detail().charfilters(),
+                new AnalyzeAction.AnalyzeTokenList("my_other_tokenizer", tokens),
+                instance.detail().tokenfilters()));
+        }
+    }
+
+    @Override
+    protected Reader<AnalyzeAction.Response> instanceReader() {
+        return AnalyzeAction.Response::new;
+    }
+
 }
AnalyzeActionIT.java
@@ -24,6 +24,7 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.MockKeywordPlugin;
 import org.hamcrest.core.IsNull;
 
@@ -41,6 +42,7 @@
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.startsWith;
 
+@ClusterScope(minNumDataNodes = 2)
 public class AnalyzeActionIT extends ESIntegTestCase {
 
     @Override
@@ -387,5 +389,16 @@ public void testAnalyzeNormalizedKeywordField() throws IOException {
         assertThat(token.getPositionLength(), equalTo(1));
     }
 
+    /**
+     * Input text that doesn't produce tokens should return an empty token list
+     */
+    public void testZeroTokenAnalysis() throws IOException {
+        assertAcked(prepareCreate("test"));
+        ensureGreen("test");
+
+        AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze("test", ".").get();
+        assertNotNull(analyzeResponse.getTokens());
+        assertThat(analyzeResponse.getTokens().size(), equalTo(0));
+    }
 
 }