Remove unused fields in RestAnalyzeAction (#66215)
Moves ParseFields from `RestAnalyzeAction` to `AnalyzeAction` where they
are actually used for parsing the request body.
chengyang14 authored and Christoph Büscher committed Dec 14, 2020
1 parent: d2bd9db · commit: f1a709c
Showing 4 changed files with 30 additions and 28 deletions.
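For context, the sketch below illustrates the pattern this commit consolidates: the JSON field-name constants live next to the parser that actually consumes the `_analyze` request body, and other classes reuse those constants instead of re-declaring the strings. It is a simplified, self-contained stand-in, not the real Elasticsearch code: `ParseField` here is a toy replacement for `org.elasticsearch.common.ParseField`, and `AnalyzeFields`, `ParseFieldSketch`, and `CATEGORIZATION_TOKENIZER` are illustrative names only.

// Minimal, self-contained sketch (assumption: plain Java 17, no Elasticsearch
// dependencies). ParseField is a stand-in for org.elasticsearch.common.ParseField,
// and AnalyzeFields mirrors the Fields holder this commit adds to AnalyzeAction.
import java.util.Map;

public class ParseFieldSketch {

    /** Stand-in for ParseField: carries the JSON key it names. */
    record ParseField(String preferredName) {}

    /** Mirrors the Fields class added to AnalyzeAction in this commit. */
    static final class AnalyzeFields {
        static final ParseField ANALYZER = new ParseField("analyzer");
        static final ParseField TEXT = new ParseField("text");
        static final ParseField TOKENIZER = new ParseField("tokenizer");
    }

    /**
     * A second consumer reuses the same constant, the way the ML
     * CategorizationAnalyzerConfig classes now point at AnalyzeAction.Fields
     * instead of RestAnalyzeAction.Fields.
     */
    static final ParseField CATEGORIZATION_TOKENIZER = AnalyzeFields.TOKENIZER;

    public static void main(String[] args) {
        // A request body keyed by the shared field names; the request parser
        // and the reusing class both refer to the same constants.
        Map<String, Object> body = Map.of(
                AnalyzeFields.ANALYZER.preferredName(), "standard",
                AnalyzeFields.TEXT.preferredName(), "quick brown fox");
        System.out.println(body.get(AnalyzeFields.ANALYZER.preferredName()));
        System.out.println(CATEGORIZATION_TOKENIZER.preferredName());
    }
}

With the constants owned by AnalyzeAction, RestAnalyzeAction no longer needs its own Fields holder, which is exactly what the diff below removes.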
File: CategorizationAnalyzerConfig.java (package org.elasticsearch.client.ml.job.config)
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.client.ml.job.config;

+ import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContentFragment;
@@ -59,9 +60,9 @@
public class CategorizationAnalyzerConfig implements ToXContentFragment {

public static final ParseField CATEGORIZATION_ANALYZER = new ParseField("categorization_analyzer");
- private static final ParseField TOKENIZER = RestAnalyzeAction.Fields.TOKENIZER;
- private static final ParseField TOKEN_FILTERS = RestAnalyzeAction.Fields.TOKEN_FILTERS;
- private static final ParseField CHAR_FILTERS = RestAnalyzeAction.Fields.CHAR_FILTERS;
+ private static final ParseField TOKENIZER = AnalyzeAction.Fields.TOKENIZER;
+ private static final ParseField TOKEN_FILTERS = AnalyzeAction.Fields.TOKEN_FILTERS;
+ private static final ParseField CHAR_FILTERS = AnalyzeAction.Fields.CHAR_FILTERS;

/**
* This method is only used in the unit tests - in production code this config is always parsed as a fragment.
File: AnalyzeAction.java (package org.elasticsearch.action.admin.indices.analyze)
@@ -55,6 +55,18 @@ private AnalyzeAction() {
super(NAME, AnalyzeAction.Response::new);
}

+ public static class Fields {
+ public static final ParseField ANALYZER = new ParseField("analyzer");
+ public static final ParseField TEXT = new ParseField("text");
+ public static final ParseField FIELD = new ParseField("field");
+ public static final ParseField TOKENIZER = new ParseField("tokenizer");
+ public static final ParseField TOKEN_FILTERS = new ParseField("filter");
+ public static final ParseField CHAR_FILTERS = new ParseField("char_filter");
+ public static final ParseField EXPLAIN = new ParseField("explain");
+ public static final ParseField ATTRIBUTES = new ParseField("attributes");
+ public static final ParseField NORMALIZER = new ParseField("normalizer");
+ }

/**
* A request to analyze a text associated with a specific index. Allow to provide
* the actual analyzer name to perform the analysis with.
@@ -265,19 +277,20 @@ public static Request fromXContent(XContentParser parser, String index) throws I
}

private static final ObjectParser<Request, Void> PARSER = new ObjectParser<>("analyze_request");

static {
- PARSER.declareStringArray(Request::text, new ParseField("text"));
- PARSER.declareString(Request::analyzer, new ParseField("analyzer"));
+ PARSER.declareStringArray(Request::text, Fields.TEXT);
+ PARSER.declareString(Request::analyzer, Fields.ANALYZER);
PARSER.declareField(Request::tokenizer, (p, c) -> NameOrDefinition.fromXContent(p),
new ParseField("tokenizer"), ObjectParser.ValueType.OBJECT_OR_STRING);
Fields.TOKENIZER, ObjectParser.ValueType.OBJECT_OR_STRING);
PARSER.declareObjectArray(Request::setTokenFilters, (p, c) -> NameOrDefinition.fromXContent(p),
new ParseField("filter"));
Fields.TOKEN_FILTERS);
PARSER.declareObjectArray(Request::setCharFilters, (p, c) -> NameOrDefinition.fromXContent(p),
new ParseField("char_filter"));
PARSER.declareString(Request::field, new ParseField("field"));
PARSER.declareBoolean(Request::explain, new ParseField("explain"));
PARSER.declareStringArray(Request::attributes, new ParseField("attributes"));
PARSER.declareString(Request::normalizer, new ParseField("normalizer"));
Fields.CHAR_FILTERS);
PARSER.declareString(Request::field, Fields.FIELD);
PARSER.declareBoolean(Request::explain, Fields.EXPLAIN);
PARSER.declareStringArray(Request::attributes, Fields.ATTRIBUTES);
PARSER.declareString(Request::normalizer, Fields.NORMALIZER);
}

}
File: RestAnalyzeAction.java
@@ -20,7 +20,6 @@

import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.client.node.NodeClient;
- import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestRequest;
@@ -36,18 +35,6 @@

public class RestAnalyzeAction extends BaseRestHandler {

- public static class Fields {
- public static final ParseField ANALYZER = new ParseField("analyzer");
- public static final ParseField TEXT = new ParseField("text");
- public static final ParseField FIELD = new ParseField("field");
- public static final ParseField TOKENIZER = new ParseField("tokenizer");
- public static final ParseField TOKEN_FILTERS = new ParseField("filter");
- public static final ParseField CHAR_FILTERS = new ParseField("char_filter");
- public static final ParseField EXPLAIN = new ParseField("explain");
- public static final ParseField ATTRIBUTES = new ParseField("attributes");
- public static final ParseField NORMALIZER = new ParseField("normalizer");
- }

@Override
public List<Route> routes() {
return unmodifiableList(asList(
File: CategorizationAnalyzerConfig.java (package org.elasticsearch.xpack.core.ml.job.config)
@@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.core.ml.job.config;

+ import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -54,9 +55,9 @@
public class CategorizationAnalyzerConfig implements ToXContentFragment, Writeable {

public static final ParseField CATEGORIZATION_ANALYZER = new ParseField("categorization_analyzer");
- public static final ParseField TOKENIZER = RestAnalyzeAction.Fields.TOKENIZER;
- public static final ParseField TOKEN_FILTERS = RestAnalyzeAction.Fields.TOKEN_FILTERS;
- public static final ParseField CHAR_FILTERS = RestAnalyzeAction.Fields.CHAR_FILTERS;
+ public static final ParseField TOKENIZER = AnalyzeAction.Fields.TOKENIZER;
+ public static final ParseField TOKEN_FILTERS = AnalyzeAction.Fields.TOKEN_FILTERS;
+ public static final ParseField CHAR_FILTERS = AnalyzeAction.Fields.CHAR_FILTERS;

/**
* This method is only used in the unit tests - in production code this config is always parsed as a fragment.
