
Commit

[Remove] LegacyESVersion.V_7_6_* and V_7_7_* constants
Removes all usages of the LegacyESVersion.V_7_6_* and LegacyESVersion.V_7_7_* version
constants, along with the ancient API logic those checks gated.

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
nknize committed Oct 19, 2022
1 parent ec34737 commit 166612b
Showing 59 changed files with 216 additions and 652 deletions.
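Most of the deletions in this commit share one shape: a read or write path that branched on the stream's wire version and kept a legacy pre-7.7 format alive. Below is a minimal, standalone sketch of that pattern and of why the legacy branch is now dead code, under the assumption that the oldest peer a current node still exchanges streams with sits on the legacy 7.10 line; the types are toy stand-ins, not the real OpenSearch Version/StreamInput API.

```java
import java.util.List;

// Toy stand-in for OpenSearch's packed version ids
// (assumed layout: major * 1_000_000 + minor * 10_000 + revision * 100 + build).
record WireVersion(int id) {
    boolean before(WireVersion other) { return id < other.id; }
}

public class VersionGateSketch {
    static final WireVersion LEGACY_7_7_0 = new WireVersion(7070099);
    // Assumption: the oldest peer a current node still talks to is legacy 7.10.0.
    static final WireVersion OLDEST_SUPPORTED_PEER = new WireVersion(7100099);

    // The shape of the code this commit deletes: branch on the peer's wire version.
    static String readPath(WireVersion peer) {
        if (peer.before(LEGACY_7_7_0)) {
            return "legacy format: fixed list of booleans"; // unreachable once min peer >= 7.10
        }
        return "current format: string array of metric names";
    }

    public static void main(String[] args) {
        for (WireVersion peer : List.of(OLDEST_SUPPORTED_PEER, new WireVersion(7100299))) {
            // Every reachable peer is >= 7.10.0 > 7.7.0, so only one path is ever taken
            // and the version check can be removed outright.
            System.out.println(peer.id() + " -> " + readPath(peer));
        }
    }
}
```

Once every reachable peer is past the gated version, the check always takes the same branch, so it can be deleted, which is what the hunks below do for NodesInfoRequest, NodesStatsRequest, RankEvalRequest, and the other serialization paths.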
@@ -124,7 +124,7 @@
import org.apache.lucene.analysis.tr.TurkishAnalyzer;
import org.apache.lucene.analysis.util.ElisionFilter;
import org.apache.lucene.util.SetOnce;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.client.Client;
import org.opensearch.cluster.metadata.IndexNameExpressionResolver;
import org.opensearch.cluster.service.ClusterService;
@@ -347,7 +347,12 @@ public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
tokenizers.put("simple_pattern_split", SimplePatternSplitTokenizerFactory::new);
tokenizers.put("thai", ThaiTokenizerFactory::new);
tokenizers.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_6_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_3_0_0)) {
throw new IllegalArgumentException(
"The [nGram] tokenizer name was deprecated pre 1.0. "
+ "Please use the tokenizer name to [ngram] for indices created in versions 3.0 or higher instead."
);
} else {
deprecationLogger.deprecate(
"nGram_tokenizer_deprecation",
"The [nGram] tokenizer name is deprecated and will be removed in a future version. "
@@ -358,7 +363,12 @@ public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
});
tokenizers.put("ngram", NGramTokenizerFactory::new);
tokenizers.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_6_0)) {
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_3_0_0)) {
throw new IllegalArgumentException(
"The [edgeNGram] tokenizer name was deprecated pre 1.0. "
+ "Please use the tokenizer name to [edge_ngram] for indices created in versions 3.0 or higher instead."
);
} else {
deprecationLogger.deprecate(
"edgeNGram_tokenizer_deprecation",
"The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. "
@@ -606,7 +616,12 @@ public List<PreConfiguredTokenizer> getPreConfiguredTokenizers() {

// Temporary shim for aliases. TODO deprecate after they are moved
tokenizers.add(PreConfiguredTokenizer.openSearchVersion("nGram", (version) -> {
if (version.onOrAfter(LegacyESVersion.V_7_6_0)) {
if (version.onOrAfter(Version.V_3_0_0)) {
throw new IllegalArgumentException(
"The [nGram] tokenizer name was deprecated pre 1.0. "
+ "Please use the tokenizer name to [ngram] for indices created in versions 3.0 or higher instead."
);
} else {
deprecationLogger.deprecate(
"nGram_tokenizer_deprecation",
"The [nGram] tokenizer name is deprecated and will be removed in a future version. "
@@ -616,7 +631,12 @@ public List<PreConfiguredTokenizer> getPreConfiguredTokenizers() {
return new NGramTokenizer();
}));
tokenizers.add(PreConfiguredTokenizer.openSearchVersion("edgeNGram", (version) -> {
if (version.onOrAfter(LegacyESVersion.V_7_6_0)) {
if (version.onOrAfter(Version.V_3_0_0)) {
throw new IllegalArgumentException(
"The [edgeNGram] tokenizer name was deprecated pre 1.0. "
+ "Please use the tokenizer name to [edge_ngram] for indices created in versions 3.0 or higher instead."
);
} else {
deprecationLogger.deprecate(
"edgeNGram_tokenizer_deprecation",
"The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. "
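The tokenizer hunks above swap the old 7.6 gate for a 3.0 gate: indices created before 3.0 still get the deprecation warning for the `nGram`/`edgeNGram` aliases, while indices created on 3.0 or later are rejected. A small standalone sketch of that behavior, using plain Java in place of the real IndexSettings/DeprecationLogger plumbing:

```java
// Simplified model of the created-version gate used for the "nGram"/"edgeNGram" aliases;
// version ids use the same packed layout as the real constants.
public class TokenizerAliasGateSketch {
    static final int V_3_0_0 = 3000099;

    static void resolveAlias(String alias, String canonical, int indexCreatedVersionId) {
        if (indexCreatedVersionId >= V_3_0_0) {
            // New behavior: indices created on 3.0+ may only use the canonical name.
            throw new IllegalArgumentException(
                "The [" + alias + "] tokenizer name was deprecated pre 1.0. "
                    + "Please use [" + canonical + "] for indices created in 3.0 or later."
            );
        }
        // Pre-3.0 indices keep working but log a deprecation warning.
        System.out.println("WARN: [" + alias + "] is deprecated; use [" + canonical + "] instead.");
    }

    public static void main(String[] args) {
        resolveAlias("nGram", "ngram", 2070099);               // pre-3.0 index: warning only
        try {
            resolveAlias("edgeNGram", "edge_ngram", 3000099);  // 3.0 index: rejected
        } catch (IllegalArgumentException e) {
            System.out.println("Rejected: " + e.getMessage());
        }
    }
}
```

The same throw-or-warn split is applied twice in the file: once for the index-bound tokenizer registrations and once for the pre-configured alias shims.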
@@ -11,7 +11,6 @@
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.ConcatenateGraphFilter;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
import org.opensearch.index.IndexSettings;
@@ -24,11 +23,6 @@
* max_graph_expansions is 100 as the default value of 10_000 seems to be unnecessarily large and preserve_separator is false.
*
* <ul>
* <li>preserve_separator:
* For LegacyESVersion lesser than {@link LegacyESVersion#V_7_6_0} i.e. lucene versions lesser
* than {@link org.apache.lucene.util.Version#LUCENE_8_4_0}
* Whether {@link ConcatenateGraphFilter#SEP_LABEL} should separate the input tokens in the concatenated token.
* </li>
* <li>token_separator:
* Separator to use for concatenation. Must be a String with a single character or empty.
* If not present, {@link ConcatenateGraphTokenFilterFactory#DEFAULT_TOKEN_SEPARATOR} will be used.
@@ -59,17 +53,11 @@ public class ConcatenateGraphTokenFilterFactory extends AbstractTokenFilterFactory
ConcatenateGraphTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(indexSettings, name, settings);

if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_6_0)) { // i.e. Lucene 8.4.0
String separator = settings.get("token_separator", DEFAULT_TOKEN_SEPARATOR);
if (separator.length() > 1) {
throw new IllegalArgumentException("token_separator must be either empty or a single character");
}
tokenSeparator = separator.length() == 0 ? null : separator.charAt(0); // null means no separator while concatenating
} else {
boolean preserveSep = settings.getAsBoolean("preserve_separator", ConcatenateGraphFilter.DEFAULT_PRESERVE_SEP);
tokenSeparator = preserveSep ? ConcatenateGraphFilter.DEFAULT_TOKEN_SEPARATOR : null;
String separator = settings.get("token_separator", DEFAULT_TOKEN_SEPARATOR);
if (separator.length() > 1) {
throw new IllegalArgumentException("token_separator must be either empty or a single character");
}

tokenSeparator = separator.length() == 0 ? null : separator.charAt(0); // null means no separator while concatenating
maxGraphExpansions = settings.getAsInt("max_graph_expansions", DEFAULT_MAX_GRAPH_EXPANSIONS);
preservePositionIncrements = settings.getAsBoolean("preserve_position_increments", DEFAULT_PRESERVE_POSITION_INCREMENTS);
}
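With the pre-7.6 `preserve_separator` fallback removed above, `token_separator` is the only remaining control for how ConcatenateGraphTokenFilterFactory joins tokens: an empty string means no separator, a single character is used as-is, and anything longer is rejected. A quick illustrative sketch of just that parsing rule (a hypothetical helper, not the factory itself):

```java
// Standalone illustration of the surviving token_separator rule: empty -> no separator,
// one character -> that character, anything longer -> error.
public class TokenSeparatorSketch {
    static Character parseTokenSeparator(String separator) {
        if (separator.length() > 1) {
            throw new IllegalArgumentException("token_separator must be either empty or a single character");
        }
        return separator.isEmpty() ? null : separator.charAt(0); // null means no separator
    }

    public static void main(String[] args) {
        System.out.println(parseTokenSeparator("-")); // '-'
        System.out.println(parseTokenSeparator(""));  // null -> tokens concatenated directly
    }
}
```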
@@ -33,7 +33,6 @@
package org.opensearch.geo.search.aggregations.bucket.composite;

import org.apache.lucene.index.IndexReader;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.ParseField;
import org.opensearch.common.geo.GeoBoundingBox;
import org.opensearch.common.geo.GeoPoint;
@@ -175,9 +174,7 @@ public static void register(ValuesSourceRegistry.Builder builder) {
public GeoTileGridValuesSourceBuilder(StreamInput in) throws IOException {
super(in);
this.precision = in.readInt();
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) {
this.geoBoundingBox = new GeoBoundingBox(in);
}
this.geoBoundingBox = new GeoBoundingBox(in);
}

public GeoTileGridValuesSourceBuilder precision(int precision) {
@@ -198,9 +195,7 @@ public GeoTileGridValuesSourceBuilder format(String format) {
@Override
protected void innerWriteTo(StreamOutput out) throws IOException {
out.writeInt(precision);
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) {
geoBoundingBox.writeTo(out);
}
geoBoundingBox.writeTo(out);
}

@Override
@@ -32,7 +32,6 @@

package org.opensearch.geo.search.aggregations.bucket.geogrid;

import org.opensearch.LegacyESVersion;
import org.opensearch.OpenSearchException;
import org.opensearch.common.ParseField;
import org.opensearch.common.geo.GeoBoundingBox;
@@ -125,9 +124,7 @@ public GeoGridAggregationBuilder(StreamInput in) throws IOException {
precision = in.readVInt();
requiredSize = in.readVInt();
shardSize = in.readVInt();
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) {
geoBoundingBox = new GeoBoundingBox(in);
}
geoBoundingBox = new GeoBoundingBox(in);
}

@Override
@@ -140,9 +137,7 @@ protected void innerWriteTo(StreamOutput out) throws IOException {
out.writeVInt(precision);
out.writeVInt(requiredSize);
out.writeVInt(shardSize);
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) {
geoBoundingBox.writeTo(out);
}
geoBoundingBox.writeTo(out);
}

/**
@@ -32,7 +32,6 @@

package org.opensearch.index.rankeval;

import org.opensearch.LegacyESVersion;
import org.opensearch.action.ActionRequest;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.IndicesRequest;
@@ -69,9 +68,7 @@ public RankEvalRequest(RankEvalSpec rankingEvaluationSpec, String[] indices) {
rankingEvaluationSpec = new RankEvalSpec(in);
indices = in.readStringArray();
indicesOptions = IndicesOptions.readIndicesOptions(in);
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) {
searchType = SearchType.fromId(in.readByte());
}
searchType = SearchType.fromId(in.readByte());
}

RankEvalRequest() {}
@@ -150,9 +147,7 @@ public void writeTo(StreamOutput out) throws IOException {
rankingEvaluationSpec.writeTo(out);
out.writeStringArray(indices);
indicesOptions.writeIndicesOptions(out);
if (out.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) {
out.writeByte(searchType.id());
}
out.writeByte(searchType.id());
}

@Override
@@ -764,12 +764,8 @@ public void testAutoExpandIndicesDuringRollingUpgrade() throws Exception {

final int numberOfReplicas = Integer.parseInt(
getIndexSettingsAsMap(indexName).get(IndexMetadata.SETTING_NUMBER_OF_REPLICAS).toString());
if (minimumNodeVersion.onOrAfter(LegacyESVersion.V_7_6_0)) {
assertEquals(nodes.size() - 2, numberOfReplicas);
ensureGreen(indexName);
} else {
assertEquals(nodes.size() - 1, numberOfReplicas);
}
assertEquals(nodes.size() - 2, numberOfReplicas);
ensureGreen(indexName);
}

public void testSoftDeletesDisabledWarning() throws Exception {
5 changes: 0 additions & 5 deletions server/src/main/java/org/opensearch/LegacyESVersion.java
@@ -48,11 +48,6 @@
*/
public class LegacyESVersion extends Version {

public static final LegacyESVersion V_7_6_0 = new LegacyESVersion(7060099, org.apache.lucene.util.Version.LUCENE_8_4_0);
public static final LegacyESVersion V_7_6_1 = new LegacyESVersion(7060199, org.apache.lucene.util.Version.LUCENE_8_4_0);
public static final LegacyESVersion V_7_6_2 = new LegacyESVersion(7060299, org.apache.lucene.util.Version.LUCENE_8_4_0);
public static final LegacyESVersion V_7_7_0 = new LegacyESVersion(7070099, org.apache.lucene.util.Version.LUCENE_8_5_1);
public static final LegacyESVersion V_7_7_1 = new LegacyESVersion(7070199, org.apache.lucene.util.Version.LUCENE_8_5_1);
public static final LegacyESVersion V_7_8_0 = new LegacyESVersion(7080099, org.apache.lucene.util.Version.LUCENE_8_5_1);
public static final LegacyESVersion V_7_8_1 = new LegacyESVersion(7080199, org.apache.lucene.util.Version.LUCENE_8_5_1);
public static final LegacyESVersion V_7_9_0 = new LegacyESVersion(7090099, org.apache.lucene.util.Version.LUCENE_8_6_0);
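For readers of the deleted constants above: the packed ids appear to follow the usual major * 1,000,000 + minor * 10,000 + revision * 100 + build layout, so 7060099 is 7.6.0 and 7070199 is 7.7.1, with 99 as the release-build suffix. A tiny decoding sketch, assuming that layout:

```java
// Decode the packed version ids used by the removed constants
// (assumed layout: major * 1_000_000 + minor * 10_000 + revision * 100 + build).
public class VersionIdDecoder {
    static String decode(int id) {
        int major = id / 1_000_000;
        int minor = (id / 10_000) % 100;
        int revision = (id / 100) % 100;
        int build = id % 100;
        return major + "." + minor + "." + revision + " (build " + build + ")";
    }

    public static void main(String[] args) {
        System.out.println(decode(7060099)); // 7.6.0 (build 99)
        System.out.println(decode(7070199)); // 7.7.1 (build 99)
    }
}
```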
@@ -32,7 +32,6 @@

package org.opensearch.action.admin.cluster.node.info;

import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.nodes.BaseNodesRequest;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
@@ -63,22 +62,7 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
public NodesInfoRequest(StreamInput in) throws IOException {
super(in);
requestedMetrics.clear();
if (in.getVersion().before(LegacyESVersion.V_7_7_0)) {
// prior to version 8.x, a NodesInfoRequest was serialized as a list
// of booleans in a fixed order
optionallyAddMetric(in.readBoolean(), Metric.SETTINGS.metricName());
optionallyAddMetric(in.readBoolean(), Metric.OS.metricName());
optionallyAddMetric(in.readBoolean(), Metric.PROCESS.metricName());
optionallyAddMetric(in.readBoolean(), Metric.JVM.metricName());
optionallyAddMetric(in.readBoolean(), Metric.THREAD_POOL.metricName());
optionallyAddMetric(in.readBoolean(), Metric.TRANSPORT.metricName());
optionallyAddMetric(in.readBoolean(), Metric.HTTP.metricName());
optionallyAddMetric(in.readBoolean(), Metric.PLUGINS.metricName());
optionallyAddMetric(in.readBoolean(), Metric.INGEST.metricName());
optionallyAddMetric(in.readBoolean(), Metric.INDICES.metricName());
} else {
requestedMetrics.addAll(Arrays.asList(in.readStringArray()));
}
requestedMetrics.addAll(Arrays.asList(in.readStringArray()));
}

/**
@@ -165,22 +149,7 @@ private void optionallyAddMetric(boolean addMetric, String metricName) {
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (out.getVersion().before(LegacyESVersion.V_7_7_0)) {
// prior to version 8.x, a NodesInfoRequest was serialized as a list
// of booleans in a fixed order
out.writeBoolean(Metric.SETTINGS.containedIn(requestedMetrics));
out.writeBoolean(Metric.OS.containedIn(requestedMetrics));
out.writeBoolean(Metric.PROCESS.containedIn(requestedMetrics));
out.writeBoolean(Metric.JVM.containedIn(requestedMetrics));
out.writeBoolean(Metric.THREAD_POOL.containedIn(requestedMetrics));
out.writeBoolean(Metric.TRANSPORT.containedIn(requestedMetrics));
out.writeBoolean(Metric.HTTP.containedIn(requestedMetrics));
out.writeBoolean(Metric.PLUGINS.containedIn(requestedMetrics));
out.writeBoolean(Metric.INGEST.containedIn(requestedMetrics));
out.writeBoolean(Metric.INDICES.containedIn(requestedMetrics));
} else {
out.writeStringArray(requestedMetrics.toArray(new String[0]));
}
out.writeStringArray(requestedMetrics.toArray(new String[0]));
}

/**
@@ -32,7 +32,6 @@

package org.opensearch.action.admin.cluster.node.reload;

import org.opensearch.LegacyESVersion;
import org.opensearch.action.support.nodes.BaseNodesRequest;
import org.opensearch.common.io.stream.StreamInput;

@@ -68,18 +67,16 @@ public NodesReloadSecureSettingsRequest() {

public NodesReloadSecureSettingsRequest(StreamInput in) throws IOException {
super(in);
if (in.getVersion().onOrAfter(LegacyESVersion.V_7_7_0)) {
final BytesReference bytesRef = in.readOptionalBytesReference();
if (bytesRef != null) {
byte[] bytes = BytesReference.toBytes(bytesRef);
try {
this.secureSettingsPassword = new SecureString(CharArrays.utf8BytesToChars(bytes));
} finally {
Arrays.fill(bytes, (byte) 0);
}
} else {
this.secureSettingsPassword = null;
final BytesReference bytesRef = in.readOptionalBytesReference();
if (bytesRef != null) {
byte[] bytes = BytesReference.toBytes(bytesRef);
try {
this.secureSettingsPassword = new SecureString(CharArrays.utf8BytesToChars(bytes));
} finally {
Arrays.fill(bytes, (byte) 0);
}
} else {
this.secureSettingsPassword = null;
}
}

@@ -32,7 +32,6 @@

package org.opensearch.action.admin.cluster.node.stats;

import org.opensearch.LegacyESVersion;
import org.opensearch.action.admin.indices.stats.CommonStatsFlags;
import org.opensearch.action.support.nodes.BaseNodesRequest;
import org.opensearch.common.io.stream.StreamInput;
@@ -64,22 +63,7 @@ public NodesStatsRequest(StreamInput in) throws IOException {

indices = new CommonStatsFlags(in);
requestedMetrics.clear();
if (in.getVersion().before(LegacyESVersion.V_7_7_0)) {
optionallyAddMetric(in.readBoolean(), Metric.OS.metricName());
optionallyAddMetric(in.readBoolean(), Metric.PROCESS.metricName());
optionallyAddMetric(in.readBoolean(), Metric.JVM.metricName());
optionallyAddMetric(in.readBoolean(), Metric.THREAD_POOL.metricName());
optionallyAddMetric(in.readBoolean(), Metric.FS.metricName());
optionallyAddMetric(in.readBoolean(), Metric.TRANSPORT.metricName());
optionallyAddMetric(in.readBoolean(), Metric.HTTP.metricName());
optionallyAddMetric(in.readBoolean(), Metric.BREAKER.metricName());
optionallyAddMetric(in.readBoolean(), Metric.SCRIPT.metricName());
optionallyAddMetric(in.readBoolean(), Metric.DISCOVERY.metricName());
optionallyAddMetric(in.readBoolean(), Metric.INGEST.metricName());
optionallyAddMetric(in.readBoolean(), Metric.ADAPTIVE_SELECTION.metricName());
} else {
requestedMetrics.addAll(in.readStringList());
}
requestedMetrics.addAll(in.readStringList());
}

/**
@@ -200,22 +184,7 @@ public void writeTo(StreamOutput out) throws IOException {
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
indices.writeTo(out);
if (out.getVersion().before(LegacyESVersion.V_7_7_0)) {
out.writeBoolean(Metric.OS.containedIn(requestedMetrics));
out.writeBoolean(Metric.PROCESS.containedIn(requestedMetrics));
out.writeBoolean(Metric.JVM.containedIn(requestedMetrics));
out.writeBoolean(Metric.THREAD_POOL.containedIn(requestedMetrics));
out.writeBoolean(Metric.FS.containedIn(requestedMetrics));
out.writeBoolean(Metric.TRANSPORT.containedIn(requestedMetrics));
out.writeBoolean(Metric.HTTP.containedIn(requestedMetrics));
out.writeBoolean(Metric.BREAKER.containedIn(requestedMetrics));
out.writeBoolean(Metric.SCRIPT.containedIn(requestedMetrics));
out.writeBoolean(Metric.DISCOVERY.containedIn(requestedMetrics));
out.writeBoolean(Metric.INGEST.containedIn(requestedMetrics));
out.writeBoolean(Metric.ADAPTIVE_SELECTION.containedIn(requestedMetrics));
} else {
out.writeStringArray(requestedMetrics.toArray(new String[0]));
}
out.writeStringArray(requestedMetrics.toArray(new String[0]));
}

/**
Diff truncated; the remaining changed files are not shown here.
