Skip to content

Commit

Permalink
remove backcompat handling of 6.2.x versions
Browse files Browse the repository at this point in the history
relates to refactoring initiative elastic#41164.
  • Loading branch information
talevy committed May 9, 2019
1 parent 8270c80 commit 581d44d
Show file tree
Hide file tree
Showing 22 changed files with 76 additions and 230 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -1006,7 +1006,7 @@ private enum ElasticsearchExceptionHandle {
UNKNOWN_NAMED_OBJECT_EXCEPTION(org.elasticsearch.common.xcontent.UnknownNamedObjectException.class,
org.elasticsearch.common.xcontent.UnknownNamedObjectException::new, 148, UNKNOWN_VERSION_ADDED),
TOO_MANY_BUCKETS_EXCEPTION(MultiBucketConsumerService.TooManyBucketsException.class,
MultiBucketConsumerService.TooManyBucketsException::new, 149, Version.V_6_2_0),
MultiBucketConsumerService.TooManyBucketsException::new, 149, UNKNOWN_VERSION_ADDED),
COORDINATION_STATE_REJECTED_EXCEPTION(org.elasticsearch.cluster.coordination.CoordinationStateRejectedException.class,
org.elasticsearch.cluster.coordination.CoordinationStateRejectedException::new, 150, Version.V_7_0_0),
SNAPSHOT_IN_PROGRESS_EXCEPTION(org.elasticsearch.snapshots.SnapshotInProgressException.class,
Expand Down
20 changes: 0 additions & 20 deletions server/src/main/java/org/elasticsearch/Version.java
Original file line number Diff line number Diff line change
Expand Up @@ -64,16 +64,6 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final Version V_6_1_4 = new Version(V_6_1_4_ID, org.apache.lucene.util.Version.LUCENE_7_1_0);
// The below version is missing from the 7.3 JAR
private static final org.apache.lucene.util.Version LUCENE_7_2_1 = org.apache.lucene.util.Version.fromBits(7, 2, 1);
public static final int V_6_2_0_ID = 6020099;
public static final Version V_6_2_0 = new Version(V_6_2_0_ID, LUCENE_7_2_1);
public static final int V_6_2_1_ID = 6020199;
public static final Version V_6_2_1 = new Version(V_6_2_1_ID, LUCENE_7_2_1);
public static final int V_6_2_2_ID = 6020299;
public static final Version V_6_2_2 = new Version(V_6_2_2_ID, LUCENE_7_2_1);
public static final int V_6_2_3_ID = 6020399;
public static final Version V_6_2_3 = new Version(V_6_2_3_ID, LUCENE_7_2_1);
public static final int V_6_2_4_ID = 6020499;
public static final Version V_6_2_4 = new Version(V_6_2_4_ID, LUCENE_7_2_1);
public static final int V_6_3_0_ID = 6030099;
public static final Version V_6_3_0 = new Version(V_6_3_0_ID, org.apache.lucene.util.Version.LUCENE_7_3_1);
public static final int V_6_3_1_ID = 6030199;
Expand Down Expand Up @@ -192,16 +182,6 @@ public static Version fromId(int id) {
return V_6_3_1;
case V_6_3_0_ID:
return V_6_3_0;
case V_6_2_4_ID:
return V_6_2_4;
case V_6_2_3_ID:
return V_6_2_3;
case V_6_2_2_ID:
return V_6_2_2;
case V_6_2_1_ID:
return V_6_2_1;
case V_6_2_0_ID:
return V_6_2_0;
case V_6_1_4_ID:
return V_6_1_4;
case V_6_1_3_ID:
Expand Down
19 changes: 2 additions & 17 deletions server/src/main/java/org/elasticsearch/index/store/Store.java
Original file line number Diff line number Diff line change
Expand Up @@ -1550,23 +1550,8 @@ public void trimUnsafeCommits(final long lastSyncedGlobalCheckpoint, final long
final IndexCommit lastIndexCommitCommit = existingCommits.get(existingCommits.size() - 1);
final String translogUUID = lastIndexCommitCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY);
final IndexCommit startingIndexCommit;
// We may not have a safe commit if an index was created before v6.2; and if there is a snapshotted commit whose translog
// is not retained but whose max_seqno is at most the global checkpoint, we may mistakenly select it as a starting commit.
// To avoid this issue, we only select index commits whose translog is fully retained.
if (indexVersionCreated.before(org.elasticsearch.Version.V_6_2_0)) {
final List<IndexCommit> recoverableCommits = new ArrayList<>();
for (IndexCommit commit : existingCommits) {
if (minRetainedTranslogGen <= Long.parseLong(commit.getUserData().get(Translog.TRANSLOG_GENERATION_KEY))) {
recoverableCommits.add(commit);
}
}
assert recoverableCommits.isEmpty() == false : "No commit point with translog found; " +
"commits [" + existingCommits + "], minRetainedTranslogGen [" + minRetainedTranslogGen + "]";
startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(recoverableCommits, lastSyncedGlobalCheckpoint);
} else {
// TODO: Asserts the starting commit is a safe commit once peer-recovery sets global checkpoint.
startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(existingCommits, lastSyncedGlobalCheckpoint);
}
// TODO: Asserts the starting commit is a safe commit once peer-recovery sets global checkpoint.
startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(existingCommits, lastSyncedGlobalCheckpoint);

if (translogUUID.equals(startingIndexCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY)) == false) {
throw new IllegalStateException("starting commit translog uuid ["
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -594,10 +594,6 @@ static final class PreSyncedFlushResponse extends TransportResponse {
this.existingSyncId = existingSyncId;
}

boolean includeNumDocs(Version version) {
return version.onOrAfter(Version.V_6_2_2);
}

boolean includeExistingSyncId(Version version) {
return version.onOrAfter(Version.V_6_3_0);
}
Expand All @@ -606,11 +602,7 @@ boolean includeExistingSyncId(Version version) {
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
commitId = new Engine.CommitId(in);
if (includeNumDocs(in.getVersion())) {
numDocs = in.readInt();
} else {
numDocs = UNKNOWN_NUM_DOCS;
}
numDocs = in.readInt();
if (includeExistingSyncId(in.getVersion())) {
existingSyncId = in.readOptionalString();
}
Expand All @@ -620,9 +612,7 @@ public void readFrom(StreamInput in) throws IOException {
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
commitId.writeTo(out);
if (includeNumDocs(out.getVersion())) {
out.writeInt(numDocs);
}
out.writeInt(numDocs);
if (includeExistingSyncId(out.getVersion())) {
out.writeOptionalString(existingSyncId);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -365,7 +365,7 @@ public void testCircuitBreakingException() throws IOException {
}

public void testTooManyBucketsException() throws IOException {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_2_0, Version.CURRENT);
Version version = VersionUtils.randomCompatibleVersion(random(), Version.CURRENT);
MultiBucketConsumerService.TooManyBucketsException ex =
serialize(new MultiBucketConsumerService.TooManyBucketsException("Too many buckets", 100), version);
assertEquals("Too many buckets", ex.getMessage());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -197,8 +197,8 @@ public void testLuceneVersionOnUnknownVersions() {
version.luceneVersion);

// between two known versions, should use the lucene version of the previous version
version = Version.fromString("6.2.50");
assertEquals(VersionUtils.getPreviousVersion(Version.V_6_2_4).luceneVersion, version.luceneVersion);
version = Version.fromString("8.0.50");
assertEquals(VersionUtils.getPreviousVersion(Version.V_8_0_0).luceneVersion, version.luceneVersion);

// too old version, major should be the oldest supported lucene version minus 1
version = Version.fromString("5.2.1");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,14 +68,16 @@ public void testThatInstancesAreCachedAndReused() {
assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT),
PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT));
// same es version should be cached
assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_2_1),
PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_2_1));
Version versionA = randomVersion(random());
Version versionB = randomValueOtherThan(versionA, () -> randomVersion(random()));
assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(versionA),
PreBuiltAnalyzers.STANDARD.getAnalyzer(versionA));
assertNotSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_0_0),
PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_0_1));

// Same Lucene version should be cached:
assertSame(PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_6_2_1),
PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_6_2_2));
assertSame(PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_7_2_0),
PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_8_0_0));
}

public void testThatAnalyzersAreUsedInMapping() throws IOException {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@
import org.elasticsearch.search.internal.AliasFilter;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;

import java.io.IOException;
import java.util.ArrayList;
Expand Down Expand Up @@ -461,7 +462,7 @@ public void testSerializationBackcompat() throws IOException {

SliceBuilder copy62 = copyWriteable(sliceBuilder,
new NamedWriteableRegistry(Collections.emptyList()),
SliceBuilder::new, Version.V_6_2_0);
SliceBuilder::new, VersionUtils.getPreviousVersion(Version.V_6_3_0));
assertEquals(sliceBuilder, copy62);

SliceBuilder copy63 = copyWriteable(copy62,
Expand Down Expand Up @@ -496,7 +497,8 @@ public void testToFilterWithRouting() throws IOException {
assertEquals(new DocValuesSliceQuery("field", 6, 10), query);
query = builder.toFilter(clusterService, createRequest(1, Strings.EMPTY_ARRAY, "foo"), context, Version.CURRENT);
assertEquals(new DocValuesSliceQuery("field", 6, 10), query);
query = builder.toFilter(clusterService, createRequest(1, Strings.EMPTY_ARRAY, "foo"), context, Version.V_6_2_0);
query = builder.toFilter(clusterService, createRequest(1, Strings.EMPTY_ARRAY, "foo"), context,
VersionUtils.getPreviousVersion(Version.V_6_3_0));
assertEquals(new DocValuesSliceQuery("field", 1, 2), query);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.yaml.YamlXContent;
import org.elasticsearch.test.VersionUtils;

import java.io.IOException;
import java.util.Map;
Expand Down Expand Up @@ -93,7 +94,7 @@ public void testParseTestSectionWithDoSetAndSkipSectionsNoSkip() throws Exceptio
parser = createParser(YamlXContent.yamlXContent,
"\"First test section\": \n" +
" - skip:\n" +
" version: \"6.0.0 - 6.2.0\"\n" +
" version: \"" + VersionUtils.getPreviousVersion() + " - " + Version.CURRENT + "\"\n" +
" reason: \"Update doesn't return metadata fields, waiting for #3259\"\n" +
" - do :\n" +
" catch: missing\n" +
Expand All @@ -108,9 +109,9 @@ public void testParseTestSectionWithDoSetAndSkipSectionsNoSkip() throws Exceptio
assertThat(testSection, notNullValue());
assertThat(testSection.getName(), equalTo("First test section"));
assertThat(testSection.getSkipSection(), notNullValue());
assertThat(testSection.getSkipSection().getLowerVersion(), equalTo(Version.V_6_0_0));
assertThat(testSection.getSkipSection().getLowerVersion(), equalTo(VersionUtils.getPreviousVersion()));
assertThat(testSection.getSkipSection().getUpperVersion(),
equalTo(Version.V_6_2_0));
equalTo(Version.CURRENT));
assertThat(testSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259"));
assertThat(testSection.getExecutableSections().size(), equalTo(2));
DoSection doSection = (DoSection)testSection.getExecutableSections().get(0);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -92,11 +92,7 @@ public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
jobId = in.readString();
update = new JobUpdate(in);
if (in.getVersion().onOrAfter(Version.V_6_2_2)) {
isInternal = in.readBoolean();
} else {
isInternal = false;
}
isInternal = in.readBoolean();
if (in.getVersion().onOrAfter(Version.V_6_3_0) && in.getVersion().before(Version.V_7_0_0)) {
in.readBoolean(); // was waitForAck
}
Expand All @@ -107,9 +103,7 @@ public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(jobId);
update.writeTo(out);
if (out.getVersion().onOrAfter(Version.V_6_2_2)) {
out.writeBoolean(isInternal);
}
out.writeBoolean(isInternal);
if (out.getVersion().onOrAfter(Version.V_6_3_0) && out.getVersion().before(Version.V_7_0_0)) {
out.writeBoolean(false); // was waitForAck
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -121,10 +121,8 @@ public Request(StreamInput in) throws IOException {
if (in.readBoolean()) {
detectorUpdates = in.readList(JobUpdate.DetectorUpdate::new);
}
if (in.getVersion().onOrAfter(Version.V_6_2_0)) {
filter = in.readOptionalWriteable(MlFilter::new);
updateScheduledEvents = in.readBoolean();
}
filter = in.readOptionalWriteable(MlFilter::new);
updateScheduledEvents = in.readBoolean();
}

@Override
Expand All @@ -136,10 +134,8 @@ public void writeTo(StreamOutput out) throws IOException {
if (hasDetectorUpdates) {
out.writeList(detectorUpdates);
}
if (out.getVersion().onOrAfter(Version.V_6_2_0)) {
out.writeOptionalWriteable(filter);
out.writeBoolean(updateScheduledEvents);
}
out.writeOptionalWriteable(filter);
out.writeBoolean(updateScheduledEvents);
}

public Request(String jobId, ModelPlotConfig modelPlotConfig, List<JobUpdate.DetectorUpdate> detectorUpdates, MlFilter filter,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -222,16 +222,8 @@ public DatafeedConfig(StreamInput in) throws IOException {
}
this.scrollSize = in.readOptionalVInt();
this.chunkingConfig = in.readOptionalWriteable(ChunkingConfig::new);
if (in.getVersion().onOrAfter(Version.V_6_2_0)) {
this.headers = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readString));
} else {
this.headers = Collections.emptyMap();
}
if (in.getVersion().onOrAfter(Version.V_6_6_0)) {
delayedDataCheckConfig = in.readOptionalWriteable(DelayedDataCheckConfig::new);
} else {
delayedDataCheckConfig = DelayedDataCheckConfig.defaultDelayedDataCheckConfig();
}
this.headers = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readString));
delayedDataCheckConfig = in.readOptionalWriteable(DelayedDataCheckConfig::new);
}

/**
Expand Down Expand Up @@ -432,9 +424,7 @@ public void writeTo(StreamOutput out) throws IOException {
}
out.writeOptionalVInt(scrollSize);
out.writeOptionalWriteable(chunkingConfig);
if (out.getVersion().onOrAfter(Version.V_6_2_0)) {
out.writeMap(headers, StreamOutput::writeString, StreamOutput::writeString);
}
out.writeMap(headers, StreamOutput::writeString, StreamOutput::writeString);
if (out.getVersion().onOrAfter(Version.V_6_6_0)) {
out.writeOptionalWriteable(delayedDataCheckConfig);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
*/
package org.elasticsearch.xpack.core.ml.job.config;

import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
Expand Down Expand Up @@ -126,11 +125,7 @@ public AnalysisConfig(StreamInput in) throws IOException {
bucketSpan = in.readTimeValue();
categorizationFieldName = in.readOptionalString();
categorizationFilters = in.readBoolean() ? Collections.unmodifiableList(in.readStringList()) : null;
if (in.getVersion().onOrAfter(Version.V_6_2_0)) {
categorizationAnalyzerConfig = in.readOptionalWriteable(CategorizationAnalyzerConfig::new);
} else {
categorizationAnalyzerConfig = null;
}
categorizationAnalyzerConfig = in.readOptionalWriteable(CategorizationAnalyzerConfig::new);
latency = in.readOptionalTimeValue();
summaryCountFieldName = in.readOptionalString();
detectors = Collections.unmodifiableList(in.readList(Detector::new));
Expand All @@ -149,9 +144,7 @@ public void writeTo(StreamOutput out) throws IOException {
} else {
out.writeBoolean(false);
}
if (out.getVersion().onOrAfter(Version.V_6_2_0)) {
out.writeOptionalWriteable(categorizationAnalyzerConfig);
}
out.writeOptionalWriteable(categorizationAnalyzerConfig);
out.writeOptionalTimeValue(latency);
out.writeOptionalString(summaryCountFieldName);
out.writeList(detectors);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -138,12 +138,8 @@ public Bucket(StreamInput in) throws IOException {
if (in.getVersion().before(Version.V_6_5_0)) {
in.readList(Bucket::readOldPerPartitionNormalization);
}
if (in.getVersion().onOrAfter(Version.V_6_2_0)) {
scheduledEvents = in.readStringList();
if (scheduledEvents.isEmpty()) {
scheduledEvents = Collections.emptyList();
}
} else {
scheduledEvents = in.readStringList();
if (scheduledEvents.isEmpty()) {
scheduledEvents = Collections.emptyList();
}
}
Expand All @@ -164,9 +160,7 @@ public void writeTo(StreamOutput out) throws IOException {
if (out.getVersion().before(Version.V_6_5_0)) {
out.writeList(Collections.emptyList());
}
if (out.getVersion().onOrAfter(Version.V_6_2_0)) {
out.writeStringCollection(scheduledEvents);
}
out.writeStringCollection(scheduledEvents);
}

@Override
Expand Down
Loading

0 comments on commit 581d44d

Please sign in to comment.