Remove 2.x backward compatibility of mappings.
For the record, I also had to remove the geo-hash cell and geo-distance range
queries to make the code compile. These queries already throw an exception in
all cases with 5.x indices, so removing them does not hurt anymore.

I also had to rename all 2.x bwc indices from `index-${version}` to
`unsupported-${version}` to make `OldIndexBackwardCompatibilityIT`
happy.
jpountz committed Nov 30, 2016
1 parent 235e6ac commit b53acfe
Showing 279 changed files with 466 additions and 24,538 deletions.
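
As the commit message notes, the removed geo-hash cell and geo-distance range queries already failed unconditionally against 5.x indices, so deleting them changes no observable behaviour. A minimal sketch of that kind of always-failing query stub, using hypothetical class and message names rather than the actual Elasticsearch code:

```java
// Sketch only: a query type kept for parsing but never executable.
// Class name and exception wording are illustrative, not the real Elasticsearch types.
final class GeoHashCellQueryStub {
    private final String fieldName;

    GeoHashCellQueryStub(String fieldName) {
        this.fieldName = fieldName;
    }

    // Building the Lucene-level query fails unconditionally, which is why the
    // whole class can be removed without changing behaviour for 5.x indices.
    Object toQuery() {
        throw new UnsupportedOperationException(
            "geohash_cell query is no longer supported on field [" + fieldName + "]");
    }
}
```

Deleting such a stub only removes parsing support; execution was already impossible.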
@@ -41,7 +41,6 @@
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.DateFieldMapper;
- import org.elasticsearch.index.mapper.LegacyDateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.StringFieldType;
@@ -336,11 +335,7 @@ private Query getRangeQuerySingle(String field, String part1, String part2,
BytesRef part1Binary = part1 == null ? null : getAnalyzer().normalize(field, part1);
BytesRef part2Binary = part2 == null ? null : getAnalyzer().normalize(field, part2);
Query rangeQuery;
- if (currentFieldType instanceof LegacyDateFieldMapper.DateFieldType && settings.timeZone() != null) {
- LegacyDateFieldMapper.DateFieldType dateFieldType = (LegacyDateFieldMapper.DateFieldType) this.currentFieldType;
- rangeQuery = dateFieldType.rangeQuery(part1Binary, part2Binary,
- startInclusive, endInclusive, settings.timeZone(), null, context);
- } else if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) {
+ if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) {
DateFieldMapper.DateFieldType dateFieldType = (DateFieldMapper.DateFieldType) this.currentFieldType;
rangeQuery = dateFieldType.rangeQuery(part1Binary, part2Binary,
startInclusive, endInclusive, settings.timeZone(), null, context);
20 changes: 3 additions & 17 deletions core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java
@@ -305,8 +305,6 @@ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Null
String parent = null;
FetchSourceContext fetchSourceContext = defaultFetchSourceContext;
String[] fields = defaultFields;
- String timestamp = null;
- TimeValue ttl = null;
String opType = null;
long version = Versions.MATCH_ANY;
VersionType versionType = VersionType.INTERNAL;
@@ -336,14 +334,6 @@ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Null
routing = parser.text();
} else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
parent = parser.text();
- } else if ("_timestamp".equals(currentFieldName) || "timestamp".equals(currentFieldName)) {
- timestamp = parser.text();
- } else if ("_ttl".equals(currentFieldName) || "ttl".equals(currentFieldName)) {
- if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
- ttl = TimeValue.parseTimeValue(parser.text(), null, currentFieldName);
- } else {
- ttl = new TimeValue(parser.longValue());
- }
} else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) {
opType = parser.text();
} else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
@@ -394,15 +384,15 @@ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Null
// of index request.
if ("index".equals(action)) {
if (opType == null) {
- internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType)
+ internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).version(version).versionType(versionType)
.setPipeline(pipeline).source(data.slice(from, nextMarker - from)), payload);
} else {
- internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType)
+ internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).version(version).versionType(versionType)
.create("create".equals(opType)).setPipeline(pipeline)
.source(data.slice(from, nextMarker - from)), payload);
}
} else if ("create".equals(action)) {
- internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType)
+ internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).version(version).versionType(versionType)
.create(true).setPipeline(pipeline)
.source(data.slice(from, nextMarker - from)), payload);
} else if ("update".equals(action)) {
@@ -420,15 +410,11 @@ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Null

IndexRequest upsertRequest = updateRequest.upsertRequest();
if (upsertRequest != null) {
- upsertRequest.timestamp(timestamp);
- upsertRequest.ttl(ttl);
upsertRequest.version(version);
upsertRequest.versionType(versionType);
}
IndexRequest doc = updateRequest.doc();
if (doc != null) {
- doc.timestamp(timestamp);
- doc.ttl(ttl);
doc.version(version);
doc.versionType(versionType);
}
@@ -20,10 +20,10 @@
package org.elasticsearch.action.index;

import org.elasticsearch.ElasticsearchGenerationException;
+ import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.RoutingMissingException;
- import org.elasticsearch.action.TimestampParsingException;
import org.elasticsearch.action.support.replication.ReplicatedWriteRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.metadata.MappingMetaData;
@@ -41,7 +41,6 @@
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
- import org.elasticsearch.index.mapper.TimestampFieldMapper;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
@@ -75,10 +74,6 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
private String routing;
@Nullable
private String parent;
- @Nullable
- private String timestamp;
- @Nullable
- private TimeValue ttl;

private BytesReference source;

@@ -164,12 +159,6 @@ public ActionRequestValidationException validate() {
validationException = addValidationError("version type [force] may no longer be used", validationException);
}

- if (ttl != null) {
- if (ttl.millis() < 0) {
- validationException = addValidationError("ttl must not be negative", validationException);
- }
- }

if (id != null && id.getBytes(StandardCharsets.UTF_8).length > 512) {
validationException = addValidationError("id is too long, must be no longer than 512 bytes but was: " +
id.getBytes(StandardCharsets.UTF_8).length, validationException);
@@ -265,49 +254,6 @@ public String parent() {
return this.parent;
}

- /**
- * Sets the timestamp either as millis since the epoch, or, in the configured date format.
- */
- public IndexRequest timestamp(String timestamp) {
- this.timestamp = timestamp;
- return this;
- }

- public String timestamp() {
- return this.timestamp;
- }

- /**
- * Sets the ttl value as a time value expression.
- */
- public IndexRequest ttl(String ttl) {
- this.ttl = TimeValue.parseTimeValue(ttl, null, "ttl");
- return this;
- }

- /**
- * Sets the ttl as a {@link TimeValue} instance.
- */
- public IndexRequest ttl(TimeValue ttl) {
- this.ttl = ttl;
- return this;
- }

- /**
- * Sets the relative ttl value in milliseconds. It musts be greater than 0 as it makes little sense otherwise.
- */
- public IndexRequest ttl(long ttl) {
- this.ttl = new TimeValue(ttl);
- return this;
- }

- /**
- * Returns the ttl as a {@link TimeValue}
- */
- public TimeValue ttl() {
- return this.ttl;
- }

/**
* Sets the ingest pipeline to be executed before indexing the document
*/
@@ -537,11 +483,6 @@ public VersionType versionType() {


public void process(@Nullable MappingMetaData mappingMd, boolean allowIdGeneration, String concreteIndex) {
- // resolve timestamp if provided externally
- if (timestamp != null) {
- timestamp = MappingMetaData.Timestamp.parseStringTimestamp(timestamp,
- mappingMd != null ? mappingMd.timestamp().dateTimeFormatter() : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER);
- }
if (mappingMd != null) {
// might as well check for routing here
if (mappingMd.routing().required() && routing == null) {
@@ -563,30 +504,6 @@ public void process(@Nullable MappingMetaData mappingMd, boolean allowIdGenerati
autoGeneratedTimestamp = Math.max(0, System.currentTimeMillis()); // extra paranoia
id(UUIDs.base64UUID());
}

- // generate timestamp if not provided, we always have one post this stage...
- if (timestamp == null) {
- String defaultTimestamp = TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP;
- if (mappingMd != null && mappingMd.timestamp() != null) {
- // If we explicitly ask to reject null timestamp
- if (mappingMd.timestamp().ignoreMissing() != null && mappingMd.timestamp().ignoreMissing() == false) {
- throw new TimestampParsingException("timestamp is required by mapping");
- }
- defaultTimestamp = mappingMd.timestamp().defaultTimestamp();
- }

- if (defaultTimestamp.equals(TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP)) {
- timestamp = Long.toString(System.currentTimeMillis());
- } else {
- // if we are here, the defaultTimestamp is not
- // TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP but
- // this can only happen if defaultTimestamp was
- // assigned again because mappingMd and
- // mappingMd#timestamp() are not null
- assert mappingMd != null;
- timestamp = MappingMetaData.Timestamp.parseStringTimestamp(defaultTimestamp, mappingMd.timestamp().dateTimeFormatter());
- }
- }
}

/* resolve the routing if needed */
@@ -601,8 +518,10 @@ public void readFrom(StreamInput in) throws IOException {
id = in.readOptionalString();
routing = in.readOptionalString();
parent = in.readOptionalString();
- timestamp = in.readOptionalString();
- ttl = in.readOptionalWriteable(TimeValue::new);
+ if (in.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) {
+ in.readOptionalString(); // timestamp
+ in.readOptionalWriteable(TimeValue::new); // ttl
+ }
source = in.readBytesReference();
opType = OpType.fromId(in.readByte());
version = in.readLong();
Expand All @@ -619,8 +538,10 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(id);
out.writeOptionalString(routing);
out.writeOptionalString(parent);
- out.writeOptionalString(timestamp);
- out.writeOptionalWriteable(ttl);
+ if (out.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) {
+ out.writeOptionalString(null);
+ out.writeOptionalWriteable(null);
+ }
out.writeBytesReference(source);
out.writeByte(opType.getId());
out.writeLong(version);
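
The readFrom/writeTo hunks above keep the wire format compatible with older nodes: before Version.V_6_0_0_alpha1_UNRELEASED the stream still contains the (now unused) timestamp and ttl slots, so they are read and discarded on input and written as nulls on output. A minimal sketch of this version-gated pattern, with a made-up message class and plain DataInput/DataOutput standing in for Elasticsearch's stream abstractions:

```java
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

// Sketch of version-gated serialization; the class name, field layout, and
// version constant are illustrative, not the actual Elasticsearch wire code.
final class DocumentRequest {
    static final int WIRE_VERSION_FIELDS_REMOVED = 6; // assumed cut-over version

    private String id;

    void readFrom(DataInput in, int remoteVersion) throws IOException {
        id = in.readUTF();
        if (remoteVersion < WIRE_VERSION_FIELDS_REMOVED) {
            // Older senders still emit the legacy timestamp/ttl slots;
            // read and drop them so the rest of the stream stays aligned.
            in.readUTF();
            in.readLong();
        }
    }

    void writeTo(DataOutput out, int remoteVersion) throws IOException {
        out.writeUTF(id);
        if (remoteVersion < WIRE_VERSION_FIELDS_REMOVED) {
            // Older receivers still expect the legacy slots, so write placeholders.
            out.writeUTF("");
            out.writeLong(-1L);
        }
    }
}
```

The version guard keeps both directions aligned with what an older node expects, which is what lets the fields be deleted without a wire-format break.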
@@ -25,7 +25,6 @@
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
- import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
@@ -231,38 +230,6 @@ public IndexRequestBuilder setVersionType(VersionType versionType) {
return this;
}

- /**
- * Sets the timestamp either as millis since the epoch, or, in the configured date format.
- */
- public IndexRequestBuilder setTimestamp(String timestamp) {
- request.timestamp(timestamp);
- return this;
- }

- /**
- * Sets the ttl value as a time value expression.
- */
- public IndexRequestBuilder setTTL(String ttl) {
- request.ttl(ttl);
- return this;
- }

- /**
- * Sets the relative ttl value in milliseconds. It musts be greater than 0 as it makes little sense otherwise.
- */
- public IndexRequestBuilder setTTL(long ttl) {
- request.ttl(ttl);
- return this;
- }

- /**
- * Sets the ttl as a {@link TimeValue} instance.
- */
- public IndexRequestBuilder setTTL(TimeValue ttl) {
- request.ttl(ttl);
- return this;
- }

/**
* Sets the ingest pipeline to be executed before indexing the document
*/
@@ -171,7 +171,7 @@ protected WriteReplicaResult shardOperationOnReplica(IndexRequest request, Index
public static Engine.IndexResult executeIndexRequestOnReplica(IndexRequest request, IndexShard replica) {
final ShardId shardId = replica.shardId();
SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.REPLICA, shardId.getIndexName(), request.type(), request.id(), request.source())
- .routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl());
+ .routing(request.routing()).parent(request.parent());

final Engine.Index operation;
try {
@@ -189,7 +189,7 @@ public static Engine.IndexResult executeIndexRequestOnReplica(IndexRequest reque
/** Utility method to prepare an index operation on primary shards */
static Engine.Index prepareIndexOperationOnPrimary(IndexRequest request, IndexShard primary) {
SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.PRIMARY, request.index(), request.type(), request.id(), request.source())
- .routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl());
+ .routing(request.routing()).parent(request.parent());
return primary.prepareIndexOnPrimary(sourceToParse, request.version(), request.versionType(), request.getAutoGeneratedTimestamp(), request.isRetry());
}

@@ -156,8 +156,6 @@ private static List<IngestDocument> parseDocs(Map<String, Object> config) {
ConfigurationUtils.readStringProperty(null, null, dataMap, MetaData.ID.getFieldName(), "_id"),
ConfigurationUtils.readOptionalStringProperty(null, null, dataMap, MetaData.ROUTING.getFieldName()),
ConfigurationUtils.readOptionalStringProperty(null, null, dataMap, MetaData.PARENT.getFieldName()),
- ConfigurationUtils.readOptionalStringProperty(null, null, dataMap, MetaData.TIMESTAMP.getFieldName()),
- ConfigurationUtils.readOptionalStringProperty(null, null, dataMap, MetaData.TTL.getFieldName()),
document);
ingestDocumentList.add(ingestDocument);
}
