-
Notifications
You must be signed in to change notification settings - Fork 24.9k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Increase max number of dimensions from 16 to 21 #95340
Changes from 5 commits
197377e
97a131f
ee7abd3
221adaf
9b3e023
7549da3
81e8705
258a06f
0aa622e
7947a71
164bfa3
965ecc3
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,182 @@ | ||
/* | ||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one | ||
* or more contributor license agreements. Licensed under the Elastic License | ||
* 2.0 and the Server Side Public License, v 1; you may not use this file except | ||
* in compliance with, at your election, the Elastic License 2.0 or the Server | ||
* Side Public License, v 1. | ||
*/ | ||
|
||
package org.elasticsearch.timeseries.support; | ||
|
||
import org.elasticsearch.action.index.IndexResponse; | ||
import org.elasticsearch.cluster.metadata.IndexMetadata; | ||
import org.elasticsearch.common.settings.Settings; | ||
import org.elasticsearch.core.CheckedConsumer; | ||
import org.elasticsearch.index.IndexMode; | ||
import org.elasticsearch.index.IndexSettings; | ||
import org.elasticsearch.index.mapper.DocumentParsingException; | ||
import org.elasticsearch.index.mapper.MapperService; | ||
import org.elasticsearch.rest.RestStatus; | ||
import org.elasticsearch.test.ESIntegTestCase; | ||
import org.elasticsearch.xcontent.XContentBuilder; | ||
import org.elasticsearch.xcontent.json.JsonXContent; | ||
|
||
import java.io.IOException; | ||
import java.time.Instant; | ||
import java.util.ArrayList; | ||
import java.util.HashMap; | ||
import java.util.List; | ||
import java.util.Map; | ||
import java.util.function.Supplier; | ||
|
||
import static org.hamcrest.Matchers.equalTo; | ||
|
||
public class TimeSeriesDimensionsLimitIT extends ESIntegTestCase { | ||
|
||
public void testDimensionFieldNameLimit() throws IOException { | ||
int dimensionFieldLimit = 21; | ||
final String dimensionFieldName = randomAlphaOfLength(randomIntBetween(513, 1024)); | ||
createTimeSeriesIndex(mapping -> { | ||
mapping.startObject("routing_field").field("type", "keyword").field("time_series_dimension", true).endObject(); | ||
mapping.startObject(dimensionFieldName).field("type", "keyword").field("time_series_dimension", true).endObject(); | ||
}, | ||
mapping -> mapping.startObject("gauge").field("type", "integer").field("time_series_metric", "gauge").endObject(), | ||
() -> List.of("routing_field"), | ||
dimensionFieldLimit | ||
); | ||
final Exception ex = expectThrows( | ||
DocumentParsingException.class, | ||
() -> client().prepareIndex("test") | ||
.setSource( | ||
"routing_field", | ||
randomAlphaOfLength(10), | ||
dimensionFieldName, | ||
randomAlphaOfLength(1536), | ||
"gauge", | ||
randomIntBetween(10, 20), | ||
"@timestamp", | ||
Instant.now().toEpochMilli() | ||
) | ||
.get() | ||
); | ||
assertThat( | ||
ex.getCause().getMessage(), | ||
equalTo( | ||
"Dimension name must be less than [512] bytes but [" + dimensionFieldName + "] was [" + dimensionFieldName.length() + "]." | ||
) | ||
); | ||
} | ||
|
||
public void testDimensionFieldValueLimit() throws IOException { | ||
int dimensionFieldLimit = 21; | ||
createTimeSeriesIndex( | ||
mapping -> mapping.startObject("field").field("type", "keyword").field("time_series_dimension", true).endObject(), | ||
mapping -> mapping.startObject("gauge").field("type", "integer").field("time_series_metric", "gauge").endObject(), | ||
() -> List.of("field"), | ||
dimensionFieldLimit | ||
); | ||
long startTime = Instant.now().toEpochMilli(); | ||
client().prepareIndex("test") | ||
.setSource("field", randomAlphaOfLength(1536), "gauge", randomIntBetween(10, 20), "@timestamp", startTime) | ||
.get(); | ||
final Exception ex = expectThrows( | ||
DocumentParsingException.class, | ||
() -> client().prepareIndex("test") | ||
.setSource("field", randomAlphaOfLength(1537), "gauge", randomIntBetween(10, 20), "@timestamp", startTime + 1) | ||
.get() | ||
); | ||
assertThat(ex.getCause().getMessage(), equalTo("Dimension fields must be less than [1536] bytes but was [1537].")); | ||
} | ||
|
||
public void testTotalNumberOfDimensionFieldsLimit() { | ||
int dimensionFieldLimit = 21; | ||
final Exception ex = expectThrows(IllegalArgumentException.class, () -> createTimeSeriesIndex(mapping -> { | ||
mapping.startObject("routing_field").field("type", "keyword").field("time_series_dimension", true).endObject(); | ||
for (int i = 0; i < dimensionFieldLimit; i++) { | ||
mapping.startObject(randomAlphaOfLength(10)).field("type", "keyword").field("time_series_dimension", true).endObject(); | ||
} | ||
}, | ||
mapping -> mapping.startObject("gauge").field("type", "integer").field("time_series_metric", "gauge").endObject(), | ||
() -> List.of("routing_field"), | ||
dimensionFieldLimit | ||
)); | ||
|
||
assertThat(ex.getMessage(), equalTo("Limit of total dimension fields [" + dimensionFieldLimit + "] has been exceeded")); | ||
} | ||
|
||
public void testTotalDimensionFieldsSizeLuceneLimit() throws IOException { | ||
int dimensionFieldLimit = 21; | ||
final List<String> dimensionFieldNames = new ArrayList<>(); | ||
createTimeSeriesIndex(mapping -> { | ||
for (int i = 0; i < dimensionFieldLimit; i++) { | ||
String dimensionFieldName = randomAlphaOfLength(10); | ||
dimensionFieldNames.add(dimensionFieldName); | ||
mapping.startObject(dimensionFieldName).field("type", "keyword").field("time_series_dimension", true).endObject(); | ||
} | ||
}, | ||
mapping -> mapping.startObject("gauge").field("type", "integer").field("time_series_metric", "gauge").endObject(), | ||
() -> List.of(dimensionFieldNames.get(0)), | ||
dimensionFieldLimit | ||
); | ||
|
||
final Map<String, Object> source = new HashMap<>(); | ||
source.put("gauge", randomIntBetween(10, 20)); | ||
source.put("@timestamp", Instant.now().toEpochMilli()); | ||
for (int i = 0; i < dimensionFieldLimit; i++) { | ||
source.put(dimensionFieldNames.get(i), randomAlphaOfLength(1536)); | ||
} | ||
final IndexResponse indexResponse = client().prepareIndex("test").setSource(source).get(); | ||
assertEquals(RestStatus.CREATED.getStatus(), indexResponse.status().getStatus()); | ||
} | ||
|
||
public void testTotalDimensionFieldsSizeLuceneLimitPlusOne() throws IOException { | ||
int dimensionFieldLimit = 22; | ||
final List<String> dimensionFieldNames = new ArrayList<>(); | ||
createTimeSeriesIndex(mapping -> { | ||
for (int i = 0; i < dimensionFieldLimit; i++) { | ||
String dimensionFieldName = randomAlphaOfLength(10); | ||
dimensionFieldNames.add(dimensionFieldName); | ||
mapping.startObject(dimensionFieldName).field("type", "keyword").field("time_series_dimension", true).endObject(); | ||
} | ||
}, | ||
mapping -> mapping.startObject("gauge").field("type", "integer").field("time_series_metric", "gauge").endObject(), | ||
() -> List.of(dimensionFieldNames.get(0)), | ||
dimensionFieldLimit | ||
); | ||
|
||
final Map<String, Object> source = new HashMap<>(); | ||
source.put("routing_field", randomAlphaOfLength(1536)); | ||
source.put("gauge", randomIntBetween(10, 20)); | ||
source.put("@timestamp", Instant.now().toEpochMilli()); | ||
for (int i = 0; i < dimensionFieldLimit; i++) { | ||
source.put(dimensionFieldNames.get(i), randomAlphaOfLength(1536)); | ||
} | ||
final Exception ex = expectThrows(DocumentParsingException.class, () -> client().prepareIndex("test").setSource(source).get()); | ||
// NOTE: the number of bytes of the tsid might change slightly, which is why we do not match strings exactly. | ||
assertEquals("_tsid longer than [32766] bytes [34104].".substring(0, 30), ex.getCause().getMessage().substring(0, 30)); | ||
} | ||
|
||
private void createTimeSeriesIndex( | ||
final CheckedConsumer<XContentBuilder, IOException> dimensions, | ||
final CheckedConsumer<XContentBuilder, IOException> metrics, | ||
final Supplier<List<String>> routingPaths, | ||
int dimensionsFieldLimit | ||
) throws IOException { | ||
XContentBuilder mapping = JsonXContent.contentBuilder(); | ||
mapping.startObject().startObject("properties"); | ||
mapping.startObject("@timestamp").field("type", "date").endObject(); | ||
metrics.accept(mapping); | ||
dimensions.accept(mapping); | ||
mapping.endObject().endObject(); | ||
|
||
Settings settings = Settings.builder() | ||
.put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES) | ||
.putList(IndexMetadata.INDEX_ROUTING_PATH.getKey(), routingPaths.get()) | ||
.put(IndexSettings.TIME_SERIES_START_TIME.getKey(), "2000-01-08T23:40:53.384Z") | ||
.put(IndexSettings.TIME_SERIES_END_TIME.getKey(), "2106-01-08T23:40:53.384Z") | ||
.put(MapperService.INDEX_MAPPING_DIMENSION_FIELDS_LIMIT_SETTING.getKey(), dimensionsFieldLimit) | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Now we always concrete set the limit. Maybe also test with the default? |
||
.build(); | ||
client().admin().indices().prepareCreate("test").setSettings(settings).setMapping(mapping).get(); | ||
} | ||
|
||
} |
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -67,7 +67,7 @@ public class TimeSeriesIdFieldMapper extends MetadataFieldMapper { | |
* comfortable given that dimensions are typically going to be less than a | ||
* hundred bytes each, but we're being paranoid here. | ||
*/ | ||
private static final int DIMENSION_VALUE_LIMIT = 1024; | ||
private static final int DIMENSION_VALUE_LIMIT = 1536; | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Reviewer: I don't think we can increase this constant. The per-field limit is DIMENSION_VALUE_LIMIT + DIMENSION_NAME_LIMIT, so with the larger value we may spend up to ~2 KB per field and then hit the 32 KB limit with fewer than 21 fields. Author: OK, I missed that — the tsid includes both the name and the value. That means the right way of testing the limit is to use a field name of 512 bytes and a value of 1024 bytes, with 21 dimensions (test passes) or 22 dimensions (test fails).
||
|
||
@Override | ||
public FieldMapper.Builder getMergeBuilder() { | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Reviewer: Maybe I'm reading this incorrectly — this creates a mapping with 21 dimension fields, but that should succeed? It should fail only if there are 22 dimensions.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Author: There is also the routing_field, which is a dimension, so the total is actually 22. I need to include something in the routing path; otherwise the test fails because there is no routing path.