refactoring CodecService for 2.x
Signed-off-by: Sarthak Aggarwal <sarthagg@amazon.com>
sarthakaggarwal97 committed Jul 10, 2023
1 parent 3ede2b1 commit 4bb4c09
Showing 4 changed files with 66 additions and 3 deletions.
1 change: 0 additions & 1 deletion CHANGELOG.md
@@ -95,7 +95,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- [Refactor] OpenSearchException and ExceptionsHelper foundation to base class ([#7508](https://github.com/opensearch-project/OpenSearch/pull/7508))
- Move ZSTD compression codecs out of the sandbox ([#7908](https://github.com/opensearch-project/OpenSearch/pull/7908))
- Update ZSTD default compression level ([#8471](https://github.com/opensearch-project/OpenSearch/pull/8471))
- [Search Pipelines] Pass pipeline creation context to processor factories ([#8164](https://github.com/opensearch-project/OpenSearch/pull/8164))
- Enabling compression levels for zstd and zstd_no_dict ([#8312](https://github.com/opensearch-project/OpenSearch/pull/8312))
- Improved performance of parsing floating point numbers ([#8467](https://github.com/opensearch-project/OpenSearch/pull/8467))
- Move span actions to Scope ([#8411](https://github.com/opensearch-project/OpenSearch/pull/8411))
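The zstd-related changelog entries above add index-level configuration. A minimal sketch, assuming the index settings "index.codec" and "index.codec.compression_level" (the latter is the setting exercised by the tests further down); the class name and values here are illustrative only:

import org.opensearch.common.settings.Settings;

public final class ZstdIndexSettingsExample {
    public static Settings zstdIndexSettings() {
        // Select the zstd stored-fields codec and a custom compression level.
        return Settings.builder()
            .put("index.codec", "zstd")               // or "zstd_no_dict"
            .put("index.codec.compression_level", 3)  // the tests below use levels 1-6
            .build();
    }
}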
Empty file.
26 changes: 26 additions & 0 deletions server/src/main/java/org/opensearch/index/codec/CodecService.java
@@ -39,6 +39,7 @@
import org.opensearch.common.Nullable;
import org.opensearch.common.collect.MapBuilder;
import org.opensearch.index.IndexSettings;
import org.opensearch.index.codec.customcodecs.Lucene95CustomCodec;
import org.opensearch.index.codec.customcodecs.ZstdCodec;
import org.opensearch.index.codec.customcodecs.ZstdNoDictCodec;
import org.opensearch.index.mapper.MapperService;
@@ -90,6 +91,31 @@ public CodecService(@Nullable MapperService mapperService, IndexSettings indexSe
this.codecs = codecs.immutableMap();
}

@Deprecated(since = "2.9.0", forRemoval = true)
public CodecService(@Nullable MapperService mapperService, Logger logger) {
final MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder();
if (mapperService == null) {
codecs.put(DEFAULT_CODEC, new Lucene95Codec());
codecs.put(BEST_COMPRESSION_CODEC, new Lucene95Codec(Mode.BEST_COMPRESSION));
codecs.put(ZSTD_CODEC, new ZstdCodec());
codecs.put(ZSTD_NO_DICT_CODEC, new ZstdNoDictCodec());
} else {
IndexSettings indexSettings = mapperService.getIndexSettings();
int compressionLevel = indexSettings == null
? Lucene95CustomCodec.DEFAULT_COMPRESSION_LEVEL
: indexSettings.getValue(INDEX_CODEC_COMPRESSION_LEVEL_SETTING);
codecs.put(DEFAULT_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger));
codecs.put(BEST_COMPRESSION_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_COMPRESSION, mapperService, logger));
codecs.put(ZSTD_CODEC, new ZstdCodec(mapperService, logger, compressionLevel));
codecs.put(ZSTD_NO_DICT_CODEC, new ZstdNoDictCodec(mapperService, logger, compressionLevel));
}
codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault());
for (String codec : Codec.availableCodecs()) {
codecs.put(codec, Codec.forName(codec));
}
this.codecs = codecs.immutableMap();
}

public Codec codec(String name) {
Codec codec = codecs.get(name);
if (codec == null) {
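For reference, a minimal usage sketch of the two CodecService constructors involved in this refactor. The overloads themselves come from the diff above; the wrapping helper below is hypothetical. The existing three-argument constructor reads the compression level from the supplied IndexSettings, while the deprecated two-argument overload added here derives it from the MapperService's index settings, falling back to the default level when those, or the MapperService itself, are null:

import org.apache.logging.log4j.Logger;
import org.opensearch.index.IndexSettings;
import org.opensearch.index.codec.CodecService;
import org.opensearch.index.mapper.MapperService;

final class CodecServiceFactorySketch {
    // Hypothetical helper: prefer the three-argument constructor when IndexSettings
    // are available; otherwise fall back to the deprecated two-argument overload.
    static CodecService create(MapperService mapperService, IndexSettings indexSettings, Logger logger) {
        if (indexSettings != null) {
            return new CodecService(mapperService, indexSettings, logger);
        }
        return new CodecService(mapperService, logger); // deprecated since 2.9.0
    }
}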
42 changes: 40 additions & 2 deletions server/src/test/java/org/opensearch/index/codec/CodecTests.java
@@ -142,6 +142,40 @@ public void testExceptionIndexSettingsNull() {
assertThrows(AssertionError.class, () -> new CodecService(null, null, LogManager.getLogger("test")));
}

public void testCodecServiceWithNullMapperService() {
CodecService codecService = new CodecService(null, LogManager.getLogger("test"));
assert codecService.codec("default") instanceof Lucene95Codec;
assert codecService.codec("best_compression") instanceof Lucene95Codec;
Lucene95CustomStoredFieldsFormat zstdStoredFieldsFormat = (Lucene95CustomStoredFieldsFormat) codecService.codec("zstd")
.storedFieldsFormat();
Lucene95CustomStoredFieldsFormat zstdNoDictStoredFieldsFormat = (Lucene95CustomStoredFieldsFormat) codecService.codec("zstd_no_dict")
.storedFieldsFormat();
assertEquals(Lucene95CustomCodec.DEFAULT_COMPRESSION_LEVEL, zstdStoredFieldsFormat.getCompressionLevel());
assertEquals(Lucene95CustomCodec.DEFAULT_COMPRESSION_LEVEL, zstdNoDictStoredFieldsFormat.getCompressionLevel());
}

public void testCodecServiceWithOnlyMapperService() throws IOException {
int randomCompressionLevel = randomIntBetween(1, 6);
Settings nodeSettings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put("index.codec.compression_level", randomCompressionLevel)
.build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("_na", nodeSettings);
CodecService codecService = new CodecService(getMapperService(indexSettings, nodeSettings), LogManager.getLogger("test"));
assert codecService.codec("default") instanceof PerFieldMappingPostingFormatCodec;
assert codecService.codec("best_compression") instanceof PerFieldMappingPostingFormatCodec;
Lucene95CustomStoredFieldsFormat zstdStoredFieldsFormat = (Lucene95CustomStoredFieldsFormat) codecService.codec("zstd")
.storedFieldsFormat();
Lucene95CustomStoredFieldsFormat zstdNoDictStoredFieldsFormat = (Lucene95CustomStoredFieldsFormat) codecService.codec("zstd_no_dict")
.storedFieldsFormat();
assertEquals(randomCompressionLevel, zstdStoredFieldsFormat.getCompressionLevel());
assertEquals(randomCompressionLevel, zstdNoDictStoredFieldsFormat.getCompressionLevel());
}

// write some docs with it, inspect .si to see this was the used compression
private void assertStoredFieldsCompressionEquals(Lucene95Codec.Mode expected, Codec actual) throws Exception {
SegmentReader sr = getSegmentReader(actual);
@@ -176,10 +210,15 @@ private CodecService createCodecService(int randomCompressionLevel) throws IOExc
private CodecService buildCodecService(Settings nodeSettings) throws IOException {

IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("_na", nodeSettings);
MapperService mapperService = getMapperService(indexSettings, nodeSettings);
return new CodecService(mapperService, indexSettings, LogManager.getLogger("test"));
}

private MapperService getMapperService(IndexSettings indexSettings, Settings nodeSettings) throws IOException {
SimilarityService similarityService = new SimilarityService(indexSettings, null, Collections.emptyMap());
IndexAnalyzers indexAnalyzers = createTestAnalysis(indexSettings, nodeSettings).indexAnalyzers;
MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER);
MapperService service = new MapperService(
return new MapperService(
indexSettings,
indexAnalyzers,
xContentRegistry(),
@@ -189,7 +228,6 @@ private CodecService buildCodecService(Settings nodeSettings) throws IOException
() -> false,
null
);
return new CodecService(service, indexSettings, LogManager.getLogger("test"));
}

private SegmentReader getSegmentReader(Codec codec) throws IOException {
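The extracted getMapperService helper above could also be reused to exercise the deprecated two-argument constructor when no compression level is configured. A sketch of such a test method inside CodecTests, reusing its imports and helpers; the method name is hypothetical, and it assumes the default of index.codec.compression_level matches Lucene95CustomCodec.DEFAULT_COMPRESSION_LEVEL:

public void testDeprecatedConstructorUsesDefaultCompressionLevel() throws IOException {
    Settings nodeSettings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build();
    IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("_na", nodeSettings);
    // Deprecated two-argument constructor: the compression level is resolved through the MapperService.
    CodecService codecService = new CodecService(getMapperService(indexSettings, nodeSettings), LogManager.getLogger("test"));
    Lucene95CustomStoredFieldsFormat zstdStoredFieldsFormat = (Lucene95CustomStoredFieldsFormat) codecService.codec("zstd")
        .storedFieldsFormat();
    // Assumes the setting's default equals Lucene95CustomCodec.DEFAULT_COMPRESSION_LEVEL.
    assertEquals(Lucene95CustomCodec.DEFAULT_COMPRESSION_LEVEL, zstdStoredFieldsFormat.getCompressionLevel());
}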
