diff --git a/.github/actions/create-bwc-build/action.yaml b/.github/actions/create-bwc-build/action.yaml
new file mode 100644
index 0000000..24a674c
--- /dev/null
+++ b/.github/actions/create-bwc-build/action.yaml
@@ -0,0 +1,51 @@
+name: 'Create a backwards compatible ready build'
+description: 'Checks out the official version of the custom-codecs plugin and builds it so it can be used for BWC tests'
+
+inputs:
+  plugin-branch:
+    description: 'The branch of the plugin that should be built, e.g. "2.2", "1.x"'
+    required: true
+
+outputs:
+  built-version:
+    description: 'The version of OpenSearch that was associated with this branch'
+    value: ${{ steps.get-opensearch-version.outputs.version }}
+
+runs:
+  using: "composite"
+  steps:
+    - name: Enable Longpaths if on Windows
+      if: ${{ runner.os == 'Windows' }}
+      run: git config --system core.longpaths true
+      shell: pwsh
+
+    # 'current_branch' means "build the code under test" rather than an official branch.
+    - name: Checkout Branch from Fork
+      if: ${{ inputs.plugin-branch == 'current_branch' }}
+      uses: actions/checkout@v4
+      with:
+        path: ${{ inputs.plugin-branch }}
+
+    - uses: actions/checkout@v4
+      if: ${{ inputs.plugin-branch != 'current_branch' }}
+      with:
+        repository: opensearch-project/custom-codecs
+        ref: ${{ inputs.plugin-branch }}
+        path: ${{ inputs.plugin-branch }}
+
+    - name: Build
+      uses: gradle/gradle-build-action@v2
+      with:
+        cache-disabled: true
+        arguments: assemble
+        build-root-directory: ${{ inputs.plugin-branch }}
+
+    - id: get-opensearch-version
+      uses: peternied/get-opensearch-version@v1
+      with:
+        working-directory: ${{ inputs.plugin-branch }}
+
+    - name: Copy current distro into the expected folder
+      run: |
+        mkdir -p ./bwc-test/src/test/resources/${{ steps.get-opensearch-version.outputs.version }}
+        cp ${{ inputs.plugin-branch }}/build/distributions/opensearch-custom-codecs-${{ steps.get-opensearch-version.outputs.version }}-SNAPSHOT.zip ./bwc-test/src/test/resources/${{ steps.get-opensearch-version.outputs.version }}
+      shell: bash
diff --git
a/.github/actions/run-bwc-suite/action.yaml b/.github/actions/run-bwc-suite/action.yaml
new file mode 100644
index 0000000..f4c54d5
--- /dev/null
+++ b/.github/actions/run-bwc-suite/action.yaml
@@ -0,0 +1,57 @@
+name: 'Runs the backward compatibility test suite'
+description: 'Tests backwards compatibility between a previous and next version of this plugin'
+
+inputs:
+  plugin-previous-branch:
+    description: 'The branch of the plugin that should be built for the previous version, e.g. "2.2", "1.x"'
+    required: true
+
+  plugin-next-branch:
+    description: 'The branch of the plugin that should be built for the next version, e.g. "2.3", "main"'
+    required: true
+
+  report-artifact-name:
+    description: 'The name of the artifacts for this run, e.g. "BWC-2.1-to-2.4-results"'
+    required: true
+
+  username:
+    description: 'Username to use for cluster health check in testClusters'
+    required: true
+
+  password:
+    description: 'Password to use for cluster health check in testClusters'
+    required: true
+
+runs:
+  using: "composite"
+  steps:
+
+    - id: build-previous
+      uses: ./.github/actions/create-bwc-build
+      with:
+        plugin-branch: ${{ inputs.plugin-previous-branch }}
+
+    - id: build-next
+      uses: ./.github/actions/create-bwc-build
+      with:
+        plugin-branch: ${{ inputs.plugin-next-branch }}
+
+    - name: Run BWC tests
+      uses: gradle/gradle-build-action@v2
+      with:
+        cache-disabled: true
+        arguments: |
+          -p bwc-test
+          bwcTestSuite
+          -Dtests.security.manager=true
+          -Dtests.opensearch.username=${{ inputs.username }}
+          -Dtests.opensearch.password=${{ inputs.password }}
+          -Dbwc.version.previous=${{ steps.build-previous.outputs.built-version }}
+          -Dbwc.version.next=${{ steps.build-next.outputs.built-version }} -i
+
+    - uses: alehechka/upload-tartifact@v2
+      if: always()
+      with:
+        name: ${{ inputs.report-artifact-name }}
+        path: |
+          ./bwc-test/build/reports/
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..15bf998
--- /dev/null
+++
b/.github/workflows/ci.yml
@@ -0,0 +1,57 @@
+name: Backward Compatibility Checks
+
+on:
+  push:
+    branches:
+      - 2.x
+  pull_request:
+
+env:
+  GRADLE_OPTS: -Dhttp.keepAlive=true
+  CI_ENVIRONMENT: normal
+
+jobs:
+
+  backward-compatibility-build:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/setup-java@v4
+        with:
+          distribution: temurin # Temurin is a distribution of adoptium
+          java-version: 17
+
+      - name: Checkout custom-codecs Repo
+        uses: actions/checkout@v4
+
+      - name: Build BWC tests
+        uses: gradle/gradle-build-action@v2
+        with:
+          cache-disabled: true
+          arguments: |
+            -p bwc-test build -x test -x integTest
+
+  backward-compatibility:
+    strategy:
+      fail-fast: false
+      matrix:
+        jdk: [11, 17]
+        platform: [ubuntu-latest, windows-latest]
+    runs-on: ${{ matrix.platform }}
+
+    steps:
+      - uses: actions/setup-java@v4
+        with:
+          distribution: temurin # Temurin is a distribution of adoptium
+          java-version: ${{ matrix.jdk }}
+
+      - name: Checkout custom-codecs Repo
+        uses: actions/checkout@v4
+
+      # This step runs the full BWC suite, not just a build; id renamed from the
+      # copy-pasted 'build-previous' to say what it does.
+      - id: run-bwc-suite
+        uses: ./.github/actions/run-bwc-suite
+        with:
+          plugin-previous-branch: "2.11"
+          plugin-next-branch: "current_branch"
+          report-artifact-name: bwc-${{ matrix.platform }}-jdk${{ matrix.jdk }}
+          username: admin
+          password: admin
diff --git a/bwc-test/build.gradle b/bwc-test/build.gradle
new file mode 100644
index 0000000..ed37fd0
--- /dev/null
+++ b/bwc-test/build.gradle
@@ -0,0 +1,265 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership.
Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.opensearch.gradle.testclusters.StandaloneRestIntegTestTask
+import java.util.concurrent.Callable
+
+apply plugin: 'opensearch.build'
+apply plugin: 'opensearch.rest-test'
+apply plugin: 'java'
+
+apply plugin: 'opensearch.testclusters'
+
+ext {
+    // NOTE(review): confirm the file name case matches the repo (LICENSE.txt vs LICENSE.TXT)
+    licenseFile = rootProject.file('LICENSE.TXT')
+    noticeFile = rootProject.file('NOTICE')
+}
+
+buildscript {
+    ext {
+        opensearch_version = System.getProperty("opensearch.version", "2.12.0-SNAPSHOT")
+        opensearch_group = "org.opensearch"
+        common_utils_version = System.getProperty("common_utils.version", '2.9.0.0-SNAPSHOT')
+        jackson_version = System.getProperty("jackson_version", "2.16.1")
+        http_client_version = System.getProperty("http_client_version", "4.5.14")
+    }
+    repositories {
+        mavenLocal()
+        maven { url "https://aws.oss.sonatype.org/content/repositories/snapshots" }
+        mavenCentral()
+        maven { url "https://plugins.gradle.org/m2/" }
+    }
+
+    dependencies {
+        classpath "${opensearch_group}.gradle:build-tools:${opensearch_version}"
+    }
+}
+
+repositories {
+    mavenLocal()
+    maven { url "https://aws.oss.sonatype.org/content/repositories/snapshots" }
+    mavenCentral()
+    maven { url "https://plugins.gradle.org/m2/" }
+}
+
+dependencies {
+    testImplementation "com.google.guava:guava:${versions.guava}"
+    testImplementation "org.opensearch.test:framework:${opensearch_version}"
+    testImplementation
"org.apache.logging.log4j:log4j-core:${versions.log4j}" + testImplementation "org.opensearch:common-utils:${common_utils_version}" + testImplementation "com.fasterxml.jackson.core:jackson-databind:${jackson_version}" + testImplementation "com.fasterxml.jackson.core:jackson-annotations:${jackson_version}" + testImplementation "org.apache.httpcomponents:httpclient:${http_client_version}" + +} + +loggerUsageCheck.enabled = false +testingConventions.enabled = false +validateNebulaPom.enabled = false + +String previousVersion = System.getProperty("bwc.version.previous", "2.11.1.0") +String nextVersion = System.getProperty("bwc.version.next", "2.12.0.0") + +String bwcVersion = previousVersion +String baseName = "customCodecsBwcCluster" +String bwcFilePath = "src/test/resources/" +String projectVersion = nextVersion + +String previousOpenSearch = extractVersion(previousVersion) + "-SNAPSHOT"; +String nextOpenSearch = extractVersion(nextVersion) + "-SNAPSHOT"; + +// Extracts the OpenSearch version from a plugin version string, 2.11.0.0 -> 2.11.0. 
+String extractVersion(versionStr) {
+    // Drop the trailing plugin-build component (last ".N") to get the OpenSearch version.
+    def versionMatcher = versionStr =~ /(.+?)(\.\d+)$/
+    versionMatcher.find()
+    return versionMatcher.group(1)
+}
+
+2.times { i ->
+    testClusters {
+        "${baseName}$i" {
+            testDistribution = "ARCHIVE"
+            versions = [previousOpenSearch, nextOpenSearch]
+            numberOfNodes = 3
+            // Generic type restored; it was stripped in transit (raw Callable).
+            plugin(provider(new Callable<RegularFile>() {
+                @Override
+                RegularFile call() throws Exception {
+                    return new RegularFile() {
+                        @Override
+                        File getAsFile() {
+                            return fileTree(bwcFilePath + bwcVersion).getSingleFile()
+                        }
+                    }
+                }
+            }))
+            nodes.each { node ->
+                def plugins = node.plugins
+                def firstPlugin = plugins.get(0)
+                plugins.remove(0)
+                plugins.add(firstPlugin)
+
+                node.setting("network.bind_host", "0.0.0.0")
+                node.setting("network.publish_host", "0.0.0.0")
+
+            }
+
+            setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
+            setting 'http.content_type.required', 'true'
+        }
+    }
+}
+
+// Distribution of the next plugin version, installed during upgrade steps below.
+List<Provider<RegularFile>> plugins = [
+    provider(new Callable<RegularFile>() {
+        @Override
+        RegularFile call() throws Exception {
+            return new RegularFile() {
+                @Override
+                File getAsFile() {
+                    return fileTree(bwcFilePath + projectVersion).getSingleFile()
+                }
+            }
+        }
+    })
+]
+
+// Creates a test cluster with 3 nodes of the old version.
+2.times { i ->
+    task "${baseName}#oldVersionClusterTask$i"(type: StandaloneRestIntegTestTask) {
+        useCluster testClusters."${baseName}$i"
+        systemProperty 'tests.rest.bwcsuite', 'old_cluster'
+        systemProperty 'tests.rest.bwcsuite_round', 'old'
+        systemProperty 'tests.plugin_bwc_version', bwcVersion
+
+        systemProperty 'tests.security.manager', 'true'
+        systemProperty "https", System.getProperty("https")
+        systemProperty "user", System.getProperty("user")
+        systemProperty "password", System.getProperty("password")
+
+        nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}$i".allHttpSocketURI.join(",")}")
+        nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}$i".getName()}")
+    }
+}
+
+// Upgrades one node of the old cluster to new OpenSearch version with upgraded plugin version
+// This results in a mixed cluster with 2 nodes on the old version and 1 upgraded node.
+// This is also used as a one third upgraded cluster for a rolling upgrade.
+task "${baseName}#mixedClusterTask"(type: StandaloneRestIntegTestTask) { + dependsOn "${baseName}#oldVersionClusterTask0" + useCluster testClusters."${baseName}0" + doFirst { + testClusters."${baseName}0".upgradeNodeAndPluginToNextVersion(plugins) + } + systemProperty 'tests.rest.bwcsuite', 'mixed_cluster' + systemProperty 'tests.rest.bwcsuite_round', 'first' + systemProperty 'tests.plugin_bwc_version', bwcVersion + + systemProperty 'tests.security.manager', 'true' + systemProperty "https", System.getProperty("https") + systemProperty "user", System.getProperty("user") + systemProperty "password", System.getProperty("password") + + + nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}0".allHttpSocketURI.join(",")}") + nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}0".getName()}") +} + +// Upgrades the second node to new OpenSearch version with upgraded plugin version after the first node is upgraded. +// This results in a mixed cluster with 1 node on the old version and 2 upgraded nodes. +// This is used for rolling upgrade. 
+task "${baseName}#twoThirdsUpgradedClusterTask"(type: StandaloneRestIntegTestTask) { + dependsOn "${baseName}#mixedClusterTask" + useCluster testClusters."${baseName}0" + doFirst { + testClusters."${baseName}0".upgradeNodeAndPluginToNextVersion(plugins) + } + systemProperty 'tests.rest.bwcsuite', 'mixed_cluster' + systemProperty 'tests.rest.bwcsuite_round', 'second' + systemProperty 'tests.plugin_bwc_version', bwcVersion + + systemProperty 'tests.security.manager', 'true' + systemProperty "https", System.getProperty("https") + systemProperty "user", System.getProperty("user") + systemProperty "password", System.getProperty("password") + + nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}0".allHttpSocketURI.join(",")}") + nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}0".getName()}") +} + +// Upgrades the third node to new OpenSearch version with upgraded plugin version after the second node is upgraded. +// This results in a fully upgraded cluster. +// This is used for rolling upgrade. 
+task "${baseName}#rollingUpgradeClusterTask"(type: StandaloneRestIntegTestTask) { + dependsOn "${baseName}#twoThirdsUpgradedClusterTask" + useCluster testClusters."${baseName}0" + doFirst { + testClusters."${baseName}0".upgradeNodeAndPluginToNextVersion(plugins) + } + systemProperty 'tests.rest.bwcsuite', 'mixed_cluster' + systemProperty 'tests.rest.bwcsuite_round', 'third' + systemProperty 'tests.plugin_bwc_version', bwcVersion + + systemProperty 'tests.security.manager', 'true' + systemProperty "https", System.getProperty("https") + systemProperty "user", System.getProperty("user") + systemProperty "password", System.getProperty("password") + + nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}0".allHttpSocketURI.join(",")}") + nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}0".getName()}") +} + +// Upgrades all the nodes of the old cluster to new OpenSearch version with upgraded plugin version +// at the same time resulting in a fully upgraded cluster. +tasks.register("${baseName}#fullRestartClusterTask", StandaloneRestIntegTestTask) { + dependsOn "${baseName}#oldVersionClusterTask1" + useCluster testClusters."${baseName}1" + doFirst { + testClusters."${baseName}1".upgradeAllNodesAndPluginsToNextVersion(plugins) + } + systemProperty 'tests.rest.bwcsuite', 'upgraded_cluster' + systemProperty 'tests.plugin_bwc_version', bwcVersion + systemProperty 'tests.rest.bwcsuite_round', 'first' + + systemProperty 'tests.security.manager', 'true' + systemProperty "https", System.getProperty("https") + systemProperty "user", System.getProperty("user") + systemProperty "password", System.getProperty("password") + + nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}1".allHttpSocketURI.join(",")}") + nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}1".getName()}") +} + +// A bwc test suite which runs all the bwc tasks combined. 
+task bwcTestSuite(type: StandaloneRestIntegTestTask) {
+    exclude '**/**' // Do not run any tests as part of this aggregate task
+    dependsOn tasks.named("${baseName}#mixedClusterTask")
+    dependsOn tasks.named("${baseName}#rollingUpgradeClusterTask")
+    dependsOn tasks.named("${baseName}#fullRestartClusterTask")
+}
diff --git a/bwc-test/settings.gradle b/bwc-test/settings.gradle
new file mode 100644
index 0000000..4411844
--- /dev/null
+++ b/bwc-test/settings.gradle
@@ -0,0 +1,7 @@
+/*
+ * This file was generated by the Gradle 'init' task.
+ *
+ * This project uses @Incubating APIs which are subject to change.
+ */
+
+rootProject.name = 'bwc-test'
diff --git a/bwc-test/src/test/java/org/opensearch/customcodecs/bwc/ClusterType.java b/bwc-test/src/test/java/org/opensearch/customcodecs/bwc/ClusterType.java
new file mode 100644
index 0000000..94036f5
--- /dev/null
+++ b/bwc-test/src/test/java/org/opensearch/customcodecs/bwc/ClusterType.java
@@ -0,0 +1,28 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.customcodecs.bwc;
+
+public enum ClusterType {
+    OLD,
+    MIXED,
+    UPGRADED;
+
+    public static ClusterType parse(String value) {
+        switch (value) {
+            case "old_cluster":
+                return OLD;
+            case "mixed_cluster":
+                return MIXED;
+            case "upgraded_cluster":
+                return UPGRADED;
+            default:
+                throw new AssertionError("unknown cluster type: " + value);
+        }
+    }
+}
diff --git a/bwc-test/src/test/java/org/opensearch/customcodecs/bwc/CustomCodecsBwcCompatibilityIT.java b/bwc-test/src/test/java/org/opensearch/customcodecs/bwc/CustomCodecsBwcCompatibilityIT.java
new file mode 100644
index 0000000..b3434fb
--- /dev/null
+++ b/bwc-test/src/test/java/org/opensearch/customcodecs/bwc/CustomCodecsBwcCompatibilityIT.java
@@ -0,0 +1,237 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+package org.opensearch.customcodecs.bwc;
+
+import java.io.IOException;
+import java.security.KeyManagementException;
+import java.security.KeyStoreException;
+import java.security.NoSuchAlgorithmException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import javax.net.ssl.SSLContext;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.http.HttpHost;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.client.CredentialsProvider;
+import org.apache.http.conn.ssl.NoopHostnameVerifier;
+import org.apache.http.conn.ssl.SSLContextBuilder;
+import org.apache.http.conn.ssl.TrustAllStrategy;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+
+import org.junit.AfterClass;
+import org.junit.Assume;
+import org.junit.Before;
+import org.opensearch.client.Response;
+import org.opensearch.client.RestClient;
+import org.opensearch.client.RestClientBuilder;
+import
org.opensearch.cluster.metadata.IndexMetadata;
+import org.opensearch.common.Randomness;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.io.IOUtils;
+import org.opensearch.core.common.Strings;
+import org.opensearch.customcodecs.bwc.helper.RestHelper;
+import org.opensearch.test.rest.OpenSearchRestTestCase;
+
+public class CustomCodecsBwcCompatibilityIT extends OpenSearchRestTestCase {
+    private ClusterType CLUSTER_TYPE;
+    private String CLUSTER_NAME;
+    private static RestClient testUserRestClient = null;
+
+    @Before
+    public void testSetup() throws IOException {
+        final String bwcsuiteString = System.getProperty("tests.rest.bwcsuite");
+        Assume.assumeTrue("Test cannot be run outside the BWC gradle task 'bwcTestSuite' or its dependent tasks", bwcsuiteString != null);
+        CLUSTER_TYPE = ClusterType.parse(bwcsuiteString);
+        logger.info("Running Test for Cluster Type: {}", CLUSTER_TYPE);
+        CLUSTER_NAME = System.getProperty("tests.clustername");
+        // Lazily build a single shared client for all rounds.
+        if (testUserRestClient == null) {
+            testUserRestClient = buildClient(
+                super.restClientSettings(),
+                super.getClusterHosts().toArray(new HttpHost[0])
+            );
+        }
+    }
+
+    @Override
+    protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOException {
+        RestClientBuilder builder = RestClient.builder(hosts);
+        configureHttpOrHttpsClient(builder, settings);
+        builder.setStrictDeprecationMode(true);
+        return builder.build();
+    }
+
+    protected void configureHttpOrHttpsClient(RestClientBuilder builder, Settings settings) throws IOException {
+        configureClient(builder, settings);
+
+        if (getProtocol().equalsIgnoreCase("https")) {
+            final String username = System.getProperty("user");
+            if (Strings.isNullOrEmpty(username)) {
+                throw new RuntimeException("user name is missing");
+            }
+
+            final String password = System.getProperty("password");
+            if (Strings.isNullOrEmpty(password)) {
+                throw new RuntimeException("password is missing");
+            }
+
+            final CredentialsProvider
credentialsProvider = new BasicCredentialsProvider();
+            credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(username, password));
+
+            try {
+                // Trust-all context: acceptable for test clusters only, never production code.
+                final SSLContext sslContext = new SSLContextBuilder()
+                    .loadTrustMaterial(null, TrustAllStrategy.INSTANCE)
+                    .build();
+
+                builder.setHttpClientConfigCallback(httpClientBuilder -> {
+                    return httpClientBuilder
+                        .setDefaultCredentialsProvider(credentialsProvider)
+                        .setSSLContext(sslContext)
+                        .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE);
+                });
+            } catch (final NoSuchAlgorithmException | KeyManagementException | KeyStoreException ex) {
+                throw new IOException(ex);
+            }
+        }
+    }
+
+    @Override
+    protected String getProtocol() {
+        return Objects.equals(System.getProperty("https"), "true") ? "https" : "http";
+    }
+
+    @Override
+    protected boolean preserveIndicesUponCompletion() {
+        return true;
+    }
+
+    @Override
+    protected final boolean preserveClusterUponCompletion() {
+        return true;
+    }
+
+    @Override
+    protected final boolean preserveReposUponCompletion() {
+        return true;
+    }
+
+    @Override
+    protected final Settings restClientSettings() {
+        return Settings.builder()
+            .put(super.restClientSettings())
+            // increase the timeout here to 90 seconds to handle long waits for a green
+            // cluster health. the waits for green need to be longer than a minute to
+            // account for delayed shards
+            .put(OpenSearchRestTestCase.CLIENT_SOCKET_TIMEOUT, "90s")
+            .build();
+    }
+
+    /**
+     * Tests backward compatibility of a custom-codec index across versions: creates the index in the
+     * "old" round, then ingests data into it and runs a matchAll query against it in every round.
+     */
+    public void testDataIngestionAndSearchBackwardsCompatibility() throws Exception {
+        String round = System.getProperty("tests.rest.bwcsuite_round");
+        String index = "test-custom-codec-index";
+        // Constant-first comparison avoids an NPE when the round property is absent.
+        if ("old".equals(round)) {
+            createIndexIfNotExists(index);
+        }
+        ingestData(index);
+        searchMatchAll(index);
+    }
+
+    /**
+     * Ingests data into the test index
+     *
+     * @param index index to ingest data into
+     */
+    private void ingestData(String index) throws IOException {
+        assertTrue(indexExists(index));
+        ObjectMapper objectMapper = new ObjectMapper();
+        int numberOfRequests = Randomness.get().nextInt(10);
+        while (numberOfRequests-- > 0) {
+            // Fresh body per bulk request; a shared builder would re-send all prior documents.
+            StringBuilder bulkRequestBody = new StringBuilder();
+            for (int i = 0; i < Randomness.get().nextInt(100); i++) {
+                Map<String, Map<String, String>> indexRequest = new HashMap<>();
+                indexRequest.put("index", new HashMap<>() {
+                    {
+                        put("_index", index);
+                    }
+                });
+                bulkRequestBody.append(objectMapper.writeValueAsString(indexRequest) + "\n");
+                // asJson() already returns a JSON object string; re-serializing it with
+                // writeValueAsString would double-encode it into a JSON string scalar,
+                // which the bulk API rejects per item (masked by the overall 200 response).
+                bulkRequestBody.append(Song.randomSong().asJson() + "\n");
+            }
+            List<Response> responses = RestHelper.requestAgainstAllNodes(
+                testUserRestClient,
+                "POST",
+                "_bulk?refresh=wait_for",
+                RestHelper.toHttpEntity(bulkRequestBody.toString())
+            );
+            responses.forEach(r -> assertEquals(200, r.getStatusLine().getStatusCode()));
+        }
+    }
+
+    /**
+     * Runs a matchAll query against the test index
+     *
+     * @param index index to search
+     */
+    private void searchMatchAll(String index) throws IOException {
+        String matchAllQuery = "{\n" + "    \"query\": {\n" + "        \"match_all\": {}\n" + "    }\n" + "}";
+        int numberOfRequests = Randomness.get().nextInt(10);
+        while (numberOfRequests-- > 0) {
+            List<Response> responses = RestHelper.requestAgainstAllNodes(
+                testUserRestClient,
+                "POST",
+                index + "/_search",
+                RestHelper.toHttpEntity(matchAllQuery)
+            );
+            responses.forEach(r -> assertEquals(200, r.getStatusLine().getStatusCode()));
+        }
+    }
+
+    /**
+     * Creates a test index if it does not exist already
+     *
+     * @param index
index to create
+     */
+    private void createIndexIfNotExists(String index) throws IOException {
+
+        if (indexExists(index)) {
+            logger.info("Index {} already created for the domain", index);
+            return;
+        }
+
+        // creating index
+        createIndex(
+            index,
+            Settings.builder()
+                .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+                .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
+                .put("index.codec", randomFrom("zstd", "zstd_no_dict"))
+                .put("index.codec.compression_level", randomIntBetween(1, 6))
+                .build()
+        );
+        ensureGreen(index);
+    }
+
+    @AfterClass
+    public static void cleanUp() throws IOException {
+        IOUtils.close(testUserRestClient);
+    }
+
+}
diff --git a/bwc-test/src/test/java/org/opensearch/customcodecs/bwc/Song.java b/bwc-test/src/test/java/org/opensearch/customcodecs/bwc/Song.java
new file mode 100644
index 0000000..273fefc
--- /dev/null
+++ b/bwc-test/src/test/java/org/opensearch/customcodecs/bwc/Song.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ */
+package org.opensearch.customcodecs.bwc;
+
+import java.util.Map;
+import java.util.Objects;
+import java.util.UUID;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import org.opensearch.common.Randomness;
+
+public class Song {
+
+    public static final String FIELD_TITLE = "title";
+    public static final String FIELD_ARTIST = "artist";
+    public static final String FIELD_LYRICS = "lyrics";
+    public static final String FIELD_STARS = "stars";
+    public static final String FIELD_GENRE = "genre";
+    public static final String ARTIST_FIRST = "First artist";
+    public static final String ARTIST_STRING = "String";
+    public static final String ARTIST_TWINS = "Twins";
+    public static final String TITLE_MAGNUM_OPUS = "Magnum Opus";
+    public static final String TITLE_SONG_1_PLUS_1 = "Song 1+1";
+    public static final String TITLE_NEXT_SONG = "Next song";
+    public static final String ARTIST_NO = "No!";
+    public static final String TITLE_POISON = "Poison";
+
+    public static final String ARTIST_YES = "yes";
+
+    public static final String TITLE_AFFIRMATIVE = "Affirmative";
+
+    public static final String ARTIST_UNKNOWN = "unknown";
+    public static final String TITLE_CONFIDENTIAL = "confidential";
+
+    public static final String LYRICS_1 = "Very deep subject";
+    public static final String LYRICS_2 = "Once upon a time";
+    public static final String LYRICS_3 = "giant nonsense";
+    public static final String LYRICS_4 = "Much too much";
+    public static final String LYRICS_5 = "Little to little";
+    public static final String LYRICS_6 = "confidential secret classified";
+
+    public static final String GENRE_ROCK = "rock";
+    public static final String GENRE_JAZZ = "jazz";
+    public static final String GENRE_BLUES = "blues";
+
+    public static final String QUERY_TITLE_NEXT_SONG = FIELD_TITLE + ":" + "\"" + TITLE_NEXT_SONG + "\"";
+    public static final String QUERY_TITLE_POISON = FIELD_TITLE + ":" + TITLE_POISON;
+    public
static final String QUERY_TITLE_MAGNUM_OPUS = FIELD_TITLE + ":" + TITLE_MAGNUM_OPUS; + + public static final Song[] SONGS = { + new Song(ARTIST_FIRST, TITLE_MAGNUM_OPUS, LYRICS_1, 1, GENRE_ROCK), + new Song(ARTIST_STRING, TITLE_SONG_1_PLUS_1, LYRICS_2, 2, GENRE_BLUES), + new Song(ARTIST_TWINS, TITLE_NEXT_SONG, LYRICS_3, 3, GENRE_JAZZ), + new Song(ARTIST_NO, TITLE_POISON, LYRICS_4, 4, GENRE_ROCK), + new Song(ARTIST_YES, TITLE_AFFIRMATIVE, LYRICS_5, 5, GENRE_BLUES), + new Song(ARTIST_UNKNOWN, TITLE_CONFIDENTIAL, LYRICS_6, 6, GENRE_JAZZ)}; + + private final String artist; + private final String title; + private final String lyrics; + private final Integer stars; + private final String genre; + + public Song(String artist, String title, String lyrics, Integer stars, String genre) { + this.artist = Objects.requireNonNull(artist, "Artist is required"); + this.title = Objects.requireNonNull(title, "Title is required"); + this.lyrics = Objects.requireNonNull(lyrics, "Lyrics is required"); + this.stars = Objects.requireNonNull(stars, "Stars field is required"); + this.genre = Objects.requireNonNull(genre, "Genre field is required"); + } + + public String getArtist() { + return artist; + } + + public String getTitle() { + return title; + } + + public String getLyrics() { + return lyrics; + } + + public Integer getStars() { + return stars; + } + + public String getGenre() { + return genre; + } + + public Map asMap() { + return Map.of(FIELD_ARTIST, artist, FIELD_TITLE, title, FIELD_LYRICS, lyrics, FIELD_STARS, stars, FIELD_GENRE, genre); + } + + public String asJson() throws JsonProcessingException { + return new ObjectMapper().writeValueAsString(this.asMap()); + } + + public static Song randomSong() { + return new Song( + UUID.randomUUID().toString(), + UUID.randomUUID().toString(), + UUID.randomUUID().toString(), + Randomness.get().nextInt(5), + UUID.randomUUID().toString() + ); + } +} diff --git a/bwc-test/src/test/java/org/opensearch/customcodecs/bwc/helper/RestHelper.java 
b/bwc-test/src/test/java/org/opensearch/customcodecs/bwc/helper/RestHelper.java new file mode 100644 index 0000000..79d9f88 --- /dev/null +++ b/bwc-test/src/test/java/org/opensearch/customcodecs/bwc/helper/RestHelper.java @@ -0,0 +1,91 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.customcodecs.bwc.helper; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.http.Header; +import org.apache.http.HttpEntity; +import org.apache.http.entity.StringEntity; +import org.apache.http.message.BasicHeader; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.client.Request; +import org.opensearch.client.RequestOptions; +import org.opensearch.client.Response; +import org.opensearch.client.RestClient; +import org.opensearch.client.WarningsHandler; + +import static org.apache.http.entity.ContentType.APPLICATION_JSON; + +public class RestHelper { + + private static final Logger log = LogManager.getLogger(RestHelper.class); + + public static HttpEntity toHttpEntity(String jsonString) { + return new StringEntity(jsonString, APPLICATION_JSON); + } + + public static Response get(RestClient client, String url) throws IOException { + return makeRequest(client, "GET", url, null, null); + } + + public static Response makeRequest(RestClient client, String method, String endpoint, HttpEntity entity) throws IOException { + return makeRequest(client, method, endpoint, entity, null); + } + + public static Response makeRequest(RestClient client, String method, String endpoint, HttpEntity entity, List
headers) + throws IOException { + log.info("Making request " + method + " " + endpoint + ", with headers " + headers); + + Request request = new Request(method, endpoint); + + RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); + options.setWarningsHandler(WarningsHandler.PERMISSIVE); + if (headers != null) { + headers.forEach(header -> options.addHeader(header.getName(), header.getValue())); + } + request.setOptions(options.build()); + + if (entity != null) { + request.setEntity(entity); + } + + Response response = client.performRequest(request); + log.info("Recieved response " + response.getStatusLine()); + return response; + } + + public static List requestAgainstAllNodes(RestClient client, String method, String endpoint, HttpEntity entity) + throws IOException { + return requestAgainstAllNodes(client, method, endpoint, entity, null); + } + + public static List requestAgainstAllNodes( + RestClient client, + String method, + String endpoint, + HttpEntity entity, + List
headers + ) throws IOException { + int nodeCount = client.getNodes().size(); + List responses = new ArrayList<>(); + while (nodeCount-- > 0) { + responses.add(makeRequest(client, method, endpoint, entity, headers)); + } + return responses; + } + + public static Header getAuthorizationHeader(String username, String password) { + return new BasicHeader("Authorization", "Basic " + username + ":" + password); + } +} \ No newline at end of file diff --git a/settings.gradle b/settings.gradle index 6b599a5..bc41600 100644 --- a/settings.gradle +++ b/settings.gradle @@ -8,3 +8,4 @@ rootProject.name = "opensearch-custom-codecs" startParameter.excludedTaskNames=["publishPluginZipPublicationToMavenLocal", "publishPluginZipPublicationToStagingRepository"] + diff --git a/src/main/java/org/opensearch/index/codec/customcodecs/CustomCodecService.java b/src/main/java/org/opensearch/index/codec/customcodecs/CustomCodecService.java index de0eb2b..90519d2 100644 --- a/src/main/java/org/opensearch/index/codec/customcodecs/CustomCodecService.java +++ b/src/main/java/org/opensearch/index/codec/customcodecs/CustomCodecService.java @@ -47,11 +47,11 @@ public CustomCodecService(MapperService mapperService, IndexSettings indexSettin int compressionLevel = indexSettings.getValue(INDEX_CODEC_COMPRESSION_LEVEL_SETTING); final MapBuilder codecs = MapBuilder.newMapBuilder(); if (mapperService == null) { - codecs.put(ZSTD_CODEC, new ZstdCodec(compressionLevel)); - codecs.put(ZSTD_NO_DICT_CODEC, new ZstdNoDictCodec(compressionLevel)); + codecs.put(ZSTD_CODEC, new Zstd99Codec(compressionLevel)); + codecs.put(ZSTD_NO_DICT_CODEC, new ZstdNoDict99Codec(compressionLevel)); } else { - codecs.put(ZSTD_CODEC, new ZstdCodec(mapperService, logger, compressionLevel)); - codecs.put(ZSTD_NO_DICT_CODEC, new ZstdNoDictCodec(mapperService, logger, compressionLevel)); + codecs.put(ZSTD_CODEC, new Zstd99Codec(mapperService, logger, compressionLevel)); + codecs.put(ZSTD_NO_DICT_CODEC, new 
ZstdNoDict99Codec(mapperService, logger, compressionLevel)); } this.codecs = codecs.immutableMap(); } diff --git a/src/main/java/org/opensearch/index/codec/customcodecs/Lucene95CustomCodec.java b/src/main/java/org/opensearch/index/codec/customcodecs/Lucene99CustomCodec.java similarity index 75% rename from src/main/java/org/opensearch/index/codec/customcodecs/Lucene95CustomCodec.java rename to src/main/java/org/opensearch/index/codec/customcodecs/Lucene99CustomCodec.java index 89acc98..e0b3c2b 100644 --- a/src/main/java/org/opensearch/index/codec/customcodecs/Lucene95CustomCodec.java +++ b/src/main/java/org/opensearch/index/codec/customcodecs/Lucene99CustomCodec.java @@ -11,21 +11,21 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.StoredFieldsFormat; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.opensearch.index.codec.PerFieldMappingPostingFormatCodec; import org.opensearch.index.mapper.MapperService; -import java.util.Collections; import java.util.Set; /** * * Extends {@link FilterCodec} to reuse the functionality of Lucene Codec. * Supports two modes zstd and zstd_no_dict. + * Uses Lucene99 as the delegate codec * * @opensearch.internal */ -public abstract class Lucene95CustomCodec extends FilterCodec { +public abstract class Lucene99CustomCodec extends FilterCodec { /** Default compression level used for compression */ public static final int DEFAULT_COMPRESSION_LEVEL = 3; @@ -35,17 +35,11 @@ public enum Mode { /** * ZStandard mode with dictionary */ - ZSTD("ZSTD", Set.of("zstd")), + ZSTD("ZSTD99", Set.of("zstd")), /** * ZStandard mode without dictionary */ - ZSTD_NO_DICT("ZSTDNODICT", Set.of("zstd_no_dict")), - /** - * Deprecated ZStandard mode, added for backward compatibility to support indices created in 2.9.0 where - * both ZSTD and ZSTD_NO_DICT used Lucene95CustomCodec underneath. 
This should not be used to - * create new indices. - */ - ZSTD_DEPRECATED("Lucene95CustomCodec", Collections.emptySet()); + ZSTD_NO_DICT("ZSTDNODICT99", Set.of("zstd_no_dict")); private final String codec; private final Set aliases; @@ -77,7 +71,7 @@ public Set getAliases() { * * @param mode The compression codec (ZSTD or ZSTDNODICT). */ - public Lucene95CustomCodec(Mode mode) { + public Lucene99CustomCodec(Mode mode) { this(mode, DEFAULT_COMPRESSION_LEVEL); } @@ -89,9 +83,9 @@ public Lucene95CustomCodec(Mode mode) { * @param mode The compression codec (ZSTD or ZSTDNODICT). * @param compressionLevel The compression level. */ - public Lucene95CustomCodec(Mode mode, int compressionLevel) { - super(mode.getCodec(), new Lucene95Codec()); - this.storedFieldsFormat = new Lucene95CustomStoredFieldsFormat(mode, compressionLevel); + public Lucene99CustomCodec(Mode mode, int compressionLevel) { + super(mode.getCodec(), new Lucene99Codec()); + this.storedFieldsFormat = new Lucene99CustomStoredFieldsFormat(mode, compressionLevel); } /** @@ -104,9 +98,9 @@ public Lucene95CustomCodec(Mode mode, int compressionLevel) { * @param mapperService The mapper service. * @param logger The logger. 
*/ - public Lucene95CustomCodec(Mode mode, int compressionLevel, MapperService mapperService, Logger logger) { - super(mode.getCodec(), new PerFieldMappingPostingFormatCodec(Lucene95Codec.Mode.BEST_SPEED, mapperService, logger)); - this.storedFieldsFormat = new Lucene95CustomStoredFieldsFormat(mode, compressionLevel); + public Lucene99CustomCodec(Mode mode, int compressionLevel, MapperService mapperService, Logger logger) { + super(mode.getCodec(), new PerFieldMappingPostingFormatCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, logger)); + this.storedFieldsFormat = new Lucene99CustomStoredFieldsFormat(mode, compressionLevel); } @Override diff --git a/src/main/java/org/opensearch/index/codec/customcodecs/Lucene99CustomStoredFieldsFormat.java b/src/main/java/org/opensearch/index/codec/customcodecs/Lucene99CustomStoredFieldsFormat.java new file mode 100644 index 0000000..ff32c11 --- /dev/null +++ b/src/main/java/org/opensearch/index/codec/customcodecs/Lucene99CustomStoredFieldsFormat.java @@ -0,0 +1,140 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.codec.customcodecs; + +import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.codecs.StoredFieldsReader; +import org.apache.lucene.codecs.StoredFieldsWriter; +import org.apache.lucene.codecs.compressing.CompressionMode; +import org.apache.lucene.codecs.lucene90.compressing.Lucene90CompressingStoredFieldsFormat; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.opensearch.index.codec.customcodecs.backward_codecs.Lucene95CustomCodec; + +import java.io.IOException; +import java.util.Objects; + +/** Stored field format used by pluggable codec */ +public class Lucene99CustomStoredFieldsFormat extends StoredFieldsFormat { + + /** A key that we use to map to a mode */ + public static final String MODE_KEY = Lucene99CustomStoredFieldsFormat.class.getSimpleName() + ".mode"; + + protected static final int ZSTD_BLOCK_LENGTH = 10 * 48 * 1024; + protected static final int ZSTD_MAX_DOCS_PER_BLOCK = 4096; + protected static final int ZSTD_BLOCK_SHIFT = 10; + + private final CompressionMode zstdCompressionMode; + private final CompressionMode zstdNoDictCompressionMode; + + private final Lucene99CustomCodec.Mode mode; + private final int compressionLevel; + + /** default constructor */ + public Lucene99CustomStoredFieldsFormat() { + this(Lucene99CustomCodec.Mode.ZSTD, Lucene99CustomCodec.DEFAULT_COMPRESSION_LEVEL); + } + + /** + * Creates a new instance. + * + * @param mode The mode represents ZSTD or ZSTDNODICT + */ + public Lucene99CustomStoredFieldsFormat(Lucene99CustomCodec.Mode mode) { + this(mode, Lucene99CustomCodec.DEFAULT_COMPRESSION_LEVEL); + } + + /** + * Creates a new instance with the specified mode and compression level. + * + * @param mode The mode represents ZSTD or ZSTDNODICT + * @param compressionLevel The compression level for the mode. 
+ */ + public Lucene99CustomStoredFieldsFormat(Lucene99CustomCodec.Mode mode, int compressionLevel) { + this.mode = Objects.requireNonNull(mode); + this.compressionLevel = compressionLevel; + zstdCompressionMode = new ZstdCompressionMode(compressionLevel); + zstdNoDictCompressionMode = new ZstdNoDictCompressionMode(compressionLevel); + } + + /** + * Returns a {@link StoredFieldsReader} to load stored fields. + * @param directory The index directory. + * @param si The SegmentInfo that stores segment information. + * @param fn The fieldInfos. + * @param context The IOContext that holds additional details on the merge/search context. + */ + @Override + public StoredFieldsReader fieldsReader(Directory directory, SegmentInfo si, FieldInfos fn, IOContext context) throws IOException { + if (si.getAttribute(MODE_KEY) !=null){ + String value = si.getAttribute(MODE_KEY); + Lucene99CustomCodec.Mode mode = Lucene99CustomCodec.Mode.valueOf(value); + return impl(mode).fieldsReader(directory, si, fn, context); + } else { + throw new IllegalStateException("missing value for " + MODE_KEY + " for segment: " + si.name); + } + } + + /** + * Returns a {@link StoredFieldsReader} to write stored fields. + * @param directory The index directory. + * @param si The SegmentInfo that stores segment information. + * @param context The IOContext that holds additional details on the merge/search context. 
+ */ + @Override + public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) throws IOException { + String previous = si.putAttribute(MODE_KEY, mode.name()); + if (previous != null && previous.equals(mode.name()) == false) { + throw new IllegalStateException( + "found existing value for " + MODE_KEY + " for segment: " + si.name + " old = " + previous + ", new = " + mode.name() + ); + } + return impl(mode).fieldsWriter(directory, si, context); + } + + StoredFieldsFormat impl(Lucene99CustomCodec.Mode mode) { + switch (mode) { + case ZSTD: + return getCustomCompressingStoredFieldsFormat("CustomStoredFieldsZstd", this.zstdCompressionMode); + case ZSTD_NO_DICT: + return getCustomCompressingStoredFieldsFormat("CustomStoredFieldsZstdNoDict", this.zstdNoDictCompressionMode); + default: + throw new AssertionError(); + } + } + + + private StoredFieldsFormat getCustomCompressingStoredFieldsFormat(String formatName, CompressionMode compressionMode) { + return new Lucene90CompressingStoredFieldsFormat( + formatName, + compressionMode, + ZSTD_BLOCK_LENGTH, + ZSTD_MAX_DOCS_PER_BLOCK, + ZSTD_BLOCK_SHIFT + ); + } + + public Lucene99CustomCodec.Mode getMode() { + return mode; + } + + /** + * Returns the compression level. + */ + public int getCompressionLevel() { + return compressionLevel; + } + + public CompressionMode getCompressionMode() { + return mode == Lucene99CustomCodec.Mode.ZSTD_NO_DICT ? 
zstdNoDictCompressionMode : zstdCompressionMode; + } + +} diff --git a/src/main/java/org/opensearch/index/codec/customcodecs/ZstdCodec.java b/src/main/java/org/opensearch/index/codec/customcodecs/Zstd99Codec.java similarity index 86% rename from src/main/java/org/opensearch/index/codec/customcodecs/ZstdCodec.java rename to src/main/java/org/opensearch/index/codec/customcodecs/Zstd99Codec.java index a3e3a34..df86806 100644 --- a/src/main/java/org/opensearch/index/codec/customcodecs/ZstdCodec.java +++ b/src/main/java/org/opensearch/index/codec/customcodecs/Zstd99Codec.java @@ -20,12 +20,12 @@ /** * ZstdCodec provides ZSTD compressor using the zstd-jni library. */ -public class ZstdCodec extends Lucene95CustomCodec implements CodecSettings, CodecAliases { +public class Zstd99Codec extends Lucene99CustomCodec implements CodecSettings, CodecAliases { /** * Creates a new ZstdCodec instance with the default compression level. */ - public ZstdCodec() { + public Zstd99Codec() { this(DEFAULT_COMPRESSION_LEVEL); } @@ -34,7 +34,7 @@ public ZstdCodec() { * * @param compressionLevel The compression level. */ - public ZstdCodec(int compressionLevel) { + public Zstd99Codec(int compressionLevel) { super(Mode.ZSTD, compressionLevel); } @@ -45,7 +45,7 @@ public ZstdCodec(int compressionLevel) { * @param logger The logger. * @param compressionLevel The compression level. 
*/ - public ZstdCodec(MapperService mapperService, Logger logger, int compressionLevel) { + public Zstd99Codec(MapperService mapperService, Logger logger, int compressionLevel) { super(Mode.ZSTD, compressionLevel, mapperService, logger); } diff --git a/src/main/java/org/opensearch/index/codec/customcodecs/ZstdNoDictCodec.java b/src/main/java/org/opensearch/index/codec/customcodecs/ZstdNoDict99Codec.java similarity index 85% rename from src/main/java/org/opensearch/index/codec/customcodecs/ZstdNoDictCodec.java rename to src/main/java/org/opensearch/index/codec/customcodecs/ZstdNoDict99Codec.java index ea7351f..1d65aee 100644 --- a/src/main/java/org/opensearch/index/codec/customcodecs/ZstdNoDictCodec.java +++ b/src/main/java/org/opensearch/index/codec/customcodecs/ZstdNoDict99Codec.java @@ -20,12 +20,12 @@ /** * ZstdNoDictCodec provides ZSTD compressor without a dictionary support. */ -public class ZstdNoDictCodec extends Lucene95CustomCodec implements CodecSettings, CodecAliases { +public class ZstdNoDict99Codec extends Lucene99CustomCodec implements CodecSettings, CodecAliases { /** * Creates a new ZstdNoDictCodec instance with the default compression level. */ - public ZstdNoDictCodec() { + public ZstdNoDict99Codec() { this(DEFAULT_COMPRESSION_LEVEL); } @@ -34,7 +34,7 @@ public ZstdNoDictCodec() { * * @param compressionLevel The compression level. */ - public ZstdNoDictCodec(int compressionLevel) { + public ZstdNoDict99Codec(int compressionLevel) { super(Mode.ZSTD_NO_DICT, compressionLevel); } @@ -45,7 +45,7 @@ public ZstdNoDictCodec(int compressionLevel) { * @param logger The logger. * @param compressionLevel The compression level. 
*/ - public ZstdNoDictCodec(MapperService mapperService, Logger logger, int compressionLevel) { + public ZstdNoDict99Codec(MapperService mapperService, Logger logger, int compressionLevel) { super(Mode.ZSTD_NO_DICT, compressionLevel, mapperService, logger); } diff --git a/src/main/java/org/opensearch/index/codec/customcodecs/backward_codecs/Lucene95CustomCodec.java b/src/main/java/org/opensearch/index/codec/customcodecs/backward_codecs/Lucene95CustomCodec.java new file mode 100644 index 0000000..2924590 --- /dev/null +++ b/src/main/java/org/opensearch/index/codec/customcodecs/backward_codecs/Lucene95CustomCodec.java @@ -0,0 +1,93 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.customcodecs.backward_codecs; + +import org.apache.lucene.codecs.FilterCodec; +import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.backward_codecs.lucene95.Lucene95Codec; + +import java.util.Collections; +import java.util.Set; + +/** + * + * Extends {@link FilterCodec} to reuse the functionality of Lucene Codec. + * Supports two modes zstd and zstd_no_dict. + * Uses Lucene95 as the delegate codec + * + * @opensearch.internal + */ +public abstract class Lucene95CustomCodec extends FilterCodec { + + /** Default compression level used for compression */ + public static final int DEFAULT_COMPRESSION_LEVEL = 3; + + /** Each mode represents a compression algorithm. */ + public enum Mode { + /** + * ZStandard mode with dictionary + */ + ZSTD("ZSTD", Set.of("zstd")), + /** + * ZStandard mode without dictionary + */ + ZSTD_NO_DICT("ZSTDNODICT", Set.of("zstd_no_dict")), + /** + * Deprecated ZStandard mode, added for backward compatibility to support indices created in 2.9.0 where + * both ZSTD and ZSTD_NO_DICT used Lucene95CustomCodec underneath. 
This should not be used to + * create new indices. + */ + ZSTD_DEPRECATED("Lucene95CustomCodec", Collections.emptySet()); + + private final String codec; + private final Set aliases; + + Mode(String codec, Set aliases) { + this.codec = codec; + this.aliases = aliases; + } + + /** + * Returns the Codec that is registered with Lucene + */ + public String getCodec() { + return codec; + } + + /** + * Returns the aliases of the Codec + */ + public Set getAliases() { + return aliases; + } + } + + private final StoredFieldsFormat storedFieldsFormat; + + /** + * Creates a new compression codec. + * + * @param mode The compression codec (ZSTD or ZSTDNODICT). + */ + + public Lucene95CustomCodec(Mode mode) { + super(mode.getCodec(), new Lucene95Codec()); + this.storedFieldsFormat = new Lucene95CustomStoredFieldsFormat(); + } + + @Override + public StoredFieldsFormat storedFieldsFormat() { + return storedFieldsFormat; + } + + @Override + public String toString() { + return getClass().getSimpleName(); + } +} diff --git a/src/main/java/org/opensearch/index/codec/customcodecs/Lucene95CustomStoredFieldsFormat.java b/src/main/java/org/opensearch/index/codec/customcodecs/backward_codecs/Lucene95CustomStoredFieldsFormat.java similarity index 74% rename from src/main/java/org/opensearch/index/codec/customcodecs/Lucene95CustomStoredFieldsFormat.java rename to src/main/java/org/opensearch/index/codec/customcodecs/backward_codecs/Lucene95CustomStoredFieldsFormat.java index 512df54..be92032 100644 --- a/src/main/java/org/opensearch/index/codec/customcodecs/Lucene95CustomStoredFieldsFormat.java +++ b/src/main/java/org/opensearch/index/codec/customcodecs/backward_codecs/Lucene95CustomStoredFieldsFormat.java @@ -6,7 +6,7 @@ * compatible open source license. 
*/ -package org.opensearch.index.codec.customcodecs; +package org.opensearch.index.codec.customcodecs.backward_codecs; import org.apache.lucene.codecs.StoredFieldsFormat; import org.apache.lucene.codecs.StoredFieldsReader; @@ -17,6 +17,8 @@ import org.apache.lucene.index.SegmentInfo; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; +import org.opensearch.index.codec.customcodecs.ZstdCompressionMode; +import org.opensearch.index.codec.customcodecs.ZstdNoDictCompressionMode; import java.io.IOException; import java.util.Objects; @@ -60,17 +62,17 @@ public Lucene95CustomStoredFieldsFormat(Lucene95CustomCodec.Mode mode) { public Lucene95CustomStoredFieldsFormat(Lucene95CustomCodec.Mode mode, int compressionLevel) { this.mode = Objects.requireNonNull(mode); this.compressionLevel = compressionLevel; - zstdCompressionMode = new ZstdCompressionMode(compressionLevel); - zstdNoDictCompressionMode = new ZstdNoDictCompressionMode(compressionLevel); + zstdCompressionMode = new ZstdCompressionMode(compressionLevel){}; + zstdNoDictCompressionMode = new ZstdNoDictCompressionMode(compressionLevel){}; } /** - * Returns a {@link StoredFieldsReader} to load stored fields. - * @param directory The index directory. - * @param si The SegmentInfo that stores segment information. - * @param fn The fieldInfos. - * @param context The IOContext that holds additional details on the merge/search context. - */ + * Returns a {@link StoredFieldsReader} to load stored fields. + * @param directory The index directory. + * @param si The SegmentInfo that stores segment information. + * @param fn The fieldInfos. + * @param context The IOContext that holds additional details on the merge/search context. 
+ */ @Override public StoredFieldsReader fieldsReader(Directory directory, SegmentInfo si, FieldInfos fn, IOContext context) throws IOException { String value = si.getAttribute(MODE_KEY); @@ -82,17 +84,17 @@ public StoredFieldsReader fieldsReader(Directory directory, SegmentInfo si, Fiel } /** - * Returns a {@link StoredFieldsReader} to write stored fields. - * @param directory The index directory. - * @param si The SegmentInfo that stores segment information. - * @param context The IOContext that holds additional details on the merge/search context. - */ + * Returns a {@link StoredFieldsReader} to write stored fields. + * @param directory The index directory. + * @param si The SegmentInfo that stores segment information. + * @param context The IOContext that holds additional details on the merge/search context. + */ @Override public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) throws IOException { String previous = si.putAttribute(MODE_KEY, mode.name()); if (previous != null && previous.equals(mode.name()) == false) { throw new IllegalStateException( - "found existing value for " + MODE_KEY + " for segment: " + si.name + " old = " + previous + ", new = " + mode.name() + "found existing value for " + MODE_KEY + " for segment: " + si.name + " old = " + previous + ", new = " + mode.name() ); } return impl(mode).fieldsWriter(directory, si, context); @@ -103,19 +105,19 @@ StoredFieldsFormat impl(Lucene95CustomCodec.Mode mode) { case ZSTD: case ZSTD_DEPRECATED: return new Lucene90CompressingStoredFieldsFormat( - "CustomStoredFieldsZstd", - zstdCompressionMode, - ZSTD_BLOCK_LENGTH, - ZSTD_MAX_DOCS_PER_BLOCK, - ZSTD_BLOCK_SHIFT + "CustomStoredFieldsZstd", + zstdCompressionMode, + ZSTD_BLOCK_LENGTH, + ZSTD_MAX_DOCS_PER_BLOCK, + ZSTD_BLOCK_SHIFT ); case ZSTD_NO_DICT: return new Lucene90CompressingStoredFieldsFormat( - "CustomStoredFieldsZstdNoDict", - zstdNoDictCompressionMode, - ZSTD_BLOCK_LENGTH, - ZSTD_MAX_DOCS_PER_BLOCK, - 
ZSTD_BLOCK_SHIFT + "CustomStoredFieldsZstdNoDict", + zstdNoDictCompressionMode, + ZSTD_BLOCK_LENGTH, + ZSTD_MAX_DOCS_PER_BLOCK, + ZSTD_BLOCK_SHIFT ); default: throw new AssertionError(); @@ -137,4 +139,4 @@ public CompressionMode getCompressionMode() { return mode == Lucene95CustomCodec.Mode.ZSTD_NO_DICT ? zstdNoDictCompressionMode : zstdCompressionMode; } -} +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/index/codec/customcodecs/backward_codecs/Zstd95Codec.java b/src/main/java/org/opensearch/index/codec/customcodecs/backward_codecs/Zstd95Codec.java new file mode 100644 index 0000000..e41f690 --- /dev/null +++ b/src/main/java/org/opensearch/index/codec/customcodecs/backward_codecs/Zstd95Codec.java @@ -0,0 +1,46 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.customcodecs.backward_codecs; + +import org.opensearch.common.settings.Setting; +import org.opensearch.index.codec.CodecAliases; +import org.opensearch.index.codec.CodecSettings; +import org.opensearch.index.engine.EngineConfig; + +import java.util.Set; + +/** + * ZstdCodec provides ZSTD compressor using the zstd-jni library. + */ +public class Zstd95Codec extends Lucene95CustomCodec implements CodecSettings, CodecAliases { + + /** + * Creates a new ZstdCodec instance with the default compression level. + */ + public Zstd95Codec() { + super(Mode.ZSTD); + } + + + /** The name for this codec. 
*/ + @Override + public String toString() { + return getClass().getSimpleName(); + } + + @Override + public boolean supports(Setting setting) { + return setting.equals(EngineConfig.INDEX_CODEC_COMPRESSION_LEVEL_SETTING); + } + + @Override + public Set aliases() { + return Mode.ZSTD.getAliases(); + } +} diff --git a/src/main/java/org/opensearch/index/codec/customcodecs/ZstdDeprecatedCodec.java b/src/main/java/org/opensearch/index/codec/customcodecs/backward_codecs/Zstd95DeprecatedCodec.java similarity index 51% rename from src/main/java/org/opensearch/index/codec/customcodecs/ZstdDeprecatedCodec.java rename to src/main/java/org/opensearch/index/codec/customcodecs/backward_codecs/Zstd95DeprecatedCodec.java index 02fa386..61e16ad 100644 --- a/src/main/java/org/opensearch/index/codec/customcodecs/ZstdDeprecatedCodec.java +++ b/src/main/java/org/opensearch/index/codec/customcodecs/backward_codecs/Zstd95DeprecatedCodec.java @@ -6,46 +6,24 @@ * compatible open source license. */ -package org.opensearch.index.codec.customcodecs; +package org.opensearch.index.codec.customcodecs.backward_codecs; -import org.apache.logging.log4j.Logger; import org.opensearch.common.settings.Setting; import org.opensearch.index.codec.CodecSettings; import org.opensearch.index.engine.EngineConfig; -import org.opensearch.index.mapper.MapperService; /** * ZstdDeprecatedCodec provides ZSTD compressor using the zstd-jni library. * Added to support backward compatibility for indices created with Lucene95CustomCodec as codec name. */ @Deprecated(since = "2.10") -public class ZstdDeprecatedCodec extends Lucene95CustomCodec implements CodecSettings { +public class Zstd95DeprecatedCodec extends Lucene95CustomCodec implements CodecSettings { /** * Creates a new ZstdDefaultCodec instance with the default compression level. */ - public ZstdDeprecatedCodec() { - this(DEFAULT_COMPRESSION_LEVEL); - } - - /** - * Creates a new ZstdDefaultCodec instance. - * - * @param compressionLevel The compression level. 
- */ - public ZstdDeprecatedCodec(int compressionLevel) { - super(Mode.ZSTD_DEPRECATED, compressionLevel); - } - - /** - * Creates a new ZstdDefaultCodec instance. - * - * @param mapperService The mapper service. - * @param logger The logger. - * @param compressionLevel The compression level. - */ - public ZstdDeprecatedCodec(MapperService mapperService, Logger logger, int compressionLevel) { - super(Mode.ZSTD_DEPRECATED, compressionLevel, mapperService, logger); + public Zstd95DeprecatedCodec() { + super(Mode.ZSTD_DEPRECATED); } /** The name for this codec. */ diff --git a/src/main/java/org/opensearch/index/codec/customcodecs/backward_codecs/ZstdNoDict95Codec.java b/src/main/java/org/opensearch/index/codec/customcodecs/backward_codecs/ZstdNoDict95Codec.java new file mode 100644 index 0000000..f85753d --- /dev/null +++ b/src/main/java/org/opensearch/index/codec/customcodecs/backward_codecs/ZstdNoDict95Codec.java @@ -0,0 +1,45 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.customcodecs.backward_codecs; + +import org.opensearch.common.settings.Setting; +import org.opensearch.index.codec.CodecAliases; +import org.opensearch.index.codec.CodecSettings; +import org.opensearch.index.engine.EngineConfig; + +import java.util.Set; + +/** + * ZstdNoDictCodec provides ZSTD compressor without a dictionary support. + */ +public class ZstdNoDict95Codec extends Lucene95CustomCodec implements CodecSettings, CodecAliases { + + /** + * Creates a new ZstdNoDictCodec instance with the default compression level. + */ + public ZstdNoDict95Codec() { + super(Mode.ZSTD_NO_DICT); + } + + /** The name for this codec. 
*/ + @Override + public String toString() { + return getClass().getSimpleName(); + } + + @Override + public boolean supports(Setting setting) { + return setting.equals(EngineConfig.INDEX_CODEC_COMPRESSION_LEVEL_SETTING); + } + + @Override + public Set aliases() { + return Mode.ZSTD_NO_DICT.getAliases(); + } +} diff --git a/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index ba50540..5912ede 100644 --- a/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -1,3 +1,5 @@ -org.opensearch.index.codec.customcodecs.ZstdCodec -org.opensearch.index.codec.customcodecs.ZstdNoDictCodec -org.opensearch.index.codec.customcodecs.ZstdDeprecatedCodec +org.opensearch.index.codec.customcodecs.backward_codecs.Zstd95Codec +org.opensearch.index.codec.customcodecs.backward_codecs.ZstdNoDict95Codec +org.opensearch.index.codec.customcodecs.backward_codecs.Zstd95DeprecatedCodec +org.opensearch.index.codec.customcodecs.Zstd99Codec +org.opensearch.index.codec.customcodecs.ZstdNoDict99Codec \ No newline at end of file diff --git a/src/test/java/org/opensearch/index/codec/customcodecs/CustomCodecTests.java b/src/test/java/org/opensearch/index/codec/customcodecs/CustomCodecTests.java index 5365b9e..9cc3157 100644 --- a/src/test/java/org/opensearch/index/codec/customcodecs/CustomCodecTests.java +++ b/src/test/java/org/opensearch/index/codec/customcodecs/CustomCodecTests.java @@ -35,7 +35,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -81,16 +81,16 @@ public 
void setup() { public void testZstd() throws Exception { Codec codec = createCodecService(false).codec("zstd"); - assertStoredFieldsCompressionEquals(Lucene95CustomCodec.Mode.ZSTD, codec); - Lucene95CustomStoredFieldsFormat storedFieldsFormat = (Lucene95CustomStoredFieldsFormat) codec.storedFieldsFormat(); - assertEquals(Lucene95CustomCodec.DEFAULT_COMPRESSION_LEVEL, storedFieldsFormat.getCompressionLevel()); + assertStoredFieldsCompressionEquals(Lucene99CustomCodec.Mode.ZSTD, codec); + Lucene99CustomStoredFieldsFormat storedFieldsFormat = (Lucene99CustomStoredFieldsFormat) codec.storedFieldsFormat(); + assertEquals(Lucene99CustomCodec.DEFAULT_COMPRESSION_LEVEL, storedFieldsFormat.getCompressionLevel()); } public void testZstdNoDict() throws Exception { Codec codec = createCodecService(false).codec("zstd_no_dict"); - assertStoredFieldsCompressionEquals(Lucene95CustomCodec.Mode.ZSTD_NO_DICT, codec); - Lucene95CustomStoredFieldsFormat storedFieldsFormat = (Lucene95CustomStoredFieldsFormat) codec.storedFieldsFormat(); - assertEquals(Lucene95CustomCodec.DEFAULT_COMPRESSION_LEVEL, storedFieldsFormat.getCompressionLevel()); + assertStoredFieldsCompressionEquals(Lucene99CustomCodec.Mode.ZSTD_NO_DICT, codec); + Lucene99CustomStoredFieldsFormat storedFieldsFormat = (Lucene99CustomStoredFieldsFormat) codec.storedFieldsFormat(); + assertEquals(Lucene99CustomCodec.DEFAULT_COMPRESSION_LEVEL, storedFieldsFormat.getCompressionLevel()); } public void testZstdDeprecatedCodec() { @@ -104,16 +104,16 @@ public void testZstdDeprecatedCodec() { public void testZstdWithCompressionLevel() throws Exception { int randomCompressionLevel = randomIntBetween(1, 6); Codec codec = createCodecService(randomCompressionLevel, "zstd").codec("zstd"); - assertStoredFieldsCompressionEquals(Lucene95CustomCodec.Mode.ZSTD, codec); - Lucene95CustomStoredFieldsFormat storedFieldsFormat = (Lucene95CustomStoredFieldsFormat) codec.storedFieldsFormat(); + 
assertStoredFieldsCompressionEquals(Lucene99CustomCodec.Mode.ZSTD, codec); + Lucene99CustomStoredFieldsFormat storedFieldsFormat = (Lucene99CustomStoredFieldsFormat) codec.storedFieldsFormat(); assertEquals(randomCompressionLevel, storedFieldsFormat.getCompressionLevel()); } public void testZstdNoDictWithCompressionLevel() throws Exception { int randomCompressionLevel = randomIntBetween(1, 6); Codec codec = createCodecService(randomCompressionLevel, "zstd_no_dict").codec("zstd_no_dict"); - assertStoredFieldsCompressionEquals(Lucene95CustomCodec.Mode.ZSTD_NO_DICT, codec); - Lucene95CustomStoredFieldsFormat storedFieldsFormat = (Lucene95CustomStoredFieldsFormat) codec.storedFieldsFormat(); + assertStoredFieldsCompressionEquals(Lucene99CustomCodec.Mode.ZSTD_NO_DICT, codec); + Lucene99CustomStoredFieldsFormat storedFieldsFormat = (Lucene99CustomStoredFieldsFormat) codec.storedFieldsFormat(); assertEquals(randomCompressionLevel, storedFieldsFormat.getCompressionLevel()); } @@ -154,41 +154,41 @@ public void testZstandardCompressionLevelSupport() throws Exception { public void testDefaultMapperServiceNull() throws Exception { Codec codec = createCodecService(true).codec("default"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_SPEED, codec); + assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_SPEED, codec); } public void testBestCompressionMapperServiceNull() throws Exception { Codec codec = createCodecService(true).codec("best_compression"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_COMPRESSION, codec); + assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_COMPRESSION, codec); } public void testZstdMapperServiceNull() throws Exception { Codec codec = createCodecService(true).codec("zstd"); - assertStoredFieldsCompressionEquals(Lucene95CustomCodec.Mode.ZSTD, codec); - Lucene95CustomStoredFieldsFormat storedFieldsFormat = (Lucene95CustomStoredFieldsFormat) codec.storedFieldsFormat(); - 
assertEquals(Lucene95CustomCodec.DEFAULT_COMPRESSION_LEVEL, storedFieldsFormat.getCompressionLevel()); + assertStoredFieldsCompressionEquals(Lucene99CustomCodec.Mode.ZSTD, codec); + Lucene99CustomStoredFieldsFormat storedFieldsFormat = (Lucene99CustomStoredFieldsFormat) codec.storedFieldsFormat(); + assertEquals(Lucene99CustomCodec.DEFAULT_COMPRESSION_LEVEL, storedFieldsFormat.getCompressionLevel()); } public void testZstdNoDictMapperServiceNull() throws Exception { Codec codec = createCodecService(true).codec("zstd_no_dict"); - assertStoredFieldsCompressionEquals(Lucene95CustomCodec.Mode.ZSTD_NO_DICT, codec); - Lucene95CustomStoredFieldsFormat storedFieldsFormat = (Lucene95CustomStoredFieldsFormat) codec.storedFieldsFormat(); - assertEquals(Lucene95CustomCodec.DEFAULT_COMPRESSION_LEVEL, storedFieldsFormat.getCompressionLevel()); + assertStoredFieldsCompressionEquals(Lucene99CustomCodec.Mode.ZSTD_NO_DICT, codec); + Lucene99CustomStoredFieldsFormat storedFieldsFormat = (Lucene99CustomStoredFieldsFormat) codec.storedFieldsFormat(); + assertEquals(Lucene99CustomCodec.DEFAULT_COMPRESSION_LEVEL, storedFieldsFormat.getCompressionLevel()); } // write some docs with it, inspect .si to see this was the used compression - private void assertStoredFieldsCompressionEquals(Lucene95Codec.Mode expected, Codec actual) throws Exception { + private void assertStoredFieldsCompressionEquals(Lucene99Codec.Mode expected, Codec actual) throws Exception { SegmentReader sr = getSegmentReader(actual); String v = sr.getSegmentInfo().info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY); assertNotNull(v); - assertEquals(expected, Lucene95Codec.Mode.valueOf(v)); + assertEquals(expected, Lucene99Codec.Mode.valueOf(v)); } - private void assertStoredFieldsCompressionEquals(Lucene95CustomCodec.Mode expected, Codec actual) throws Exception { + private void assertStoredFieldsCompressionEquals(Lucene99CustomCodec.Mode expected, Codec actual) throws Exception { SegmentReader sr = 
getSegmentReader(actual); - String v = sr.getSegmentInfo().info.getAttribute(Lucene95CustomStoredFieldsFormat.MODE_KEY); + String v = sr.getSegmentInfo().info.getAttribute(Lucene99CustomStoredFieldsFormat.MODE_KEY); assertNotNull(v); - assertEquals(expected, Lucene95CustomCodec.Mode.valueOf(v)); + assertEquals(expected, Lucene99CustomCodec.Mode.valueOf(v)); } private CodecService createCodecService(boolean isMapperServiceNull) throws IOException { diff --git a/src/test/java/org/opensearch/index/codec/customcodecs/Lucene95CustomStoredFieldsFormatTests.java b/src/test/java/org/opensearch/index/codec/customcodecs/Lucene95CustomStoredFieldsFormatTests.java index e669b7f..f03f164 100644 --- a/src/test/java/org/opensearch/index/codec/customcodecs/Lucene95CustomStoredFieldsFormatTests.java +++ b/src/test/java/org/opensearch/index/codec/customcodecs/Lucene95CustomStoredFieldsFormatTests.java @@ -8,6 +8,8 @@ package org.opensearch.index.codec.customcodecs; +import org.opensearch.index.codec.customcodecs.backward_codecs.Lucene95CustomCodec; +import org.opensearch.index.codec.customcodecs.backward_codecs.Lucene95CustomStoredFieldsFormat; import org.opensearch.test.OpenSearchTestCase; public class Lucene95CustomStoredFieldsFormatTests extends OpenSearchTestCase { @@ -19,7 +21,7 @@ public void testDefaultLucene95CustomCodecMode() { public void testZstdNoDictLucene95CustomCodecMode() { Lucene95CustomStoredFieldsFormat lucene95CustomStoredFieldsFormat = new Lucene95CustomStoredFieldsFormat( - Lucene95CustomCodec.Mode.ZSTD_NO_DICT + Lucene95CustomCodec.Mode.ZSTD_NO_DICT ); assertEquals(Lucene95CustomCodec.Mode.ZSTD_NO_DICT, lucene95CustomStoredFieldsFormat.getMode()); } @@ -27,8 +29,8 @@ public void testZstdNoDictLucene95CustomCodecMode() { public void testZstdModeWithCompressionLevel() { int randomCompressionLevel = randomIntBetween(1, 6); Lucene95CustomStoredFieldsFormat lucene95CustomStoredFieldsFormat = new Lucene95CustomStoredFieldsFormat( - Lucene95CustomCodec.Mode.ZSTD, - 
randomCompressionLevel + Lucene95CustomCodec.Mode.ZSTD, + randomCompressionLevel ); assertEquals(Lucene95CustomCodec.Mode.ZSTD, lucene95CustomStoredFieldsFormat.getMode()); assertEquals(randomCompressionLevel, lucene95CustomStoredFieldsFormat.getCompressionLevel()); @@ -37,8 +39,8 @@ public void testZstdModeWithCompressionLevel() { public void testZstdNoDictLucene95CustomCodecModeWithCompressionLevel() { int randomCompressionLevel = randomIntBetween(1, 6); Lucene95CustomStoredFieldsFormat lucene95CustomStoredFieldsFormat = new Lucene95CustomStoredFieldsFormat( - Lucene95CustomCodec.Mode.ZSTD_NO_DICT, - randomCompressionLevel + Lucene95CustomCodec.Mode.ZSTD_NO_DICT, + randomCompressionLevel ); assertEquals(Lucene95CustomCodec.Mode.ZSTD_NO_DICT, lucene95CustomStoredFieldsFormat.getMode()); assertEquals(randomCompressionLevel, lucene95CustomStoredFieldsFormat.getCompressionLevel()); @@ -54,4 +56,4 @@ public void testZstdNoDictCompressionModes(){ assertTrue(lucene95CustomStoredFieldsFormat.getCompressionMode() instanceof ZstdNoDictCompressionMode); } -} +} \ No newline at end of file diff --git a/src/test/java/org/opensearch/index/codec/customcodecs/Lucene99CustomStoredFieldsFormatTests.java b/src/test/java/org/opensearch/index/codec/customcodecs/Lucene99CustomStoredFieldsFormatTests.java new file mode 100644 index 0000000..c966901 --- /dev/null +++ b/src/test/java/org/opensearch/index/codec/customcodecs/Lucene99CustomStoredFieldsFormatTests.java @@ -0,0 +1,57 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.codec.customcodecs; + +import org.opensearch.test.OpenSearchTestCase; + +public class Lucene99CustomStoredFieldsFormatTests extends OpenSearchTestCase { + + public void testDefaultLucene99CustomCodecMode() { + Lucene99CustomStoredFieldsFormat lucene99CustomStoredFieldsFormat = new Lucene99CustomStoredFieldsFormat(); + assertEquals(Lucene99CustomCodec.Mode.ZSTD, lucene99CustomStoredFieldsFormat.getMode()); + } + + public void testZstdNoDictLucene99CustomCodecMode() { + Lucene99CustomStoredFieldsFormat lucene99CustomStoredFieldsFormat = new Lucene99CustomStoredFieldsFormat( + Lucene99CustomCodec.Mode.ZSTD_NO_DICT + ); + assertEquals(Lucene99CustomCodec.Mode.ZSTD_NO_DICT, lucene99CustomStoredFieldsFormat.getMode()); + } + + public void testZstdModeWithCompressionLevel() { + int randomCompressionLevel = randomIntBetween(1, 6); + Lucene99CustomStoredFieldsFormat lucene99CustomStoredFieldsFormat = new Lucene99CustomStoredFieldsFormat( + Lucene99CustomCodec.Mode.ZSTD, + randomCompressionLevel + ); + assertEquals(Lucene99CustomCodec.Mode.ZSTD, lucene99CustomStoredFieldsFormat.getMode()); + assertEquals(randomCompressionLevel, lucene99CustomStoredFieldsFormat.getCompressionLevel()); + } + + public void testZstdNoDictLucene99CustomCodecModeWithCompressionLevel() { + int randomCompressionLevel = randomIntBetween(1, 6); + Lucene99CustomStoredFieldsFormat lucene99CustomStoredFieldsFormat = new Lucene99CustomStoredFieldsFormat( + Lucene99CustomCodec.Mode.ZSTD_NO_DICT, + randomCompressionLevel + ); + assertEquals(Lucene99CustomCodec.Mode.ZSTD_NO_DICT, lucene99CustomStoredFieldsFormat.getMode()); + assertEquals(randomCompressionLevel, lucene99CustomStoredFieldsFormat.getCompressionLevel()); + } + + public void testCompressionModes(){ + Lucene99CustomStoredFieldsFormat lucene99CustomStoredFieldsFormat = new Lucene99CustomStoredFieldsFormat(); + assertTrue(lucene99CustomStoredFieldsFormat.getCompressionMode() instanceof ZstdCompressionMode); + } 
+ + public void testZstdNoDictCompressionModes(){ + Lucene99CustomStoredFieldsFormat lucene99CustomStoredFieldsFormat = new Lucene99CustomStoredFieldsFormat(Lucene99CustomCodec.Mode.ZSTD_NO_DICT); + assertTrue(lucene99CustomStoredFieldsFormat.getCompressionMode() instanceof ZstdNoDictCompressionMode); + } + +}