diff --git a/build.gradle b/build.gradle index c4c2b0a3f5407..20a40c27f73de 100644 --- a/build.gradle +++ b/build.gradle @@ -28,25 +28,22 @@ * under the License. */ -import java.nio.charset.StandardCharsets; -import java.io.ByteArrayOutputStream; import com.avast.gradle.dockercompose.tasks.ComposePull import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin import org.apache.tools.ant.taskdefs.condition.Os +import org.gradle.plugins.ide.eclipse.model.AccessRule +import org.gradle.plugins.ide.eclipse.model.SourceFolder import org.opensearch.gradle.BuildPlugin +import org.opensearch.gradle.CheckCompatibilityTask import org.opensearch.gradle.Version import org.opensearch.gradle.VersionProperties import org.opensearch.gradle.info.BuildParams import org.opensearch.gradle.plugin.PluginBuildPlugin import org.opensearch.gradle.tar.SymbolicLinkPreservingTar -import org.gradle.plugins.ide.eclipse.model.AccessRule -import org.gradle.plugins.ide.eclipse.model.EclipseJdt -import org.gradle.plugins.ide.eclipse.model.SourceFolder -import org.gradle.api.Project; -import org.gradle.process.ExecResult; -import org.opensearch.gradle.CheckCompatibilityTask + +import java.nio.charset.StandardCharsets import static org.opensearch.gradle.util.GradleUtils.maybeConfigure @@ -55,6 +52,7 @@ plugins { id 'opensearch.docker-support' id 'opensearch.global-build-info' id "com.diffplug.spotless" version "6.25.0" apply false + id "org.openrewrite.rewrite" version "7.11.0" apply false id "org.gradle.test-retry" version "1.6.2" apply false id "test-report-aggregation" id 'jacoco-report-aggregation' @@ -65,6 +63,7 @@ apply from: 'gradle/runtime-jdk-provision.gradle' apply from: 'gradle/ide.gradle' apply from: 'gradle/forbidden-dependencies.gradle' apply from: 'gradle/formatting.gradle' +apply from: 'gradle/rewrite.gradle' apply from: 'gradle/local-distribution.gradle' apply from: 'gradle/run.gradle' apply from: 'gradle/missing-javadoc.gradle' @@ -76,7 +75,31 @@ allprojects { version = VersionProperties.getOpenSearch() description = "OpenSearch subproject ${project.path}" } - +allprojects { + configurations.configureEach { + resolutionStrategy { + force( + 'org.jetbrains:annotations:26.0.2', + 'org.apache.commons:commons-text:1.13.1', + 'org.ow2.asm:asm:9.8', + 'org.ow2.asm:asm-util:9.8', + ) + eachDependency { details -> + if (details.requested.group.startsWith('com.fasterxml.jackson')) { + details.useVersion '2.17.3' + } + if (details.requested.group == 'org.openrewrite.recipe' && + details.requested.name == 'rewrite-java-dependencies') { + details.useVersion '1.37.0' + } + if (details.requested.group == 'com.google.errorprone' && + details.requested.name == 'error_prone_annotations') { + details.useVersion '2.36.0' + } + } + } + } +} configure(allprojects - project(':distribution:archives:integ-test-zip')) { project.pluginManager.withPlugin('nebula.maven-base-publish') { if (project.pluginManager.hasPlugin('opensearch.build') == false) { diff --git a/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java b/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java index 8e246ff9ecd11..f7edc453e5670 100644 --- a/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java +++ b/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java @@ -142,22 +142,22 @@ public void applyZipPublicationPluginWithConfig() throws IOException, URISyntaxE // and how these tasks are chained. 
The problem is that there is a known gradle issue (#20301) that does // not allow for it ATM. If, however, it is fixed in the future the following is the code that can // be used... - + Project project = ProjectBuilder.builder().build(); project.getPluginManager().apply(Publish.class); // add publications via API - + // evaluate the project ((DefaultProject)project).evaluate(); - + // - Check that "validatePluginZipPom" and/or "publishPluginZipPublicationToZipStagingRepository" // tasks have dependencies on "generatePomFileForNebulaPublication". // - Check that there is the staging repository added. - + // However, due to known issue(1): https://github.com/gradle/gradle/issues/20301 // it is impossible to reach to individual tasks and work with them. // (1): https://docs.gradle.org/7.4/release-notes.html#known-issues - + // I.e.: The following code throws exception, basically any access to individual tasks fails. project.getTasks().getByName("validatePluginZipPom"); ------------------------------- */ diff --git a/client/sniffer/licenses/jackson-core-2.18.2.jar.sha1 b/client/sniffer/licenses/jackson-core-2.18.2.jar.sha1 deleted file mode 100644 index 96350c9307ae7..0000000000000 --- a/client/sniffer/licenses/jackson-core-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fb64ccac5c27dca8819418eb4e443a9f496d9ee7 \ No newline at end of file diff --git a/gradle.properties b/gradle.properties index 47c3efdfbd2a0..de0c203360487 100644 --- a/gradle.properties +++ b/gradle.properties @@ -13,6 +13,8 @@ org.gradle.caching=true org.gradle.warning.mode=none org.gradle.parallel=true +# https://github.com/openrewrite/rewrite-gradle-plugin/issues/212 +#org.gradle.workers.max=3 org.gradle.jvmargs=-Xmx3g -XX:+HeapDumpOnOutOfMemoryError -Xss2m \ --add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED \ --add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED \ diff --git a/gradle/formatting.gradle b/gradle/formatting.gradle index 45d63fd43e875..ad8b1725244e0 100644 --- a/gradle/formatting.gradle +++ b/gradle/formatting.gradle @@ -65,7 +65,6 @@ allprojects { // non-standard places target '**/*.java' - removeUnusedImports() importOrder( 'de.thetaphi', 'com.carrotsearch', @@ -83,7 +82,6 @@ allprojects { ) eclipse().withP2Mirrors(Map.of("https://download.eclipse.org/", "https://mirror.umd.edu/eclipse/")).configFile rootProject.file('buildSrc/formatterConfig.xml') - trimTrailingWhitespace() endWithNewline() custom 'Refuse wildcard imports', { @@ -103,7 +101,6 @@ allprojects { targetExclude '**/simple-bulk11.json', '**/simple-msearch5.json' - trimTrailingWhitespace() endWithNewline() } } diff --git a/gradle/rewrite.gradle b/gradle/rewrite.gradle new file mode 100644 index 0000000000000..bdc5fb8379f41 --- /dev/null +++ b/gradle/rewrite.gradle @@ -0,0 +1,74 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.opensearch.gradle.BuildPlugin + +/** + * # - org.openrewrite.staticanalysis.ModifierOrder + # + # - org.openrewrite.java.format.RemoveTrailingWhitespace + # - org.openrewrite.java.recipes.JavaRecipeBestPractices + # - org.openrewrite.java.recipes.RecipeNullabilityBestPractices + # - org.openrewrite.java.recipes.RecipeTestingBestPractices + # - org.openrewrite.staticanalysis.EqualsAvoidsNull + # - org.openrewrite.staticanalysis.JavaApiBestPractices + # - org.openrewrite.staticanalysis.MissingOverrideAnnotation + # - org.openrewrite.staticanalysis.RemoveUnusedLocalVariables + # - org.openrewrite.staticanalysis.RemoveUnusedPrivateFields + # - org.openrewrite.staticanalysis.StringLiteralEquality + # - org.openrewrite.staticanalysis.WrappingAndBraces + # - org.openrewrite.text.EndOfLineAtEndOfFile + */ +allprojects { + plugins.withType(BuildPlugin).whenPluginAdded { + project.apply plugin: "org.openrewrite.rewrite" + rewrite { + activeRecipe("org.opensearch.openrewrite.recipe.CodeCleanup") + configFile = file("$rootDir/gradle/rewrite.yml") + exclusions.add("**OpenSearchTestCaseTests.java") + exclusions.add("**AbstractBenchmark.java") + exclusions.add("**ScriptClassPathResolutionContext.java") + exclusions.add("**StarTreeMapper.java") + failOnDryRunResults = true + } + repositories { + mavenCentral() + } + dependencies { + rewrite("org.openrewrite.recipe:rewrite-rewrite:0.9.0") + rewrite("org.openrewrite.recipe:rewrite-static-analysis:2.12.0") + } + tasks { + rewriteDryRun { + dependsOn(check) + } + } + } +} diff --git a/gradle/rewrite.yml b/gradle/rewrite.yml new file mode 100644 index 0000000000000..af2e9a77ec9db --- /dev/null +++ b/gradle/rewrite.yml @@ -0,0 +1,13 @@ +type: specs.openrewrite.org/v1beta/recipe +name: org.opensearch.openrewrite.recipe.CodeCleanup +displayName: CodeCleanup +description: Automatically cleanup code, e.g. remove unnecessary parentheses, simplify expressions. 
+recipeList: + - org.openrewrite.java.RemoveUnusedImports +# - org.openrewrite.staticanalysis.RemoveUnusedPrivateMethods +# - org.openrewrite.java.format.RemoveTrailingWhitespace +# - org.openrewrite.staticanalysis.EmptyBlock +# - org.openrewrite.staticanalysis.RemoveCallsToSystemGc +# - org.openrewrite.staticanalysis.RemoveUnusedLocalVariables +# - org.openrewrite.staticanalysis.RemoveUnusedPrivateFields +# - org.openrewrite.staticanalysis.UnnecessaryThrows diff --git a/libs/core/licenses/jackson-core-2.18.2.jar.sha1 b/libs/core/licenses/jackson-core-2.18.2.jar.sha1 deleted file mode 100644 index 96350c9307ae7..0000000000000 --- a/libs/core/licenses/jackson-core-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fb64ccac5c27dca8819418eb4e443a9f496d9ee7 \ No newline at end of file diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java index 552945d085884..987434a85d2f1 100644 --- a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java +++ b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java @@ -744,7 +744,7 @@ public XContentBuilder utf8Value(byte[] bytes, int offset, int length) throws IO /** * Write a time-based field and value, if the passed timeValue is null a * null value is written, otherwise a date transformers lookup is performed. - + * @throws IllegalArgumentException if there is no transformers for the type of object */ public XContentBuilder timeField(String name, Object timeValue) throws IOException { @@ -772,7 +772,7 @@ public XContentBuilder timeField(String name, String readableName, long value) t /** * Write a time-based value, if the value is null a null value is written, * otherwise a date transformers lookup is performed. - + * @throws IllegalArgumentException if there is no transformers for the type of object */ public XContentBuilder timeValue(Object timeValue) throws IOException { diff --git a/libs/dissect/src/main/java/org/opensearch/dissect/DissectParser.java b/libs/dissect/src/main/java/org/opensearch/dissect/DissectParser.java index 828d4b7de450e..c05f7a9816839 100644 --- a/libs/dissect/src/main/java/org/opensearch/dissect/DissectParser.java +++ b/libs/dissect/src/main/java/org/opensearch/dissect/DissectParser.java @@ -195,18 +195,18 @@ public DissectParser(String pattern, String appendSeparator) { */ public Map parse(String inputString) { /* - + This implements a naive string matching algorithm. The string is walked left to right, comparing each byte against another string's bytes looking for matches. If the bytes match, then a second cursor looks ahead to see if all the bytes of the other string matches. If they all match, record it and advances the primary cursor to the match point. If it can not match all of the bytes then progress the main cursor. Repeat till the end of the input string. Since the string being searching for (the delimiter) is generally small and rare the naive approach is efficient. - + In this case the string that is walked is the input string, and the string being searched for is the current delimiter. For example for a dissect pattern of {@code %{a},%{b}:%{c}} the delimiters (comma then colon) are searched for in the input string. At class construction the list of keys+delimiters are found (dissectPairs), which allows the use of that ordered list to know which delimiter to use for the search. The delimiters is progressed once the current delimiter is matched. 
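(Aside on the DissectParser hunk above, not part of the patch.) The comment describes a naive left-to-right delimiter scan: a primary cursor walks the input, a look-ahead checks whether the current delimiter matches in full, and on a match the accumulated value is recorded and the next delimiter becomes the search target. A minimal, purely illustrative sketch of that core loop — hypothetical names, no append/skip modifiers or empty-match handling, and not the actual DissectParser code — could look like this:

```java
import java.util.ArrayList;
import java.util.List;

class NaiveDelimiterScan {
    // Walk the input left to right; when the current delimiter matches at the
    // cursor, record the value accumulated so far and move on to the next delimiter.
    static List<String> split(String input, List<String> delimiters) {
        List<String> values = new ArrayList<>();
        int start = 0;          // start of the value currently being accumulated
        int cursor = 0;         // primary cursor walking the input string
        int d = 0;              // index of the delimiter currently searched for
        while (cursor < input.length() && d < delimiters.size()) {
            String delimiter = delimiters.get(d);
            if (input.startsWith(delimiter, cursor)) {   // look-ahead: all delimiter chars match
                values.add(input.substring(start, cursor));
                cursor += delimiter.length();            // advance past the matched delimiter
                start = cursor;
                d++;                                     // progress to the next delimiter
            } else {
                cursor++;                                // no match: progress the main cursor
            }
        }
        values.add(input.substring(start));              // value after the last delimiter
        return values;
    }

    public static void main(String[] args) {
        // For a pattern like %{a},%{b}:%{c} the delimiters are "," and ":".
        System.out.println(split("foo,bar:baz", List.of(",", ":"))); // [foo, bar, baz]
    }
}
```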
- + There are two special cases that requires additional parsing beyond the standard naive algorithm. Consecutive delimiters should results in a empty matches unless the {@code ->} is provided. For example given the dissect pattern of {@code %{a},%{b},%{c},%{d}} and input string of {@code foo,,,} the match should be successful with empty values for b,c and d. diff --git a/libs/x-content/licenses/jackson-core-2.18.2.jar.sha1 b/libs/x-content/licenses/jackson-core-2.18.2.jar.sha1 deleted file mode 100644 index 96350c9307ae7..0000000000000 --- a/libs/x-content/licenses/jackson-core-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fb64ccac5c27dca8819418eb4e443a9f496d9ee7 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.18.2.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.18.2.jar.sha1 deleted file mode 100644 index 8b946b98ddbf9..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-cbor-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d4870757eff0344130f60e3ddb882b2336640f73 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.18.2.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.18.2.jar.sha1 deleted file mode 100644 index 9fbdb9b3a2506..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-smile-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -44caf62d743bb5e5876e95efba5a55a1cab1b0db \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.18.2.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.18.2.jar.sha1 deleted file mode 100644 index 9dac9ee8e1e72..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-yaml-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d000e13505d1cf564371516fa3d5b8769a779dc9 \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.18.2.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.18.2.jar.sha1 deleted file mode 100644 index a06e1d5f28425..0000000000000 --- a/modules/ingest-geoip/licenses/jackson-annotations-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -985d77751ebc7fce5db115a986bc9aa82f973f4a \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.18.2.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.18.2.jar.sha1 deleted file mode 100644 index eedbfff66c705..0000000000000 --- a/modules/ingest-geoip/licenses/jackson-databind-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -deef8697b92141fb6caf7aa86966cff4eec9b04f \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/jackson-datatype-jsr310-2.18.2.jar.sha1 b/modules/ingest-geoip/licenses/jackson-datatype-jsr310-2.18.2.jar.sha1 deleted file mode 100644 index 7b9ab1d1e08d1..0000000000000 --- a/modules/ingest-geoip/licenses/jackson-datatype-jsr310-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7b6ff96adf421f4c6edbd694e797dd8fe434510a \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-9.7.jar.sha1 b/modules/lang-expression/licenses/asm-9.7.jar.sha1 deleted file mode 100644 index 84c9a9703af6d..0000000000000 --- a/modules/lang-expression/licenses/asm-9.7.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -073d7b3086e14beb604ced229c302feff6449723 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-9.7.jar.sha1 b/modules/lang-painless/licenses/asm-9.7.jar.sha1 deleted file mode 100644 index 84c9a9703af6d..0000000000000 --- a/modules/lang-painless/licenses/asm-9.7.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-073d7b3086e14beb604ced229c302feff6449723 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-util-9.7.jar.sha1 b/modules/lang-painless/licenses/asm-util-9.7.jar.sha1 deleted file mode 100644 index 37c0d27efe46f..0000000000000 --- a/modules/lang-painless/licenses/asm-util-9.7.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c0655519f24d92af2202cb681cd7c1569df6ead6 \ No newline at end of file diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/ContextExampleTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/ContextExampleTests.java index 740a49e6a9a77..6568b26241e7f 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/ContextExampleTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/ContextExampleTests.java @@ -47,12 +47,12 @@ public class ContextExampleTests extends ScriptTestCase { // **** Docs Generator Code **** /* - + import java.io.FileWriter; import java.io.IOException; - + public class Generator { - + public final static String[] theatres = new String[] {"Down Port", "Graye", "Skyline", "Courtyard"}; public final static String[] plays = new String[] {"Driving", "Pick It Up", "Sway and Pull", "Harriot", "The Busline", "Ants Underground", "Exploria", "Line and Single", "Shafted", "Sunnyside Down", @@ -61,7 +61,7 @@ public class Generator { "Joel Madigan", "Jessica Brown", "Baz Knight", "Jo Hangum", "Rachel Grass", "Phoebe Miller", "Sarah Notch", "Brayden Green", "Joshua Iller", "Jon Hittle", "Rob Kettleman", "Laura Conrad", "Simon Hower", "Nora Blue", "Mike Candlestick", "Jacey Bell"}; - + public static void writeSeat(FileWriter writer, int id, String theatre, String play, String[] actors, String date, String time, int row, int number, double cost, boolean sold) throws IOException { StringBuilder builder = new StringBuilder(); @@ -94,11 +94,11 @@ public static void writeSeat(FileWriter writer, int id, String theatre, String p builder.append(" }\n"); writer.write(builder.toString()); } - + public static void main(String args[]) throws IOException { FileWriter writer = new FileWriter("/home/jdconrad/test/seats.json"); int id = 0; - + for (int playCount = 0; playCount < 12; ++playCount) { String play = plays[playCount]; String theatre; @@ -106,7 +106,7 @@ public static void main(String args[]) throws IOException { int startMonth; int endMonth; String time; - + if (playCount == 0) { theatre = theatres[0]; actor = new String[] {actors[0], actors[1], actors[2], actors[3]}; @@ -184,10 +184,10 @@ public static void main(String args[]) throws IOException { } else { throw new RuntimeException("too many plays"); } - + int rows; int number; - + if (playCount < 6) { rows = 3; number = 12; @@ -200,32 +200,32 @@ public static void main(String args[]) throws IOException { } else { throw new RuntimeException("too many seats"); } - + for (int month = startMonth; month <= endMonth; ++month) { for (int day = 1; day <= 14; ++day) { for (int row = 1; row <= rows; ++row) { for (int count = 1; count <= number; ++count) { String date = "2018-" + month + "-" + day; double cost = (25 - row) * 1.25; - + writeSeat(writer, ++id, theatre, play, actor, date, time, row, count, cost, false); } } } } } - + writer.write("\n"); writer.close(); } } - + */ // **** Initial Mappings **** /* - + curl -X PUT "localhost:9200/seats" -H 'Content-Type: application/json' -d' { "mappings": { @@ -246,13 +246,13 @@ public static void main(String args[]) throws IOException { } } ' - + */ // Create Ingest to Modify Dates: /* - + curl -X 
PUT "localhost:9200/_ingest/pipeline/seats" -H 'Content-Type: application/json' -d' { "description": "update datetime for seats", @@ -265,7 +265,7 @@ public static void main(String args[]) throws IOException { ] } ' - + */ public void testIngestProcessorScript() { @@ -304,9 +304,9 @@ public void testIngestProcessorScript() { // Post Generated Data: /* - + curl -XPOST localhost:9200/seats/seat/_bulk?pipeline=seats -H "Content-Type: application/x-ndjson" --data-binary "@/home/jdconrad/test/seats.json" - + */ // Use script_fields API to add two extra fields to the hits diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java index d96e3212e05a2..aca00b42a9331 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java @@ -94,7 +94,7 @@ public void testDCGAt() { /* Check with normalization: to get the maximal possible dcg, sort documents by relevance in descending order - + rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1) --------------------------------------------------------------------------------------- 1 | 3 | 7.0 | 1.0  | 7.0 @@ -103,7 +103,7 @@ public void testDCGAt() { 4 | 2 | 3.0 | 2.321928094887362 | 1.2920296742201793 5 | 1 | 1.0 | 2.584962500721156  | 0.38685280723454163 6 | 0 | 0.0 | 2.807354922057604  | 0.0 - + idcg = 14.595390756454922 (sum of last column) */ dcg = new DiscountedCumulativeGain(true, null, 10); @@ -146,7 +146,7 @@ public void testDCGAtSixMissingRatings() { /* Check with normalization: to get the maximal possible dcg, sort documents by relevance in descending order - + rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1) ---------------------------------------------------------------------------------------- 1 | 3 | 7.0 | 1.0  | 7.0 @@ -155,7 +155,7 @@ public void testDCGAtSixMissingRatings() { 4 | 1 | 1.0 | 2.321928094887362   | 0.43067655807339 5 | n.a | n.a | n.a.  | n.a. 6 | n.a | n.a | n.a  | n.a - + idcg = 13.347184833073591 (sum of last column) */ dcg = new DiscountedCumulativeGain(true, null, 10); @@ -203,7 +203,7 @@ public void testDCGAtFourMoreRatings() { /* Check with normalization: to get the maximal possible dcg, sort documents by relevance in descending order - + rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1) --------------------------------------------------------------------------------------- 1 | 3 | 7.0 | 1.0  | 7.0 @@ -213,7 +213,7 @@ public void testDCGAtFourMoreRatings() { --------------------------------------------------------------------------------------- 5 | n.a | n.a | n.a.  | n.a. 
6 | n.a | n.a | n.a  | n.a - + idcg = 13.347184833073591 (sum of last column) */ dcg = new DiscountedCumulativeGain(true, null, 10); diff --git a/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java index 9bc97d0213e73..288b7e370c647 100644 --- a/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java @@ -61,7 +61,6 @@ import org.opensearch.tasks.TaskInfo; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.transport.client.Client; -import org.hamcrest.Matcher; import org.junit.Before; import java.util.Arrays; diff --git a/plugins/arrow-flight-rpc/licenses/error_prone_annotations-2.31.0.jar.sha1 b/plugins/arrow-flight-rpc/licenses/error_prone_annotations-2.31.0.jar.sha1 deleted file mode 100644 index 4872d644799f5..0000000000000 --- a/plugins/arrow-flight-rpc/licenses/error_prone_annotations-2.31.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c3ba307b915d6d506e98ffbb49e6d2d12edad65b \ No newline at end of file diff --git a/plugins/arrow-flight-rpc/licenses/jackson-annotations-2.18.2.jar.sha1 b/plugins/arrow-flight-rpc/licenses/jackson-annotations-2.18.2.jar.sha1 deleted file mode 100644 index a06e1d5f28425..0000000000000 --- a/plugins/arrow-flight-rpc/licenses/jackson-annotations-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -985d77751ebc7fce5db115a986bc9aa82f973f4a \ No newline at end of file diff --git a/plugins/arrow-flight-rpc/licenses/jackson-databind-2.18.2.jar.sha1 b/plugins/arrow-flight-rpc/licenses/jackson-databind-2.18.2.jar.sha1 deleted file mode 100644 index eedbfff66c705..0000000000000 --- a/plugins/arrow-flight-rpc/licenses/jackson-databind-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -deef8697b92141fb6caf7aa86966cff4eec9b04f \ No newline at end of file diff --git a/plugins/crypto-kms/licenses/jackson-annotations-2.18.2.jar.sha1 b/plugins/crypto-kms/licenses/jackson-annotations-2.18.2.jar.sha1 deleted file mode 100644 index a06e1d5f28425..0000000000000 --- a/plugins/crypto-kms/licenses/jackson-annotations-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -985d77751ebc7fce5db115a986bc9aa82f973f4a \ No newline at end of file diff --git a/plugins/crypto-kms/licenses/jackson-databind-2.18.2.jar.sha1 b/plugins/crypto-kms/licenses/jackson-databind-2.18.2.jar.sha1 deleted file mode 100644 index eedbfff66c705..0000000000000 --- a/plugins/crypto-kms/licenses/jackson-databind-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -deef8697b92141fb6caf7aa86966cff4eec9b04f \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.18.2.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.18.2.jar.sha1 deleted file mode 100644 index a06e1d5f28425..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-annotations-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -985d77751ebc7fce5db115a986bc9aa82f973f4a \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.18.2.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.18.2.jar.sha1 deleted file mode 100644 index eedbfff66c705..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-databind-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -deef8697b92141fb6caf7aa86966cff4eec9b04f \ No newline at end of file diff --git 
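(Aside on the DiscountedCumulativeGainTests hunks above, not part of the patch.) Each table row uses the same per-rank contribution, (2^relevance - 1) / log_2(rank + 1), and the ideal DCG is that sum over the ratings sorted in descending order. A standalone sketch of the arithmetic — a hypothetical result order is assumed here, and this is not the production DiscountedCumulativeGain class — reproduces the idcg = 14.595390756454922 value from the first table:

```java
import java.util.Arrays;

class DcgSketch {
    // Sum of (2^relevance - 1) / log2(rank + 1), with rank starting at 1.
    static double dcg(int[] relevanceByRank) {
        double sum = 0;
        for (int i = 0; i < relevanceByRank.length; i++) {
            sum += (Math.pow(2, relevanceByRank[i]) - 1) / (Math.log(i + 2) / Math.log(2));
        }
        return sum;
    }

    public static void main(String[] args) {
        int[] ratings = { 3, 2, 3, 0, 1, 2 };       // assumed result order; same multiset as the first table
        int[] ideal = ratings.clone();
        Arrays.sort(ideal);                          // ascending...
        for (int i = 0; i < ideal.length / 2; i++) { // ...then reverse to descending for the ideal ordering
            int tmp = ideal[i];
            ideal[i] = ideal[ideal.length - 1 - i];
            ideal[ideal.length - 1 - i] = tmp;
        }
        System.out.println("dcg  = " + dcg(ratings));
        System.out.println("idcg = " + dcg(ideal));  // 14.595390756454922
        System.out.println("ndcg = " + dcg(ratings) / dcg(ideal));
    }
}
```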
a/plugins/identity-shiro/src/test/java/org/opensearch/identity/shiro/ShiroIdentityPluginTests.java b/plugins/identity-shiro/src/test/java/org/opensearch/identity/shiro/ShiroIdentityPluginTests.java index a15538e48bd66..e89737ae6adb7 100644 --- a/plugins/identity-shiro/src/test/java/org/opensearch/identity/shiro/ShiroIdentityPluginTests.java +++ b/plugins/identity-shiro/src/test/java/org/opensearch/identity/shiro/ShiroIdentityPluginTests.java @@ -17,7 +17,6 @@ import java.util.List; -import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThrows; diff --git a/plugins/ingestion-kinesis/licenses/jackson-annotations-2.18.2.jar.sha1 b/plugins/ingestion-kinesis/licenses/jackson-annotations-2.18.2.jar.sha1 deleted file mode 100644 index a06e1d5f28425..0000000000000 --- a/plugins/ingestion-kinesis/licenses/jackson-annotations-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -985d77751ebc7fce5db115a986bc9aa82f973f4a \ No newline at end of file diff --git a/plugins/ingestion-kinesis/licenses/jackson-databind-2.18.2.jar.sha1 b/plugins/ingestion-kinesis/licenses/jackson-databind-2.18.2.jar.sha1 deleted file mode 100644 index eedbfff66c705..0000000000000 --- a/plugins/ingestion-kinesis/licenses/jackson-databind-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -deef8697b92141fb6caf7aa86966cff4eec9b04f \ No newline at end of file diff --git a/plugins/repository-azure/licenses/asm-9.7.jar.sha1 b/plugins/repository-azure/licenses/asm-9.7.jar.sha1 deleted file mode 100644 index 84c9a9703af6d..0000000000000 --- a/plugins/repository-azure/licenses/asm-9.7.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -073d7b3086e14beb604ced229c302feff6449723 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-annotations-2.18.2.jar.sha1 b/plugins/repository-azure/licenses/jackson-annotations-2.18.2.jar.sha1 deleted file mode 100644 index a06e1d5f28425..0000000000000 --- a/plugins/repository-azure/licenses/jackson-annotations-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -985d77751ebc7fce5db115a986bc9aa82f973f4a \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-databind-2.18.2.jar.sha1 b/plugins/repository-azure/licenses/jackson-databind-2.18.2.jar.sha1 deleted file mode 100644 index eedbfff66c705..0000000000000 --- a/plugins/repository-azure/licenses/jackson-databind-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -deef8697b92141fb6caf7aa86966cff4eec9b04f \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-dataformat-xml-2.18.2.jar.sha1 b/plugins/repository-azure/licenses/jackson-dataformat-xml-2.18.2.jar.sha1 deleted file mode 100644 index 61ee41aa8adf4..0000000000000 --- a/plugins/repository-azure/licenses/jackson-dataformat-xml-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -069cb3b7bd34b3f7842cc4a6fd717981433bf73e \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.18.2.jar.sha1 b/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.18.2.jar.sha1 deleted file mode 100644 index 7b9ab1d1e08d1..0000000000000 --- a/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7b6ff96adf421f4c6edbd694e797dd8fe434510a \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.18.2.jar.sha1 b/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.18.2.jar.sha1 deleted file mode 100644 
index b98599718965b..0000000000000 --- a/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -048c96032e5a428739e28ff04973717c032df598 \ No newline at end of file diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java index 19c9af317247f..7688b3c8b93ab 100644 --- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java @@ -172,7 +172,7 @@ public Tuple> client(String clientName) { * Obtains a {@code BlobServiceClient} on each invocation using the current client * settings. BlobServiceClient is thread safe and and could be cached but the settings * can change, therefore the instance might be recreated from scratch. - + * @param clientName client name * @param statsCollector statistics collector * @return the {@code BlobServiceClient} instance and context diff --git a/plugins/repository-s3/licenses/jackson-annotations-2.18.2.jar.sha1 b/plugins/repository-s3/licenses/jackson-annotations-2.18.2.jar.sha1 deleted file mode 100644 index a06e1d5f28425..0000000000000 --- a/plugins/repository-s3/licenses/jackson-annotations-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -985d77751ebc7fce5db115a986bc9aa82f973f4a \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jackson-databind-2.18.2.jar.sha1 b/plugins/repository-s3/licenses/jackson-databind-2.18.2.jar.sha1 deleted file mode 100644 index eedbfff66c705..0000000000000 --- a/plugins/repository-s3/licenses/jackson-databind-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -deef8697b92141fb6caf7aa86966cff4eec9b04f \ No newline at end of file diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelTelemetry.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelTelemetry.java index 0c697d2cc5e8c..606fdc317df16 100644 --- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelTelemetry.java +++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelTelemetry.java @@ -24,7 +24,7 @@ public class OTelTelemetry implements Telemetry { /** * Creates Telemetry instance - + */ /** * Creates Telemetry instance diff --git a/plugins/transport-grpc/licenses/error_prone_annotations-2.24.1.jar.sha1 b/plugins/transport-grpc/licenses/error_prone_annotations-2.24.1.jar.sha1 deleted file mode 100644 index 67723f6f51248..0000000000000 --- a/plugins/transport-grpc/licenses/error_prone_annotations-2.24.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -32b299e45105aa9b0df8279c74dc1edfcf313ff0 \ No newline at end of file diff --git a/plugins/transport-grpc/src/main/java/org/opensearch/plugin/transport/grpc/proto/request/search/RescorerBuilderProtoUtils.java b/plugins/transport-grpc/src/main/java/org/opensearch/plugin/transport/grpc/proto/request/search/RescorerBuilderProtoUtils.java index 38f22f05a94e9..70d4cdb047a08 100644 --- a/plugins/transport-grpc/src/main/java/org/opensearch/plugin/transport/grpc/proto/request/search/RescorerBuilderProtoUtils.java +++ b/plugins/transport-grpc/src/main/java/org/opensearch/plugin/transport/grpc/proto/request/search/RescorerBuilderProtoUtils.java @@ -37,9 +37,9 @@ protected static RescorerBuilder parseFromProto(Rescore rescoreProto) { /* RescorerBuilder rescorer = null; // TODO populate 
rescorerBuilder - + return rescorer; - + */ } diff --git a/plugins/transport-grpc/src/main/java/org/opensearch/plugin/transport/grpc/proto/response/exceptions/ShardOperationFailedExceptionProtoUtils.java b/plugins/transport-grpc/src/main/java/org/opensearch/plugin/transport/grpc/proto/response/exceptions/ShardOperationFailedExceptionProtoUtils.java index 929eb3b19d646..bdabec9dc28a0 100644 --- a/plugins/transport-grpc/src/main/java/org/opensearch/plugin/transport/grpc/proto/response/exceptions/ShardOperationFailedExceptionProtoUtils.java +++ b/plugins/transport-grpc/src/main/java/org/opensearch/plugin/transport/grpc/proto/response/exceptions/ShardOperationFailedExceptionProtoUtils.java @@ -8,8 +8,6 @@ package org.opensearch.plugin.transport.grpc.proto.response.exceptions; import org.opensearch.core.action.ShardOperationFailedException; -import org.opensearch.core.xcontent.ToXContent; -import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.protobufs.ObjectMap; /** diff --git a/plugins/transport-grpc/src/main/java/org/opensearch/plugin/transport/grpc/proto/response/exceptions/shardoperationfailedexception/ShardOperationFailedExceptionProtoUtils.java b/plugins/transport-grpc/src/main/java/org/opensearch/plugin/transport/grpc/proto/response/exceptions/shardoperationfailedexception/ShardOperationFailedExceptionProtoUtils.java index c5a26930d9300..40edb78374579 100644 --- a/plugins/transport-grpc/src/main/java/org/opensearch/plugin/transport/grpc/proto/response/exceptions/shardoperationfailedexception/ShardOperationFailedExceptionProtoUtils.java +++ b/plugins/transport-grpc/src/main/java/org/opensearch/plugin/transport/grpc/proto/response/exceptions/shardoperationfailedexception/ShardOperationFailedExceptionProtoUtils.java @@ -11,8 +11,6 @@ import org.opensearch.action.support.replication.ReplicationResponse; import org.opensearch.core.action.ShardOperationFailedException; import org.opensearch.core.action.support.DefaultShardOperationFailedException; -import org.opensearch.core.xcontent.ToXContent; -import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.protobufs.ShardFailure; import org.opensearch.snapshots.SnapshotShardFailure; diff --git a/server/licenses/jackson-core-2.18.2.jar.sha1 b/server/licenses/jackson-core-2.18.2.jar.sha1 deleted file mode 100644 index 96350c9307ae7..0000000000000 --- a/server/licenses/jackson-core-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fb64ccac5c27dca8819418eb4e443a9f496d9ee7 \ No newline at end of file diff --git a/server/licenses/jackson-dataformat-cbor-2.18.2.jar.sha1 b/server/licenses/jackson-dataformat-cbor-2.18.2.jar.sha1 deleted file mode 100644 index 8b946b98ddbf9..0000000000000 --- a/server/licenses/jackson-dataformat-cbor-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d4870757eff0344130f60e3ddb882b2336640f73 \ No newline at end of file diff --git a/server/licenses/jackson-dataformat-smile-2.18.2.jar.sha1 b/server/licenses/jackson-dataformat-smile-2.18.2.jar.sha1 deleted file mode 100644 index 9fbdb9b3a2506..0000000000000 --- a/server/licenses/jackson-dataformat-smile-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -44caf62d743bb5e5876e95efba5a55a1cab1b0db \ No newline at end of file diff --git a/server/licenses/jackson-dataformat-yaml-2.18.2.jar.sha1 b/server/licenses/jackson-dataformat-yaml-2.18.2.jar.sha1 deleted file mode 100644 index 9dac9ee8e1e72..0000000000000 --- a/server/licenses/jackson-dataformat-yaml-2.18.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d000e13505d1cf564371516fa3d5b8769a779dc9 \ No newline at end 
of file diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java index e2db9f85131a9..6f26515e7b75e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java @@ -1617,7 +1617,7 @@ public void testOngoingRecoveryAndClusterManagerFailOver() throws Exception { /* Shard assignment is stuck because recovery is blocked at CLEAN_FILES stage. Once, it times out after 60s the replica shards get assigned. https://github.com/opensearch-project/OpenSearch/issues/18098. - + Stack trace: Caused by: org.opensearch.transport.ReceiveTimeoutTransportException: [node_t3][127.0.0.1:56648][internal:index/shard/recovery/clean_files] request_id [20] timed out after [60026ms] at org.opensearch.transport.TransportService$TimeoutHandler.run(TransportService.java:1399) ~[main/:?] diff --git a/server/src/internalClusterTest/java/org/opensearch/remotemigration/RemoteDualReplicationIT.java b/server/src/internalClusterTest/java/org/opensearch/remotemigration/RemoteDualReplicationIT.java index d046f41ce0590..7a721c395df60 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotemigration/RemoteDualReplicationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotemigration/RemoteDualReplicationIT.java @@ -232,14 +232,14 @@ public void testRemotePrimaryDocRepAndRemoteReplica() throws Exception { public void testRetentionLeasePresentOnDocrepReplicaButNotRemote() throws Exception { /* Reducing indices.memory.shard_inactive_time to force a flush and trigger translog sync, instead of relying on Global CKP Sync action which doesn't run on remote enabled copies - + Under steady state, RetentionLeases would be on (GlobalCkp + 1) on a docrep enabled shard copy and (GlobalCkp) for a remote enabled shard copy. This is because we block translog sync on remote enabled shard copies during the GlobalCkpSync background task. - + RLs on remote enabled copies are brought up to (GlobalCkp + 1) upon a flush request issued by IndexingMemoryController when the shard becomes inactive after SHARD_INACTIVE_TIME_SETTING interval. - + Flush triggers a force sync of translog which bumps the RetentionLease sequence number along with it */ extraSettings = Settings.builder().put(IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING.getKey(), "3s").build(); @@ -668,10 +668,10 @@ public void testFailoverRemotePrimaryToDocrepReplicaReseedToRemotePrimary() thro /* Performs the same experiment as testRemotePrimaryDocRepReplica. 
- + This ensures that the primary shard for the index has moved over to remote enabled node whereas the replica copy is still left behind on the docrep nodes - + At this stage, segrep lag computation shouldn't consider the docrep shard copy while calculating bytes lag */ public void testZeroSegrepLagForShardsWithMixedReplicationGroup() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexPrimaryRelocationIT.java index 67316ed0e6e6b..b518cc7f7bab6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexPrimaryRelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexPrimaryRelocationIT.java @@ -16,8 +16,6 @@ import java.nio.file.Path; -import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; - @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0) public class RemoteIndexPrimaryRelocationIT extends IndexPrimaryRelocationIT { diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java index 1961b0fa43705..0f0bbe7dcab4b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java @@ -26,7 +26,6 @@ import java.nio.file.Path; import static org.opensearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_CHUNK_SIZE_SETTING; -import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0) public class RemoteIndexRecoveryIT extends IndexRecoveryIT { diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationWithRemoteStorePressureIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationWithRemoteStorePressureIT.java index 6cfc76b7e3223..ab133c28e1ef7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationWithRemoteStorePressureIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationWithRemoteStorePressureIT.java @@ -16,8 +16,6 @@ import java.nio.file.Path; -import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; - /** * This class executes the SegmentReplicationPressureIT suite with remote store integration enabled. 
*/ diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java index ccb4af8386472..a1e2f1ee32e99 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java @@ -427,20 +427,20 @@ public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception { } /* - + [1, 2] [2, 3] [3, 4] [4, 5] [5, 6] - + 1 - count: 1 - sum: 1 2 - count: 2 - sum: 4 3 - count: 2 - sum: 6 4 - count: 2 - sum: 8 5 - count: 2 - sum: 10 6 - count: 1 - sum: 6 - + */ public void testScriptSingleValue() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java index 49031bfd3fc1d..f743f5e737d09 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java @@ -414,20 +414,20 @@ public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception { } /* - + [1, 2] [2, 3] [3, 4] [4, 5] [5, 6] - + 1 - count: 1 - sum: 1 2 - count: 2 - sum: 4 3 - count: 2 - sum: 6 4 - count: 2 - sum: 8 5 - count: 2 - sum: 10 6 - count: 1 - sum: 6 - + */ public void testScriptSingleValue() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java index 5483db285dded..fe07b31238787 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java @@ -614,7 +614,7 @@ public void testMultiValuedFieldWithValueScript() throws Exception { [9, 10] [10, 11] [11, 12] - + r1: 2 r2: 3, 3, 4, 4, 5, 5 r3: 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12 @@ -769,7 +769,7 @@ public void testScriptMultiValued() throws Exception { [8, 9] [9, 10] [10, 11] - + r1: 1, 2, 2 r2: 3, 3, 4, 4, 5, 5 r3: 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11 diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java index ee4622a7d0f40..6894b4c7f3654 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java @@ -35,7 +35,6 @@ import static org.opensearch.index.remote.RemoteStoreEnums.DataCategory.SEGMENTS; import static org.opensearch.index.remote.RemoteStoreEnums.DataType.LOCK_FILES; -import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.hamcrest.Matchers.comparesEqualTo; import static org.hamcrest.Matchers.is; diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/RemoteIndexSnapshotStatusApiIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/RemoteIndexSnapshotStatusApiIT.java index e84de36df2fca..60dc20aeceac8 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/snapshots/RemoteIndexSnapshotStatusApiIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/RemoteIndexSnapshotStatusApiIT.java @@ -51,7 +51,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; import static org.opensearch.snapshots.SnapshotsService.MAX_SHARDS_ALLOWED_IN_STATUS_API; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SystemRepositoryIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SystemRepositoryIT.java index bb5cc89d4e1d5..cb2a408877013 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SystemRepositoryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SystemRepositoryIT.java @@ -17,8 +17,6 @@ import java.nio.file.Path; -import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; - @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0) public class SystemRepositoryIT extends AbstractSnapshotIntegTestCase { protected Path absolutePath; diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/AwarenessReplicaBalance.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/AwarenessReplicaBalance.java index 538d49d4e4701..9f52b08792e1a 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/allocation/AwarenessReplicaBalance.java +++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/AwarenessReplicaBalance.java @@ -80,14 +80,14 @@ private void setAwarenessAttributes(List awarenessAttributes) { /* For a cluster having zone as awareness attribute , it will return the size of zones if set it forced awareness attributes - + If there are multiple forced awareness attributes, it will return size of the largest list, as all copies of data is supposed to get distributed amongst those. - + cluster.routing.allocation.awareness.attributes: rack_id , zone cluster.routing.allocation.awareness.force.zone.values: zone1, zone2 cluster.routing.allocation.awareness.force.rack_id.values: rack_id1, rack_id2, rack_id3 - + In this case, awareness attributes would be 3. 
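(Aside on the AwarenessReplicaBalance hunk below, not part of the patch.) The comment's rule reduces to taking the size of the largest forced-values list across the configured awareness attributes: zone has two forced values and rack_id has three, so the result is 3. A tiny sketch of that calculation — hypothetical names and an assumed default when nothing is forced, not the actual AwarenessReplicaBalance implementation — follows:

```java
import java.util.List;
import java.util.Map;

class AwarenessAttributesSketch {
    // Size of the largest forced-values list, e.g. 3 for the zone/rack_id example in the comment.
    static int maxAwarenessAttributes(Map<String, List<String>> forcedAwarenessValues) {
        return forcedAwarenessValues.values().stream()
            .mapToInt(List::size)
            .max()
            .orElse(1);   // assumed default when no forced awareness values are configured
    }

    public static void main(String[] args) {
        Map<String, List<String>> forced = Map.of(
            "zone", List.of("zone1", "zone2"),
            "rack_id", List.of("rack_id1", "rack_id2", "rack_id3")
        );
        System.out.println(maxAwarenessAttributes(forced));   // 3
    }
}
```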
*/ public int maxAwarenessAttributes() { diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index bd5b694f4fe41..42218310a3a12 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -66,7 +66,6 @@ import java.util.Map; import java.util.Set; -import static org.opensearch.cluster.action.shard.ShardStateAction.FOLLOW_UP_REROUTE_PRIORITY_SETTING; import static org.opensearch.cluster.routing.allocation.ConstraintTypes.CLUSTER_PRIMARY_SHARD_BALANCE_CONSTRAINT_ID; import static org.opensearch.cluster.routing.allocation.ConstraintTypes.CLUSTER_PRIMARY_SHARD_REBALANCE_CONSTRAINT_ID; import static org.opensearch.cluster.routing.allocation.ConstraintTypes.INDEX_PRIMARY_SHARD_BALANCE_CONSTRAINT_ID; diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/command/CancelAllocationCommand.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/command/CancelAllocationCommand.java index a07f3eb9d95e1..5e35f587e7fa1 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/allocation/command/CancelAllocationCommand.java +++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/command/CancelAllocationCommand.java @@ -116,7 +116,7 @@ public String index() { } /** - + * Get the id of the shard which allocation should be canceled * @return id of the shard which allocation should be canceled */ diff --git a/server/src/main/java/org/opensearch/identity/tokens/OnBehalfOfClaims.java b/server/src/main/java/org/opensearch/identity/tokens/OnBehalfOfClaims.java index 2b37ed954e7d4..653cf9f384867 100644 --- a/server/src/main/java/org/opensearch/identity/tokens/OnBehalfOfClaims.java +++ b/server/src/main/java/org/opensearch/identity/tokens/OnBehalfOfClaims.java @@ -25,7 +25,7 @@ public class OnBehalfOfClaims { * Constructor for OnBehalfOfClaims * @param aud the Audience for the token * @param expiration_seconds the length of time in seconds the token is valid - + */ public OnBehalfOfClaims(String aud, Long expiration_seconds) { this.audience = aud; diff --git a/server/src/main/java/org/opensearch/index/snapshots/blobstore/RemoteStoreShardShallowCopySnapshot.java b/server/src/main/java/org/opensearch/index/snapshots/blobstore/RemoteStoreShardShallowCopySnapshot.java index 9c0ea42810e16..200a617dfc509 100644 --- a/server/src/main/java/org/opensearch/index/snapshots/blobstore/RemoteStoreShardShallowCopySnapshot.java +++ b/server/src/main/java/org/opensearch/index/snapshots/blobstore/RemoteStoreShardShallowCopySnapshot.java @@ -355,7 +355,7 @@ public String snapshot() { /* Returns list of files in the shard - + @return list of files */ diff --git a/server/src/main/java/org/opensearch/index/translog/RemoteFsTranslog.java b/server/src/main/java/org/opensearch/index/translog/RemoteFsTranslog.java index e697e16d5e8a0..a9a03428d2a7e 100644 --- a/server/src/main/java/org/opensearch/index/translog/RemoteFsTranslog.java +++ b/server/src/main/java/org/opensearch/index/translog/RemoteFsTranslog.java @@ -215,7 +215,7 @@ static void download(TranslogTransferManager translogTransferManager, Path locat In Primary to Primary relocation , there can be concurrent upload and download of translog. 
While translog files are getting downloaded by new primary, it might hence be deleted by the primary Hence we retry if tlog/ckp files are not found . - + This doesn't happen in last download , where it is ensured that older primary has stopped modifying tlog data. */ IOException ex = null; diff --git a/server/src/main/java/org/opensearch/persistent/PersistentTasksClusterService.java b/server/src/main/java/org/opensearch/persistent/PersistentTasksClusterService.java index eb187224ebf07..4cab2bdd979de 100644 --- a/server/src/main/java/org/opensearch/persistent/PersistentTasksClusterService.java +++ b/server/src/main/java/org/opensearch/persistent/PersistentTasksClusterService.java @@ -372,7 +372,7 @@ public void clusterStateProcessed(String source, ClusterState oldState, ClusterS * @param taskName the task's name * @param taskParams the task's parameters * @param currentState the current {@link ClusterState} - + * @return a new {@link Assignment} */ private Assignment createAssignment( diff --git a/server/src/main/java/org/opensearch/plugins/SearchPlugin.java b/server/src/main/java/org/opensearch/plugins/SearchPlugin.java index 80a4619f56b64..651761e6b29e5 100644 --- a/server/src/main/java/org/opensearch/plugins/SearchPlugin.java +++ b/server/src/main/java/org/opensearch/plugins/SearchPlugin.java @@ -667,7 +667,7 @@ public PipelineAggregationSpec( * @param builderReader the reader registered for this aggregation's builder. Typically, a reference to a constructor that takes a * {@link StreamInput} * @param parser reads the aggregation builder from XContent - + */ public PipelineAggregationSpec( String name, diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/heuristic/MutualInformation.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/heuristic/MutualInformation.java index 86caa6d3b5059..d9a9471aca49e 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/heuristic/MutualInformation.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/heuristic/MutualInformation.java @@ -120,7 +120,7 @@ public double getScore(long subsetFreq, long subsetSize, long supersetFreq, long + N01 / N * Math.log((N * N01) / (N0_ * N_1)) + N10 / N * Math.log((N * N10) / (N1_ * N_0)) + N00 / N * Math.log((N * N00) / (N0_ * N_0)); - + but we get many NaN if we do not take case of the 0s */ double getMITerm(double Nxy, double Nx_, double N_y, double N) { diff --git a/server/src/main/java/org/opensearch/threadpool/ThreadPool.java b/server/src/main/java/org/opensearch/threadpool/ThreadPool.java index b67b00bb42054..46b3abc500586 100644 --- a/server/src/main/java/org/opensearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/opensearch/threadpool/ThreadPool.java @@ -426,7 +426,7 @@ public void registerClusterSettingsListeners(ClusterSettings clusterSettings) { /* Scaling threadpool can provide only max and core Fixed/ResizableQueue can provide only size - + For example valid settings would be for scaling and fixed thead pool cluster.threadpool.snapshot.max : "5", cluster.threadpool.snapshot.core : "5", diff --git a/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeKeywordDocValuesFormatTests.java b/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeKeywordDocValuesFormatTests.java index 5603fe4e30f9f..9a96377b24466 100644 --- 
a/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeKeywordDocValuesFormatTests.java +++ b/server/src/test/java/org/opensearch/index/codec/composite912/datacube/startree/StarTreeKeywordDocValuesFormatTests.java @@ -451,7 +451,7 @@ public void testStarKeywordDocValuesWithMissingDocsInAllSegments() throws IOExce /** * keyword1 keyword2 | [ sum, value_count, min, max[sndv]] , doc_count [null, null] | [6.0, 4.0, 1.0, 2.0, 4.0] - + */ StarTreeDocument[] expectedStarTreeDocuments = new StarTreeDocument[1]; expectedStarTreeDocuments[0] = new StarTreeDocument(new Long[] { null, null }, new Double[] { 6.0, 4.0, 1.0, 2.0, 4.0 }); diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/BucketUtilsTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/BucketUtilsTests.java index 88e281032d678..a203996d03f07 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/BucketUtilsTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/BucketUtilsTests.java @@ -67,15 +67,15 @@ public static void main(String[] args) { final int numberOfShards = 10; final double skew = 2; // parameter of the zipf distribution final int size = 100; - + double totalWeight = 0; for (int rank = 1; rank <= numberOfUniqueTerms; ++rank) { totalWeight += weight(rank, skew); } - + int[] terms = new int[totalNumberOfTerms]; int len = 0; - + final int[] actualTopFreqs = new int[size]; for (int rank = 1; len < totalNumberOfTerms; ++rank) { int freq = (int) (weight(rank, skew) / totalWeight * totalNumberOfTerms); @@ -86,9 +86,9 @@ public static void main(String[] args) { actualTopFreqs[rank-1] = freq; } } - + final int maxTerm = terms[terms.length - 1] + 1; - + // shuffle terms Random r = new Random(0); for (int i = terms.length - 1; i > 0; --i) { @@ -104,7 +104,7 @@ public static void main(String[] args) { shards[i] = Arrays.copyOfRange(terms, upTo, upTo + (terms.length - upTo) / (numberOfShards - i)); upTo += shards[i].length; } - + final int[][] topShards = new int[numberOfShards][]; final int shardSize = BucketUtils.suggestShardSideQueueSize(size, numberOfShards); for (int shard = 0; shard < numberOfShards; ++shard) { @@ -118,7 +118,7 @@ public static void main(String[] args) { termIds[i] = i; } new InPlaceMergeSorter() { - + @Override protected void swap(int i, int j) { int tmp = termIds[i]; @@ -128,16 +128,16 @@ protected void swap(int i, int j) { freqs[i] = freqs[j]; freqs[j] = tmp; } - + @Override protected int compare(int i, int j) { return freqs[j] - freqs[i]; } }.sort(0, maxTerm); - + Arrays.fill(freqs, shardSize, freqs.length, 0); new InPlaceMergeSorter() { - + @Override protected void swap(int i, int j) { int tmp = termIds[i]; @@ -147,16 +147,16 @@ protected void swap(int i, int j) { freqs[i] = freqs[j]; freqs[j] = tmp; } - + @Override protected int compare(int i, int j) { return termIds[i] - termIds[j]; } }.sort(0, maxTerm); - + topShards[shard] = freqs; } - + final int[] computedTopFreqs = new int[size]; for (int[] freqs : topShards) { for (int i = 0; i < size; ++i) { @@ -174,7 +174,7 @@ protected int compare(int i, int j) { System.out.println("Computed freqs of top terms: " + Arrays.toString(computedTopFreqs)); System.out.println("Number of errors: " + numErrors + "/" + totalFreq); } - + private static double weight(int rank, double skew) { return 1d / Math.pow(rank, skew); }*/ diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java 
b/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java index 0bf23bd3e2cad..a8539e81f3233 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -81,8 +81,8 @@ protected void createIdx(String keyFieldMapping) { protected void indexData() throws Exception { /* - - + + || || size = 3, shard_size = 5 || shard_size = size = 3 || ||==========||==================================================||===============================================|| || shard 1: || "1" - 5 | "2" - 4 | "3" - 3 | "4" - 2 | "5" - 1 || "1" - 5 | "3" - 3 | "2" - 4 || @@ -92,8 +92,8 @@ protected void indexData() throws Exception { || reduced: || "1" - 8 | "2" - 5 | "3" - 8 | "4" - 4 | "5" - 2 || || || || || "1" - 8, "3" - 8, "2" - 4 <= WRONG || || || "1" - 8 | "3" - 8 | "2" - 5 <= CORRECT || || - - + + */ List docs = new ArrayList<>(); diff --git a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java index 4f78d9166b414..ea44910bb526a 100644 --- a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java +++ b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java @@ -78,12 +78,10 @@ import static org.opensearch.test.geo.RandomShapeGenerator.xRandomRectangle; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; -import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.not; -import static com.carrotsearch.randomizedtesting.RandomizedTest.assumeTrue; public class GeoShapeQueryTests extends GeoQueryTests { protected static final String[] PREFIX_TREES = new String[] { diff --git a/test/framework/src/main/java/org/opensearch/cluster/OpenSearchAllocationWithConstraintsTestCase.java b/test/framework/src/main/java/org/opensearch/cluster/OpenSearchAllocationWithConstraintsTestCase.java index 0c08de252e4cd..758722ea4072a 100644 --- a/test/framework/src/main/java/org/opensearch/cluster/OpenSearchAllocationWithConstraintsTestCase.java +++ b/test/framework/src/main/java/org/opensearch/cluster/OpenSearchAllocationWithConstraintsTestCase.java @@ -215,7 +215,7 @@ public int allocateAndCheckIndexShardHotSpots(boolean expected, int nodes, Strin SameShardAllocationDecider, causing it to breach allocation constraint on another node. We need to differentiate between such hot spots v/s actual hot spots. - + A simple check could be to ensure there is no node with shards less than allocation limit, that can accept current shard. However, in current allocation algorithm, when nodes get throttled, shards are added to @@ -224,7 +224,7 @@ ModelNodes without adding them to actual cluster (RoutingNodes). As a result, weight function in balancer. RoutingNodes with {@link count} < {@link limit} may not have had the same count in the corresponding ModelNode seen by weight function. We hence use the following alternate check -- - + Given the way {@link limit} is defined, we should not have hot spots if *all* nodes are eligible to accept the shard. A hot spot is acceptable, if either all peer nodes have {@link count} > {@link limit}, or if even one node is