Merge remote-tracking branch 'upstream/2.x' into flat-object-api
merged 2.x changes to resolve bwc check
Signed-off-by: Mingshi Liu <mingshl@amazon.com>
mingshl committed Mar 22, 2023
2 parents 44c3b5e + 7ea7792 commit 33209b7
Showing 482 changed files with 8,484 additions and 4,099 deletions.
2 changes: 2 additions & 0 deletions .ci/bwcVersions
@@ -46,6 +46,7 @@ BWC_VERSION:
- "1.3.7"
- "1.3.8"
- "1.3.9"
- "1.3.10"
- "2.0.0"
- "2.0.1"
- "2.0.2"
@@ -62,3 +63,4 @@ BWC_VERSION:
- "2.5.0"
- "2.5.1"
- "2.6.0"
- "2.6.1"
14 changes: 0 additions & 14 deletions .github/workflows/code-hygiene.yml

This file was deleted.

20 changes: 20 additions & 0 deletions .github/workflows/github-merit-badger.yml
@@ -0,0 +1,20 @@
name: github-merit-badger
on:
  pull_request_target:
    types:
      - opened

jobs:
  call-action:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    steps:
      - uses: aws-github-ops/github-merit-badger@v0.0.98
        id: merit-badger
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          badges: '[first-time-contributor,repeat-contributor,valued-contributor,seasoned-contributor,all-star-contributor,distinguished-contributor]'
          thresholds: '[0,3,6,13,25,50]'
          badge-type: 'achievement'
          ignore-usernames: '[opensearch-ci-bot, dependabot, opensearch-trigger-bot]'
49 changes: 0 additions & 49 deletions .linelint.yml

This file was deleted.

24 changes: 24 additions & 0 deletions CHANGELOG.md
@@ -7,20 +7,44 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
### Added
- Add GeoTile and GeoHash Grid aggregations on GeoShapes. ([#5589](https://github.com/opensearch-project/OpenSearch/pull/5589))
- Disallow multiple data paths for search nodes ([#6427](https://github.com/opensearch-project/OpenSearch/pull/6427))
- [Segment Replication] Allocation and rebalancing based on average primary shard count per index ([#6422](https://github.com/opensearch-project/OpenSearch/pull/6422))
- The truncation limit of the OpenSearchJsonLayout logger is now configurable ([#6569](https://github.com/opensearch-project/OpenSearch/pull/6569))
- Add 'base_path' setting to File System Repository ([#6558](https://github.com/opensearch-project/OpenSearch/pull/6558))
- Return success on DeletePits when no PITs exist. ([#6544](https://github.com/opensearch-project/OpenSearch/pull/6544))
- Add node repurpose command for search nodes ([#6517](https://github.com/opensearch-project/OpenSearch/pull/6517))
- [Segment Replication] Apply backpressure when replicas fall behind ([#6563](https://github.com/opensearch-project/OpenSearch/pull/6563))

### Dependencies
- Bump `org.apache.logging.log4j:log4j-core` from 2.18.0 to 2.20.0 ([#6490](https://github.com/opensearch-project/OpenSearch/pull/6490))
- Bump `com.azure:azure-storage-common` from 12.19.3 to 12.20.0 ([#6492](https://github.com/opensearch-project/OpenSearch/pull/6492))
- Bump `snakeyaml` from 1.33 to 2.0 ([#6511](https://github.com/opensearch-project/OpenSearch/pull/6511))
- Bump `io.projectreactor.netty:reactor-netty` from 1.1.3 to 1.1.4
- Bump `com.avast.gradle:gradle-docker-compose-plugin` from 0.15.2 to 0.16.11
- Bump `net.minidev:json-smart` from 2.4.8 to 2.4.9
- Bump `com.google.protobuf:protobuf-java` from 3.22.0 to 3.22.2
- Bump Netty to 4.1.90.Final ([#6677](https://github.com/opensearch-project/OpenSearch/pull/6677))
- Bump `com.diffplug.spotless` from 6.15.0 to 6.17.0
- Bump `org.apache.zookeeper:zookeeper` from 3.8.0 to 3.8.1
- Bump `net.minidev:json-smart` from 2.4.7 to 2.4.10
- Bump `org.apache.maven:maven-model` from 3.6.2 to 3.9.1

### Changed
- Require MediaType in Strings.toString API ([#6009](https://github.com/opensearch-project/OpenSearch/pull/6009))
- [Refactor] XContent base classes from xcontent to core library ([#5902](https://github.com/opensearch-project/OpenSearch/pull/5902))
- Introduce a new field type: flat_object ([#6507](https://github.com/opensearch-project/OpenSearch/pull/6507))

### Deprecated
- Map, List, and Set in org.opensearch.common.collect ([#6609](https://github.com/opensearch-project/OpenSearch/pull/6609))

### Removed

### Fixed
- Added depth check in doc parser for deep nested document ([#5199](https://github.com/opensearch-project/OpenSearch/pull/5199))
- Added equals/hashcode for named DocValueFormat.DateTime inner class ([#6357](https://github.com/opensearch-project/OpenSearch/pull/6357))
- Fixed bug for searchable snapshot to take 'base_path' of blob into account ([#6558](https://github.com/opensearch-project/OpenSearch/pull/6558))
- Fix fuzziness validation ([#5805](https://github.com/opensearch-project/OpenSearch/pull/5805))

### Security

[Unreleased 3.0]: https://github.com/opensearch-project/OpenSearch/compare/2.x...HEAD
[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.5...2.x
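
The "Deprecated" entry above ([#6609]) covers the JDK 8-era compatibility shims in org.opensearch.common.collect; the benchmark and client-test hunks further down switch those call sites to the standard JDK factory methods. As a minimal sketch of that migration (the class and example values here are hypothetical, not taken from this commit):

import java.util.List;
import java.util.Map;

public final class CollectShimMigration {
    public static void main(String[] args) {
        // Before: org.opensearch.common.collect.List.of("pattern"), a shim kept for pre-JDK-9 support.
        // After: the java.util factory methods, as the diffs below use them.
        List<String> patterns = List.of("pattern");
        Map<String, String> aliases = Map.of("alias", "write-alias"); // hypothetical key/value
        System.out.println(patterns + " " + aliases);
    }
}
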
@@ -64,9 +64,7 @@
@State(Scope.Benchmark)
public class StringTermsSerializationBenchmark {
private static final NamedWriteableRegistry REGISTRY = new NamedWriteableRegistry(
org.opensearch.common.collect.List.of(
new NamedWriteableRegistry.Entry(InternalAggregation.class, StringTerms.NAME, StringTerms::new)
)
List.of(new NamedWriteableRegistry.Entry(InternalAggregation.class, StringTerms.NAME, StringTerms::new))
);
@Param(value = { "1000" })
private int buckets;
@@ -75,15 +73,13 @@ public class StringTermsSerializationBenchmark

@Setup
public void initResults() {
results = DelayableWriteable.referencing(InternalAggregations.from(org.opensearch.common.collect.List.of(newTerms(true))));
results = DelayableWriteable.referencing(InternalAggregations.from(List.of(newTerms(true))));
}

private StringTerms newTerms(boolean withNested) {
List<StringTerms.Bucket> resultBuckets = new ArrayList<>(buckets);
for (int i = 0; i < buckets; i++) {
InternalAggregations inner = withNested
? InternalAggregations.from(org.opensearch.common.collect.List.of(newTerms(false)))
: InternalAggregations.EMPTY;
InternalAggregations inner = withNested ? InternalAggregations.from(List.of(newTerms(false))) : InternalAggregations.EMPTY;
resultBuckets.add(new StringTerms.Bucket(new BytesRef("test" + i), i, inner, false, 0, DocValueFormat.RAW));
}
return new StringTerms(
16 changes: 1 addition & 15 deletions build.gradle
@@ -45,7 +45,6 @@ import org.gradle.plugins.ide.eclipse.model.AccessRule
import org.gradle.plugins.ide.eclipse.model.EclipseJdt
import org.gradle.plugins.ide.eclipse.model.SourceFolder
import org.gradle.api.Project;
import org.gradle.api.internal.tasks.testing.junit.JUnitTestFramework
import org.gradle.process.ExecResult;

import static org.opensearch.gradle.util.GradleUtils.maybeConfigure
@@ -54,7 +53,7 @@ plugins {
id 'lifecycle-base'
id 'opensearch.docker-support'
id 'opensearch.global-build-info'
id "com.diffplug.spotless" version "6.15.0" apply false
id "com.diffplug.spotless" version "6.17.0" apply false
id "org.gradle.test-retry" version "1.5.1" apply false
id "test-report-aggregation"
id 'jacoco-report-aggregation'
@@ -76,19 +75,6 @@ allprojects {
group = 'org.opensearch'
version = VersionProperties.getOpenSearch()
description = "OpenSearch subproject ${project.path}"

afterEvaluate {
project.tasks.withType(Test) { task ->
// This is so hacky: now, by default, test tasks uses JUnit framework and always includes 'junit'
// JARs from the Gradle distribution (no ways to override this behavior). It causes JAR hell on test
// classpath, example of the report:
//
// jar1: /home/ubuntu/.gradle/caches/modules-2/files-2.1/junit/junit/4.13.2/8ac9e16d933b6fb43bc7f576336b8f4d7eb5ba12/junit-4.13.2.jar
// jar2: /home/ubuntu/.gradle/wrapper/dists/gradle-8.0-rc-1-all/2p8rgxxewg8l61n1p3vrzr9s8/gradle-8.0-rc-1/lib/junit-4.13.2.jar
//
task.getTestFrameworkProperty().convention(getProviderFactory().provider(() -> new JUnitTestFramework(task, task.getFilter(), false)));
}
}
}

configure(allprojects - project(':distribution:archives:integ-test-zip')) {
4 changes: 2 additions & 2 deletions buildSrc/build.gradle
@@ -115,9 +115,9 @@ dependencies {
api 'org.jdom:jdom2:2.0.6.1'
api "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${props.getProperty('kotlin')}"
api 'de.thetaphi:forbiddenapis:3.4'
api 'com.avast.gradle:gradle-docker-compose-plugin:0.15.2'
api 'com.avast.gradle:gradle-docker-compose-plugin:0.16.11'
api "org.yaml:snakeyaml:${props.getProperty('snakeyaml')}"
api 'org.apache.maven:maven-model:3.6.2'
api 'org.apache.maven:maven-model:3.9.1'
api 'com.networknt:json-schema-validator:1.0.73'
api "com.fasterxml.jackson.core:jackson-databind:${props.getProperty('jackson_databind')}"

@@ -56,13 +56,9 @@ public void apply(Project project) {

Provider<DockerSupportService> dockerSupportServiceProvider = project.getGradle()
.getSharedServices()
.registerIfAbsent(
DOCKER_SUPPORT_SERVICE_NAME,
DockerSupportService.class,
spec -> spec.parameters(
params -> { params.setExclusionsFile(new File(project.getRootDir(), DOCKER_ON_LINUX_EXCLUSIONS_FILE)); }
)
);
.registerIfAbsent(DOCKER_SUPPORT_SERVICE_NAME, DockerSupportService.class, spec -> spec.parameters(params -> {
params.setExclusionsFile(new File(project.getRootDir(), DOCKER_ON_LINUX_EXCLUSIONS_FILE));
}));

// Ensure that if we are trying to run any DockerBuildTask tasks, we assert an available Docker installation exists
project.getGradle().getTaskGraph().whenReady(graph -> {
@@ -76,12 +76,9 @@ public InternalDistributionBwcSetupPlugin(ProviderFactory providerFactory) {
@Override
public void apply(Project project) {
project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class);
BuildParams.getBwcVersions()
.forPreviousUnreleased(
(BwcVersions.UnreleasedVersionInfo unreleasedVersion) -> {
configureBwcProject(project.project(unreleasedVersion.gradleProjectPath), unreleasedVersion);
}
);
BuildParams.getBwcVersions().forPreviousUnreleased((BwcVersions.UnreleasedVersionInfo unreleasedVersion) -> {
configureBwcProject(project.project(unreleasedVersion.gradleProjectPath), unreleasedVersion);
});
}

private void configureBwcProject(Project project, BwcVersions.UnreleasedVersionInfo versionInfo) {
@@ -197,13 +197,9 @@ public void apply(Project project) {

// windows boxes get windows distributions, and linux boxes get linux distributions
if (isWindows(vmProject)) {
configureVMWrapperTasks(
vmProject,
windowsTestTasks,
depsTasks,
wrapperTask -> { vmLifecyleTasks.get(OpenSearchDistribution.Type.ARCHIVE).configure(t -> t.dependsOn(wrapperTask)); },
vmDependencies
);
configureVMWrapperTasks(vmProject, windowsTestTasks, depsTasks, wrapperTask -> {
vmLifecyleTasks.get(OpenSearchDistribution.Type.ARCHIVE).configure(t -> t.dependsOn(wrapperTask));
}, vmDependencies);
} else {
for (Entry<OpenSearchDistribution.Type, List<TaskProvider<Test>>> entry : linuxTestTasks.entrySet()) {
OpenSearchDistribution.Type type = entry.getKey();
@@ -16,7 +16,7 @@ repositories {
mavenCentral()
}
dependencies {
implementation 'org.apache.logging.log4j:log4j-core:2.18.0'
implementation 'org.apache.logging.log4j:log4j-core:2.20.0'
}

["0.0.1", "0.0.2"].forEach { v ->
8 changes: 4 additions & 4 deletions buildSrc/version.properties
@@ -9,7 +9,7 @@ spatial4j = 0.7
jts = 1.15.0
jackson = 2.14.2
jackson_databind = 2.14.2
snakeyaml = 1.33
snakeyaml = 2.0
icu4j = 70.1
supercsv = 2.4.0
# Update to 2.17.2+ is breaking OpenSearchJsonLayout (see https://issues.apache.org/jira/browse/LOG4J2-3562)
@@ -25,7 +25,7 @@ guava = 31.1-jre
# when updating the JNA version, also update the version in buildSrc/build.gradle
jna = 5.5.0

netty = 4.1.87.Final
netty = 4.1.90.Final
joda = 2.12.2

# client dependencies
@@ -46,9 +46,9 @@ bouncycastle=1.70
randomizedrunner = 2.7.1
junit = 4.13.2
hamcrest = 2.1
mockito = 5.1.0
mockito = 5.2.0
objenesis = 3.2
bytebuddy = 1.12.22
bytebuddy = 1.14.2

# benchmark dependencies
jmh = 1.35
@@ -43,7 +43,9 @@ public class MainResponse {
private static final ConstructingObjectParser<MainResponse, Void> PARSER = new ConstructingObjectParser<>(
MainResponse.class.getName(),
true,
args -> { return new MainResponse((String) args[0], (Version) args[1], (String) args[2], (String) args[3]); }
args -> {
return new MainResponse((String) args[0], (Version) args[1], (String) args[2], (String) args[3]);
}
);

static {
@@ -240,7 +240,9 @@ public static final class FieldStatistics {
private static final ConstructingObjectParser<FieldStatistics, Void> PARSER = new ConstructingObjectParser<>(
"field_statistics",
true,
args -> { return new FieldStatistics((long) args[0], (int) args[1], (long) args[2]); }
args -> {
return new FieldStatistics((long) args[0], (int) args[1], (long) args[2]);
}
);

static {
@@ -411,11 +413,9 @@ public int hashCode() {

public static final class Token {

private static final ConstructingObjectParser<Token, Void> PARSER = new ConstructingObjectParser<>(
"token",
true,
args -> { return new Token((Integer) args[0], (Integer) args[1], (Integer) args[2], (String) args[3]); }
);
private static final ConstructingObjectParser<Token, Void> PARSER = new ConstructingObjectParser<>("token", true, args -> {
return new Token((Integer) args[0], (Integer) args[1], (Integer) args[2], (String) args[3]);
});
static {
PARSER.declareInt(optionalConstructorArg(), new ParseField("start_offset"));
PARSER.declareInt(optionalConstructorArg(), new ParseField("end_offset"));
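
The MainResponse, FieldStatistics, and Token hunks above are formatting-only; the single-line ConstructingObjectParser lambdas are expanded into block bodies, most likely by the spotless 6.17.0 bump made elsewhere in this commit. For orientation, here is a rough sketch of the declare/apply pattern those parsers follow; the class, field names, and package locations (which shifted during the xcontent-to-core refactor noted in the changelog) are assumptions rather than code from this commit:

import org.opensearch.core.ParseField;
import org.opensearch.core.xcontent.ConstructingObjectParser;
import org.opensearch.core.xcontent.XContentParser;

import static org.opensearch.core.xcontent.ConstructingObjectParser.constructorArg;

// Hypothetical response type, used only to illustrate the pattern.
public final class ExampleResponse {
    private final String name;
    private final int count;

    ExampleResponse(String name, int count) {
        this.name = name;
        this.count = count;
    }

    // Parser name, lenient-parsing flag, then a constructor over the positional args.
    private static final ConstructingObjectParser<ExampleResponse, Void> PARSER = new ConstructingObjectParser<>(
        "example_response",
        true,
        args -> new ExampleResponse((String) args[0], (int) args[1])
    );

    static {
        // Each declare call binds a JSON field to one positional constructor argument.
        PARSER.declareString(constructorArg(), new ParseField("name"));
        PARSER.declareInt(constructorArg(), new ParseField("count"));
    }

    public static ExampleResponse fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }
}
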
@@ -628,10 +628,9 @@ public void testIndex() throws IOException {
assertEquals("index", indexResponse.getIndex());
assertEquals("with_create_op_type", indexResponse.getId());

OpenSearchStatusException exception = expectThrows(
OpenSearchStatusException.class,
() -> { execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); }
);
OpenSearchStatusException exception = expectThrows(OpenSearchStatusException.class, () -> {
execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
});

assertEquals(RestStatus.CONFLICT, exception.status());
assertEquals(
@@ -2032,8 +2032,8 @@ public void testSimulateIndexTemplate() throws Exception {
Settings settings = Settings.builder().put("index.number_of_shards", 1).build();
CompressedXContent mappings = new CompressedXContent("{\"properties\":{\"host_name\":{\"type\":\"keyword\"}}}");
AliasMetadata alias = AliasMetadata.builder("alias").writeIndex(true).build();
Template template = new Template(settings, mappings, org.opensearch.common.collect.Map.of("alias", alias));
List<String> pattern = org.opensearch.common.collect.List.of("pattern");
Template template = new Template(settings, mappings, Map.of("alias", alias));
List<String> pattern = List.of("pattern");
ComposableIndexTemplate indexTemplate = new ComposableIndexTemplate(
pattern,
template,
Expand All @@ -2058,7 +2058,7 @@ public void testSimulateIndexTemplate() throws Exception {
AliasMetadata simulationAlias = AliasMetadata.builder("simulation-alias").writeIndex(true).build();
ComposableIndexTemplate simulationTemplate = new ComposableIndexTemplate(
pattern,
new Template(null, null, org.opensearch.common.collect.Map.of("simulation-alias", simulationAlias)),
new Template(null, null, Map.of("simulation-alias", simulationAlias)),
Collections.emptyList(),
2L,
1L,
@@ -112,10 +112,9 @@ public void testReindex() throws IOException {
reindexRequest.setRefresh(true);
reindexRequest.setRequireAlias(true);

OpenSearchStatusException exception = expectThrows(
OpenSearchStatusException.class,
() -> { execute(reindexRequest, highLevelClient()::reindex, highLevelClient()::reindexAsync); }
);
OpenSearchStatusException exception = expectThrows(OpenSearchStatusException.class, () -> {
execute(reindexRequest, highLevelClient()::reindex, highLevelClient()::reindexAsync);
});
assertEquals(RestStatus.NOT_FOUND, exception.status());
assertEquals(
"OpenSearch exception [type=index_not_found_exception, reason=no such index [dest] and [require_alias] request flag is [true] and [dest] is not an alias]",