Merge remote-tracking branch 'upstream/main' into fix-response-code
imRishN committed Nov 17, 2022
2 parents 9617ccd + 059b614 commit adb4e55
Showing 158 changed files with 909 additions and 658 deletions.
22 changes: 20 additions & 2 deletions .github/workflows/gradle-check.yml
@@ -67,7 +67,7 @@ jobs:
files: ./codeCoverage.xml

- name: Create Comment Success
if: ${{ github.event_name == 'pull_request_target' && success() }}
if: ${{ github.event_name == 'pull_request_target' && success() && env.result == 'SUCCESS' }}
uses: peter-evans/create-or-update-comment@v2
with:
issue-number: ${{ env.pr_number }}
@@ -78,13 +78,31 @@ jobs:
* **CommitID:** ${{ env.pr_from_sha }}
- name: Extract Test Failure
if: ${{ github.event_name == 'pull_request_target' && failure() }}
if: ${{ github.event_name == 'pull_request_target' && env.result != 'SUCCESS' }}
run: |
TEST_FAILURES=`curl -s "${{ env.workflow_url }}/testReport/api/json?tree=suites\[cases\[status,className,name\]\]" | jq -r '.. | objects | select(.status=="FAILED",.status=="REGRESSION") | (.className + "." + .name)' | uniq -c | sort -n -r | head -n 10`
echo "test_failures<<EOF" >> $GITHUB_ENV
echo "$TEST_FAILURES" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
- name: Create Comment Flaky
if: ${{ github.event_name == 'pull_request_target' && success() && env.result != 'SUCCESS' }}
uses: peter-evans/create-or-update-comment@v2
with:
issue-number: ${{ env.pr_number }}
body: |
### Gradle Check (Jenkins) Run Completed with:
* **RESULT:** ${{ env.result }} :grey_exclamation:
* **FLAKY TEST FAILURES:**
The following tests failed but succeeded upon retry:
```
${{ env.test_failures }}
```
* **URL:** ${{ env.workflow_url }}
* **CommitID:** ${{ env.pr_from_sha }}
Please examine the workflow log, locate, and copy-paste the failure below, then iterate to green.
Is the failure [a flaky test](https://github.com/opensearch-project/OpenSearch/blob/main/DEVELOPER_GUIDE.md#flaky-tests) unrelated to your change?
- name: Create Comment Failure
if: ${{ github.event_name == 'pull_request_target' && failure() }}
uses: peter-evans/create-or-update-comment@v2
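Taken together, the three comment steps above implement a three-way decision: the step-level `success()`/`failure()` status is combined with the Jenkins build result that the workflow stores in `env.result`. A hedged Java sketch of that decision, not part of the repository (the class, method, and result strings are invented, and cancelled runs are ignored):

```java
// Hypothetical illustration of the comment selection in gradle-check.yml, not repository code.
public class GradleCheckCommentChooser {
    enum Comment { SUCCESS, FLAKY, FAILURE }

    // stepSucceeded stands in for GitHub's success()/failure(); jenkinsResult for env.result.
    static Comment choose(boolean stepSucceeded, String jenkinsResult) {
        if (stepSucceeded && "SUCCESS".equals(jenkinsResult)) {
            return Comment.SUCCESS; // "Create Comment Success"
        }
        if (stepSucceeded) {
            return Comment.FLAKY;   // "Create Comment Flaky": check passed, but only after retrying failed tests
        }
        return Comment.FAILURE;     // "Create Comment Failure"
    }

    public static void main(String[] args) {
        System.out.println(choose(true, "SUCCESS"));  // SUCCESS
        System.out.println(choose(true, "UNSTABLE")); // FLAKY
        System.out.println(choose(false, "FAILURE")); // FAILURE
    }
}
```

The new "Create Comment Flaky" step covers the middle case: the check ultimately passed, but `env.result` was not `SUCCESS`, so the extracted test names are posted as suspected flaky tests.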
@@ -174,7 +174,7 @@ private static void addIvyRepo(Project project, String name, String url, String

project.getRepositories().exclusiveContent(exclusiveContentRepository -> {
exclusiveContentRepository.filter(config -> config.includeGroup(group));
exclusiveContentRepository.forRepositories(repos.toArray(new IvyArtifactRepository[repos.size()]));
exclusiveContentRepository.forRepositories(repos.toArray(new IvyArtifactRepository[0]));
});
}
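This is the first of many hunks in the commit that make the same change: `Collection.toArray` is called with a zero-length array instead of one pre-sized via `size()`. A self-contained comparison of the two forms (illustrative only; this class and its `values` list are made up, not code from the repository):

```java
import java.util.ArrayList;
import java.util.List;

// Illustrative only: this class and its data are not part of the commit.
public class ToArrayIdiom {
    public static void main(String[] args) {
        List<String> values = new ArrayList<>(List.of("a", "b", "c"));

        // Old form removed throughout this commit: ask the collection for its size
        // and hand toArray a destination array of that size.
        String[] preSized = values.toArray(new String[values.size()]);

        // New form the commit switches to: pass a zero-length array and let
        // toArray allocate one of exactly the right size itself.
        String[] zeroLength = values.toArray(new String[0]);

        System.out.println(preSized.length);   // 3
        System.out.println(zeroLength.length); // 3
    }
}
```

The zero-length form is generally at least as fast on current JVMs, and it sidesteps the window between the `size()` call and the copy in which a concurrently modified collection could leave an over-sized, `null`-padded result.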

@@ -109,7 +109,9 @@ public void execute(Task t) {
test.systemProperty("java.locale.providers", "SPI,JRE");
} else {
test.systemProperty("java.locale.providers", "SPI,COMPAT");
test.jvmArgs("--illegal-access=warn");
if (test.getJavaVersion().compareTo(JavaVersion.VERSION_17) < 0) {
test.jvmArgs("--illegal-access=warn");
}
}
if (test.getJavaVersion().compareTo(JavaVersion.VERSION_17) > 0) {
test.jvmArgs("-Djava.security.manager=allow");
@@ -233,7 +233,7 @@ static ShardStats fromXContent(XContentParser parser) throws IOException {
parser.skipChildren();
}
}
return new ShardStats(successfulShards, totalShards, skippedShards, failures.toArray(new ShardSearchFailure[failures.size()]));
return new ShardStats(successfulShards, totalShards, skippedShards, failures.toArray(new ShardSearchFailure[0]));
}

@Override
@@ -435,7 +435,7 @@ public PutIndexTemplateRequest alias(Alias alias) {

@Override
public String[] indices() {
return indexPatterns.toArray(new String[indexPatterns.size()]);
return indexPatterns.toArray(new String[0]);
}

@Override
@@ -153,7 +153,7 @@ static CustomResponseSection2 fromXContent(XContentParser parser) throws IOExcep
values.add(parser.text());
}
assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken());
CustomResponseSection2 responseSection2 = new CustomResponseSection2(values.toArray(new String[values.size()]));
CustomResponseSection2 responseSection2 = new CustomResponseSection2(values.toArray(new String[0]));
assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
return responseSection2;
}
@@ -1028,7 +1028,7 @@ private static void setFileAttributes(final Path path, final Set<PosixFilePermis

@Override
public void close() throws IOException {
IOUtils.rm(pathsToDeleteOnShutdown.toArray(new Path[pathsToDeleteOnShutdown.size()]));
IOUtils.rm(pathsToDeleteOnShutdown.toArray(new Path[0]));
}

}
@@ -215,7 +215,7 @@ void execute(Terminal terminal, Environment env, String pluginName, boolean purg
// finally, add the marker file
pluginPaths.add(removing);

IOUtils.rm(pluginPaths.toArray(new Path[pluginPaths.size()]));
IOUtils.rm(pluginPaths.toArray(new Path[0]));
}

}
@@ -68,12 +68,12 @@ public ParseField(String name, String... deprecatedNames) {
} else {
final HashSet<String> set = new HashSet<>();
Collections.addAll(set, deprecatedNames);
this.deprecatedNames = set.toArray(new String[set.size()]);
this.deprecatedNames = set.toArray(new String[0]);
}
Set<String> allNames = new HashSet<>();
allNames.add(name);
Collections.addAll(allNames, this.deprecatedNames);
this.allNames = allNames.toArray(new String[allNames.size()]);
this.allNames = allNames.toArray(new String[0]);
}

/**
@@ -101,7 +101,7 @@ public static FilterPath[] compile(Set<String> filters) {
}
}
}
return paths.toArray(new FilterPath[paths.size()]);
return paths.toArray(new FilterPath[0]);
}

private static FilterPath parse(final String filter, final String segment) {
@@ -95,7 +95,7 @@ private TokenFilter evaluate(String name, FilterPath[] filters) {
}

if ((nextFilters != null) && (nextFilters.isEmpty() == false)) {
return new FilterPathBasedFilter(nextFilters.toArray(new FilterPath[nextFilters.size()]), inclusive);
return new FilterPathBasedFilter(nextFilters.toArray(new FilterPath[0]), inclusive);
}
}
return NO_MATCHING;
@@ -310,7 +310,7 @@ public void testHasParentFilter() throws Exception {
}
assertThat(parentToChildren.get(previousParentId).add(childId), is(true));
}
indexRandom(true, builders.toArray(new IndexRequestBuilder[builders.size()]));
indexRandom(true, builders.toArray(new IndexRequestBuilder[0]));

assertThat(parentToChildren.isEmpty(), equalTo(false));
for (Map.Entry<String, Set<String>> parentToChildrenEntry : parentToChildren.entrySet()) {
@@ -148,7 +148,7 @@ protected void doExecute(Task task, RankEvalRequest request, ActionListener<Rank
if (summaryFields.isEmpty()) {
evaluationRequest.fetchSource(false);
} else {
evaluationRequest.fetchSource(summaryFields.toArray(new String[summaryFields.size()]), new String[0]);
evaluationRequest.fetchSource(summaryFields.toArray(new String[0]), new String[0]);
}
SearchRequest searchRequest = new SearchRequest(request.indices(), evaluationRequest);
searchRequest.indicesOptions(request.indicesOptions());
@@ -158,12 +158,7 @@ protected void doExecute(Task task, RankEvalRequest request, ActionListener<Rank
assert ratedRequestsInSearch.size() == msearchRequest.requests().size();
client.multiSearch(
msearchRequest,
new RankEvalActionListener(
listener,
metric,
ratedRequestsInSearch.toArray(new RatedRequest[ratedRequestsInSearch.size()]),
errors
)
new RankEvalActionListener(listener, metric, ratedRequestsInSearch.toArray(new RatedRequest[0]), errors)
);
}

@@ -517,7 +517,7 @@ void refreshAndFinish(List<Failure> indexingFailures, List<SearchFailure> search
return;
}
RefreshRequest refresh = new RefreshRequest();
refresh.indices(destinationIndices.toArray(new String[destinationIndices.size()]));
refresh.indices(destinationIndices.toArray(new String[0]));
logger.debug("[{}]: refreshing", task.getId());
client.admin().indices().refresh(refresh, new ActionListener<RefreshResponse>() {
@Override
@@ -362,7 +362,7 @@ public void testMultipleSources() throws Exception {
int slices = randomSlices(1, 10);
int expectedSlices = expectedSliceStatuses(slices, docs.keySet());

String[] sourceIndexNames = docs.keySet().toArray(new String[docs.size()]);
String[] sourceIndexNames = docs.keySet().toArray(new String[0]);

assertThat(
deleteByQuery().source(sourceIndexNames).filter(QueryBuilders.matchAllQuery()).refresh(true).setSlices(slices).get(),
@@ -161,7 +161,7 @@ public void testMultipleSources() throws Exception {
int slices = randomSlices(1, 10);
int expectedSlices = expectedSliceStatuses(slices, docs.keySet());

String[] sourceIndexNames = docs.keySet().toArray(new String[docs.size()]);
String[] sourceIndexNames = docs.keySet().toArray(new String[0]);
ReindexRequestBuilder request = reindex().source(sourceIndexNames).destination("dest").refresh(true).setSlices(slices);

BulkByScrollResponse response = request.get();
@@ -151,7 +151,7 @@ public void testMultipleSources() throws Exception {
int slices = randomSlices(1, 10);
int expectedSlices = expectedSliceStatuses(slices, docs.keySet());

String[] sourceIndexNames = docs.keySet().toArray(new String[docs.size()]);
String[] sourceIndexNames = docs.keySet().toArray(new String[0]);
BulkByScrollResponse response = updateByQuery().source(sourceIndexNames).refresh(true).setSlices(slices).get();
assertThat(response, matcher().updated(allDocs.size()).slices(hasSize(expectedSlices)));

@@ -237,7 +237,7 @@ public AnnotationToken[] getIntersectingAnnotations(int start, int end) {
// add 1 for the fieldvalue separator character
fieldValueOffset += fieldValueAnnotations.textMinusMarkup.length() + 1;
}
return intersectingAnnotations.toArray(new AnnotationToken[intersectingAnnotations.size()]);
return intersectingAnnotations.toArray(new AnnotationToken[0]);
}

private void append(StringBuilder dest, String content, int start, int end) {
@@ -708,7 +708,7 @@ private String[] randomUniqueIndices() {
while (uniqueIndices.size() < count) {
uniqueIndices.add(randomFrom(this.indices));
}
return uniqueIndices.toArray(new String[uniqueIndices.size()]);
return uniqueIndices.toArray(new String[0]);
}

private static void assertAllRequestsHaveBeenConsumed() {
@@ -163,7 +163,7 @@ public void testSimpleMixedFeatures() {
}
GetIndexResponse response = runWithRandomFeatureMethod(
client().admin().indices().prepareGetIndex().addIndices("idx"),
features.toArray(new Feature[features.size()])
features.toArray(new Feature[0])
);
String[] indices = response.indices();
assertThat(indices, notNullValue());
@@ -194,7 +194,7 @@ public void testEmptyMixedFeatures() {
}
GetIndexResponse response = runWithRandomFeatureMethod(
client().admin().indices().prepareGetIndex().addIndices("empty_idx"),
features.toArray(new Feature[features.size()])
features.toArray(new Feature[0])
);
String[] indices = response.indices();
assertThat(indices, notNullValue());
@@ -229,7 +229,6 @@ public void testInvariantsAndLogsOnDecommissionedNodes() throws Exception {

logger.info("--> starting decommissioning nodes in zone {}", 'a');
DecommissionAttribute decommissionAttribute = new DecommissionAttribute("zone", "a");
String activeNode = getNonDecommissionedNode(internalCluster().clusterService().state(), "a");
DecommissionRequest decommissionRequest = new DecommissionRequest(decommissionAttribute);
decommissionRequest.setNoDelay(true);
DecommissionResponse decommissionResponse = client().execute(DecommissionAction.INSTANCE, decommissionRequest).get();
@@ -239,6 +238,7 @@ public void testInvariantsAndLogsOnDecommissionedNodes() throws Exception {
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).get();

String decommissionedNode = randomFrom(clusterManagerNodes.get(0), dataNodes.get(0));
String activeNode = dataNodes.get(1);

ClusterService decommissionedNodeClusterService = internalCluster().getInstance(ClusterService.class, decommissionedNode);
DecommissionAttributeMetadata metadata = decommissionedNodeClusterService.state()
@@ -278,7 +278,7 @@ public boolean innerMatch(LogEvent event) {
);
TransportService clusterManagerTransportService = internalCluster().getInstance(
TransportService.class,
internalCluster().getClusterManagerName()
internalCluster().getClusterManagerName(activeNode)
);
MockTransportService decommissionedNodeTransportService = (MockTransportService) internalCluster().getInstance(
TransportService.class,
@@ -385,17 +385,27 @@ private void assertNodesRemovedAfterZoneDecommission(boolean originalClusterMana
clusterManagerNameToZone.put(clusterManagerNodes.get(2), "c");

logger.info("--> starting 4 data nodes each on zones 'a' & 'b' & 'c'");
List<String> nodes_in_zone_a = internalCluster().startDataOnlyNodes(
dataNodeCountPerAZ,
Settings.builder().put(commonSettings).put("node.attr.zone", "a").build()
Map<String, List<String>> zoneToNodesMap = new HashMap<>();
zoneToNodesMap.put(
"a",
internalCluster().startDataOnlyNodes(
dataNodeCountPerAZ,
Settings.builder().put(commonSettings).put("node.attr.zone", "a").build()
)
);
List<String> nodes_in_zone_b = internalCluster().startDataOnlyNodes(
dataNodeCountPerAZ,
Settings.builder().put(commonSettings).put("node.attr.zone", "b").build()
zoneToNodesMap.put(
"b",
internalCluster().startDataOnlyNodes(
dataNodeCountPerAZ,
Settings.builder().put(commonSettings).put("node.attr.zone", "b").build()
)
);
List<String> nodes_in_zone_c = internalCluster().startDataOnlyNodes(
dataNodeCountPerAZ,
Settings.builder().put(commonSettings).put("node.attr.zone", "c").build()
zoneToNodesMap.put(
"c",
internalCluster().startDataOnlyNodes(
dataNodeCountPerAZ,
Settings.builder().put(commonSettings).put("node.attr.zone", "c").build()
)
);
ensureStableCluster(15);
ClusterHealthResponse health = client().admin()
@@ -420,7 +430,20 @@ private void assertNodesRemovedAfterZoneDecommission(boolean originalClusterMana
tempZones.remove(originalClusterManagerZone);
zoneToDecommission = randomFrom(tempZones);
}
String activeNode = getNonDecommissionedNode(internalCluster().clusterService().state(), zoneToDecommission);
String activeNode;
switch (zoneToDecommission) {
case "a":
activeNode = randomFrom(randomFrom(zoneToNodesMap.get("b")), randomFrom(zoneToNodesMap.get("c")));
break;
case "b":
activeNode = randomFrom(randomFrom(zoneToNodesMap.get("a")), randomFrom(zoneToNodesMap.get("c")));
break;
case "c":
activeNode = randomFrom(randomFrom(zoneToNodesMap.get("a")), randomFrom(zoneToNodesMap.get("b")));
break;
default:
throw new IllegalStateException("unexpected zone decommissioned");
}

logger.info("--> setting shard routing weights for weighted round robin");
Map<String, Double> weights = new HashMap<>(Map.of("a", 1.0, "b", 1.0, "c", 1.0));
@@ -631,8 +654,8 @@ public void testDecommissionStatusUpdatePublishedToAllNodes() throws ExecutionEx
assertTrue(weightedRoutingResponse.isAcknowledged());

logger.info("--> starting decommissioning nodes in zone {}", 'c');
String activeNode = randomFrom(dataNodes.get(0), dataNodes.get(1));
DecommissionAttribute decommissionAttribute = new DecommissionAttribute("zone", "c");
String activeNode = getNonDecommissionedNode(internalCluster().clusterService().state(), "c");
// Set the timeout to 0 to do immediate Decommission
DecommissionRequest decommissionRequest = new DecommissionRequest(decommissionAttribute);
decommissionRequest.setNoDelay(true);
@@ -860,16 +883,6 @@ public void testDecommissionFailedWithOnlyOneAttributeValue() throws Exception {
ensureStableCluster(6, TimeValue.timeValueMinutes(2));
}

private String getNonDecommissionedNode(ClusterState clusterState, String decommissionedZone) {
List<String> allNodes = new ArrayList<>();
for (DiscoveryNode node : clusterState.nodes()) {
if (node.getAttributes().get("zone").equals(decommissionedZone) == false) {
allNodes.add(node.getName());
}
}
return randomFrom(allNodes);
}
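The helper above is deleted by this commit; the tests instead track which nodes they started in which zone (the `zoneToNodesMap` introduced earlier in the diff) or simply pick a known data node, and choose the active node from outside the decommissioned zone. A generalized sketch of that selection, not the tests' actual code (the class, method, and node names are invented):

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;

// Hypothetical stand-in for the zone-aware node selection used by the tests.
public class ActiveNodePicker {
    static String pickActiveNode(Map<String, List<String>> zoneToNodes, String decommissionedZone, Random random) {
        List<String> candidates = new ArrayList<>();
        for (Map.Entry<String, List<String>> entry : zoneToNodes.entrySet()) {
            if (entry.getKey().equals(decommissionedZone) == false) {
                candidates.addAll(entry.getValue()); // every node outside the decommissioned zone is a candidate
            }
        }
        return candidates.get(random.nextInt(candidates.size()));
    }

    public static void main(String[] args) {
        Map<String, List<String>> zoneToNodes = new HashMap<>();
        zoneToNodes.put("a", List.of("node-a1", "node-a2"));
        zoneToNodes.put("b", List.of("node-b1", "node-b2"));
        zoneToNodes.put("c", List.of("node-c1", "node-c2"));
        System.out.println(pickActiveNode(zoneToNodes, "c", new Random())); // prints a node from zone a or b
    }
}
```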

private static class WaitForFailedDecommissionState implements ClusterStateObserver.Listener {

final CountDownLatch doneLatch;
@@ -589,7 +589,7 @@ public void testIndexSearchAndRelocateConcurrently() throws Exception {
logger.info(" --> checking iteration {}", i);
SearchResponse afterRelocation = client().prepareSearch().setSize(ids.size()).get();
assertNoFailures(afterRelocation);
assertSearchHits(afterRelocation, ids.toArray(new String[ids.size()]));
assertSearchHits(afterRelocation, ids.toArray(new String[0]));
}
stopped.set(true);
for (Thread searchThread : searchThreads) {
@@ -147,7 +147,7 @@ public void setupSuiteScopeCluster() throws Exception {
.setSource(jsonBuilder().startObject().field("value", i * 2).field("location", "52.0945, 5.116").endObject())
);
}
indexRandom(true, builders.toArray(new IndexRequestBuilder[builders.size()]));
indexRandom(true, builders.toArray(new IndexRequestBuilder[0]));
ensureSearchable();
}

@@ -176,7 +176,7 @@ public void setupSuiteScopeCluster() throws Exception {

getMultiSortDocs(builders);

indexRandom(true, builders.toArray(new IndexRequestBuilder[builders.size()]));
indexRandom(true, builders.toArray(new IndexRequestBuilder[0]));
ensureSearchable();
}
