Merge branch 'opensearch-project:main' into searchbackpressure/integtests
PritLadani authored Nov 21, 2022
2 parents ec08b58 + 7dc137f commit b1d0d32
Showing 24 changed files with 34 additions and 226 deletions.
10 changes: 9 additions & 1 deletion CHANGELOG.md
@@ -1,5 +1,12 @@
# CHANGELOG

+## [Unreleased]
+### Dependencies
+- Bumps `commons-compress` from 1.21 to 1.22
+- Bumps `jcodings` from 1.0.57 to 1.0.58
+- Bumps `google-http-client-jackson2` from 1.35.0 to 1.42.3
+- Bumps `maxmind-db` from 2.0.0 to 2.1.0

All notable changes to this project are documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). See the [CONTRIBUTING guide](./CONTRIBUTING.md#Changelog) for instructions on how to add changelog entries.
@@ -75,11 +82,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Bumps `commons-compress` from 1.21 to 1.22 ([#5104](https://github.com/opensearch-project/OpenSearch/pull/5104))
- Bump `opencensus-contrib-http-util` from 0.18.0 to 0.31.1 ([#3633](https://github.com/opensearch-project/OpenSearch/pull/3633))
- Bump `geoip2` from 3.0.1 to 3.0.2 ([#5103](https://github.com/opensearch-project/OpenSearch/pull/5103))
- Bump gradle-extra-configurations-plugin from 7.0.0 to 8.0.0 ([#4808](https://github.com/opensearch-project/OpenSearch/pull/4808))
### Changed
### Deprecated
### Removed
### Fixed
### Security

[Unreleased 3.0]: https://github.com/opensearch-project/OpenSearch/compare/2.4...HEAD
[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.4...2.x
2 changes: 1 addition & 1 deletion buildSrc/build.gradle
@@ -105,7 +105,7 @@ dependencies {
api 'commons-codec:commons-codec:1.15'
api 'org.apache.commons:commons-compress:1.21'
api 'org.apache.ant:ant:1.10.12'
-api 'com.netflix.nebula:gradle-extra-configurations-plugin:7.0.0'
+api 'com.netflix.nebula:gradle-extra-configurations-plugin:8.0.0'
api 'com.netflix.nebula:nebula-publishing-plugin:4.6.0'
api 'com.netflix.nebula:gradle-info-plugin:11.3.3'
api 'org.apache.rat:apache-rat:0.13'
2 changes: 1 addition & 1 deletion libs/cli/build.gradle
@@ -28,7 +28,7 @@
* under the License.
*/
apply plugin: 'opensearch.build'
-apply plugin: 'nebula.optional-base'
+apply plugin: 'com.netflix.nebula.optional-base'
apply plugin: 'opensearch.publish'

dependencies {
2 changes: 1 addition & 1 deletion libs/core/build.gradle
@@ -30,7 +30,7 @@

import org.opensearch.gradle.info.BuildParams

-apply plugin: 'nebula.optional-base'
+apply plugin: 'com.netflix.nebula.optional-base'
apply plugin: 'opensearch.publish'

archivesBaseName = 'opensearch-core'
2 changes: 1 addition & 1 deletion libs/grok/build.gradle
@@ -31,7 +31,7 @@
dependencies {
api 'org.jruby.joni:joni:2.1.43'
// joni dependencies:
-api 'org.jruby.jcodings:jcodings:1.0.57'
+api 'org.jruby.jcodings:jcodings:1.0.58'

testImplementation(project(":test:framework")) {
exclude group: 'org.opensearch', module: 'opensearch-grok'
1 change: 0 additions & 1 deletion libs/grok/licenses/jcodings-1.0.57.jar.sha1

This file was deleted.

1 change: 1 addition & 0 deletions libs/grok/licenses/jcodings-1.0.58.jar.sha1
@@ -0,0 +1 @@
dce27159dc0382e5f7518d4f3e499fc8396357ed
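Each licenses/*.jar.sha1 file pins the expected SHA-1 digest of one dependency jar, which is why every version bump in this diff pairs a deleted digest file with a new one; the build can then verify downloaded jars against the committed values. As a rough sketch of what such a digest is — plain Java with a hypothetical jar path, not the build's actual verification code:

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;

public final class Sha1File {
    public static void main(String[] args) throws Exception {
        // Hypothetical path to the downloaded dependency jar.
        Path jar = Path.of("jcodings-1.0.58.jar");
        MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
        try (InputStream in = Files.newInputStream(jar)) {
            byte[] buf = new byte[8192];
            int n;
            while ((n = in.read(buf)) != -1) {
                sha1.update(buf, 0, n); // stream the jar through the digest
            }
        }
        StringBuilder hex = new StringBuilder();
        for (byte b : sha1.digest()) {
            hex.append(String.format("%02x", b)); // lowercase hex, as in the .sha1 files
        }
        System.out.println(hex); // what the committed .sha1 file stores
    }
}

Run against the jcodings 1.0.58 jar, this should print the dce27159… value committed above.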
2 changes: 1 addition & 1 deletion modules/ingest-geoip/build.gradle
@@ -43,7 +43,7 @@ dependencies {
// geoip2 dependencies:
api("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}")
api("com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}")
-api('com.maxmind.db:maxmind-db:2.0.0')
+api('com.maxmind.db:maxmind-db:2.1.0')

testImplementation 'org.elasticsearch:geolite2-databases:20191119'
}
1 change: 0 additions & 1 deletion modules/ingest-geoip/licenses/maxmind-db-2.0.0.jar.sha1

This file was deleted.

1 change: 1 addition & 0 deletions modules/ingest-geoip/licenses/maxmind-db-2.1.0.jar.sha1
@@ -0,0 +1 @@
5fb0a7c4677ba725149ed557df9d0809d1836b80
2 changes: 1 addition & 1 deletion plugins/repository-gcs/build.gradle
@@ -76,7 +76,7 @@ dependencies {
api 'com.google.oauth-client:google-oauth-client:1.33.3'
api 'com.google.api-client:google-api-client:1.34.0'
api 'com.google.http-client:google-http-client-appengine:1.41.8'
-api 'com.google.http-client:google-http-client-jackson2:1.35.0'
+api 'com.google.http-client:google-http-client-jackson2:1.42.3'
api 'com.google.http-client:google-http-client-gson:1.41.4'
api 'com.google.api:gax-httpjson:0.103.1'
api 'io.grpc:grpc-context:1.46.0'

This file was deleted.

@@ -0,0 +1 @@
789cafde696403b429026bf19071caf46d8c8934
2 changes: 1 addition & 1 deletion plugins/repository-hdfs/build.gradle
@@ -74,7 +74,7 @@ dependencies {
api 'commons-cli:commons-cli:1.5.0'
api "commons-codec:commons-codec:${versions.commonscodec}"
api 'commons-collections:commons-collections:3.2.2'
-api 'org.apache.commons:commons-compress:1.21'
+api 'org.apache.commons:commons-compress:1.22'
api 'org.apache.commons:commons-configuration2:2.8.0'
api 'commons-io:commons-io:2.11.0'
api 'org.apache.commons:commons-lang3:3.12.0'

This file was deleted.

@@ -0,0 +1 @@
691a8b4e6cf4248c3bc72c8b719337d5cb7359fa
2 changes: 1 addition & 1 deletion server/build.gradle
@@ -31,7 +31,7 @@
import org.opensearch.gradle.info.BuildParams

apply plugin: 'opensearch.build'
-apply plugin: 'nebula.optional-base'
+apply plugin: 'com.netflix.nebula.optional-base'
apply plugin: 'opensearch.publish'
apply plugin: 'opensearch.internal-cluster-test'

143 changes: 0 additions & 143 deletions server/src/main/java/org/opensearch/common/lucene/Lucene.java
@@ -32,20 +32,14 @@

package org.opensearch.common.lucene;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.FilterCodecReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.FilterLeafReader;
@@ -55,21 +49,12 @@
import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafMetaData;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.VectorValues;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.FieldDoc;
@@ -142,18 +127,6 @@ public class Lucene {

private Lucene() {}

public static Version parseVersion(@Nullable String version, Version defaultVersion, Logger logger) {
if (version == null) {
return defaultVersion;
}
try {
return Version.parse(version);
} catch (ParseException e) {
logger.warn(() -> new ParameterizedMessage("no version match {}, default to {}", version, defaultVersion), e);
return defaultVersion;
}
}

/**
* Reads the segments infos, failing if it fails to load
*/
@@ -697,34 +670,6 @@ public static boolean indexExists(final Directory directory) throws IOException
return DirectoryReader.indexExists(directory);
}

/**
* Wait for an index to exist for up to {@code timeLimitMillis}. Returns
* true if the index eventually exists, false if not.
*
* Will retry the directory every second for at least {@code timeLimitMillis}
*/
public static boolean waitForIndex(final Directory directory, final long timeLimitMillis) throws IOException {
final long DELAY = 1000;
long waited = 0;
try {
while (true) {
if (waited >= timeLimitMillis) {
break;
}
if (indexExists(directory)) {
return true;
}
Thread.sleep(DELAY);
waited += DELAY;
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return false;
}
// one more try after all retries
return indexExists(directory);
}

/**
* Returns {@code true} iff the given exception or
* one of it's causes is an instance of {@link CorruptIndexException},
@@ -1024,92 +969,4 @@ public static NumericDocValuesField newSoftDeletesField() {
return new NumericDocValuesField(SOFT_DELETES_FIELD, 1);
}

/**
* Returns an empty leaf reader with the given max docs. The reader will be fully deleted.
*/
public static LeafReader emptyReader(final int maxDoc) {
return new LeafReader() {
final Bits liveDocs = new Bits.MatchNoBits(maxDoc);

public Terms terms(String field) {
return null;
}

public NumericDocValues getNumericDocValues(String field) {
return null;
}

public BinaryDocValues getBinaryDocValues(String field) {
return null;
}

public SortedDocValues getSortedDocValues(String field) {
return null;
}

public SortedNumericDocValues getSortedNumericDocValues(String field) {
return null;
}

public SortedSetDocValues getSortedSetDocValues(String field) {
return null;
}

public NumericDocValues getNormValues(String field) {
return null;
}

public FieldInfos getFieldInfos() {
return new FieldInfos(new FieldInfo[0]);
}

public Bits getLiveDocs() {
return this.liveDocs;
}

public PointValues getPointValues(String fieldName) {
return null;
}

public void checkIntegrity() {}

public Fields getTermVectors(int docID) {
return null;
}

public int numDocs() {
return 0;
}

public int maxDoc() {
return maxDoc;
}

public void document(int docID, StoredFieldVisitor visitor) {}

protected void doClose() {}

public LeafMetaData getMetaData() {
return new LeafMetaData(Version.LATEST.major, Version.LATEST, null);
}

public CacheHelper getCoreCacheHelper() {
return null;
}

public CacheHelper getReaderCacheHelper() {
return null;
}

@Override
public VectorValues getVectorValues(String field) throws IOException {
return null;
}

@Override
public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs, int visitedLimit) throws IOException {
return null;
}
};
}
}
@@ -323,10 +323,6 @@ private boolean termArraysEquals(List<Term[]> termArrays1, List<Term[]> termArra
return true;
}

-public String getField() {
-return field;
-}

@Override
public void visit(QueryVisitor visitor) {
visitor.visitLeaf(this);
@@ -87,10 +87,6 @@ public static Query newLenientFieldQuery(String field, RuntimeException e) {
return Queries.newMatchNoDocsQuery("failed [" + field + "] query, caused by " + message);
}

-public static Query newNestedFilter() {
-return not(newNonNestedFilter());
-}

/**
* Creates a new non-nested docs query
*/
@@ -73,10 +73,6 @@ public WeightFactorFunction(float weight) {
this(weight, null, null);
}

-public WeightFactorFunction(float weight, @Nullable String functionName) {
-this(weight, null, functionName);
-}

@Override
public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOException {
final LeafScoreFunction leafFunction = scoreFunction.getLeafScoreFunction(ctx);
@@ -901,7 +901,10 @@ public void testWeightedOperationRoutingWeightUndefinedForOneZone() throws Excep
try {
ClusterState state = clusterStateForWeightedRouting(indexNames, numShards, numReplicas);

-Settings setting = Settings.builder().put("cluster.routing.allocation.awareness.attributes", "zone").build();
+Settings setting = Settings.builder()
+.put("cluster.routing.allocation.awareness.attributes", "zone")
+.put("cluster.routing.allocation.awareness.force.zone.values", "a,b,c")
+.build();

threadPool = new TestThreadPool("testThatOnlyNodesSupport");
clusterService = ClusterServiceUtils.createClusterService(threadPool);
@@ -932,8 +935,9 @@ public void testWeightedOperationRoutingWeightUndefinedForOneZone() throws Excep
);

for (ShardIterator it : groupIterator) {
-List<ShardRouting> shardRoutings = Collections.singletonList(it.nextOrNull());
-for (ShardRouting shardRouting : shardRoutings) {
+while (it.remaining() > 0) {
+ShardRouting shardRouting = it.nextOrNull();
+assertNotNull(shardRouting);
selectedNodes.add(shardRouting.currentNodeId());
}
}
@@ -950,9 +954,8 @@ public void testWeightedOperationRoutingWeightUndefinedForOneZone() throws Excep
assertFalse(weighAwayNodesInUndefinedZone);

selectedNodes = new HashSet<>();
-setting = Settings.builder().put("cluster.routing.allocation.awareness.attributes", "zone").build();

-// Updating weighted round robin weights in cluster state
+// Updating weighted round-robin weights in cluster state
weights = Map.of("a", 0.0, "b", 1.0);

state = setWeightedRoutingWeights(state, weights);
@@ -964,11 +967,13 @@ public void testWeightedOperationRoutingWeightUndefinedForOneZone() throws Excep
groupIterator = opRouting.searchShards(state, indexNames, null, null, collector, outstandingRequests);

for (ShardIterator it : groupIterator) {
-List<ShardRouting> shardRoutings = Collections.singletonList(it.nextOrNull());
-for (ShardRouting shardRouting : shardRoutings) {
+while (it.remaining() > 0) {
+ShardRouting shardRouting = it.nextOrNull();
+assertNotNull(shardRouting);
selectedNodes.add(shardRouting.currentNodeId());
}
}

// tests that no shards are assigned to zone with weight zero
// tests shards are assigned to nodes in zone c
weighAwayNodesInUndefinedZone = true;
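The two test hunks above replace a single it.nextOrNull() call — which inspected only the first shard routing of each iterator — with a loop that drains the iterator, so the assertions now cover every routing in the group. A minimal, self-contained sketch of the two patterns, with ShardIterator reduced to a hypothetical two-method interface since only remaining() and nextOrNull() are used here:

import java.util.ArrayDeque;
import java.util.List;
import java.util.Queue;

public final class DrainIteratorSketch {
    // Hypothetical stand-in for the two ShardIterator methods the test uses.
    interface ShardIter<T> {
        int remaining();
        T nextOrNull();
    }

    public static void main(String[] args) {
        Queue<String> nodes = new ArrayDeque<>(List.of("node-a", "node-b", "node-c"));
        ShardIter<String> it = new ShardIter<>() {
            public int remaining() { return nodes.size(); }
            public String nextOrNull() { return nodes.poll(); }
        };
        // Old pattern: Collections.singletonList(it.nextOrNull()) observed
        // only the first routing, leaving the rest of the group unchecked.
        // New pattern: drain the iterator so every routing is checked.
        while (it.remaining() > 0) {
            String routing = it.nextOrNull();
            System.out.println(routing); // stands in for selectedNodes.add(...)
        }
    }
}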