Commit ca6d8ee

Merge branch 'single-node-discovery' into single-node-bootstrap-checks
* single-node-discovery:
  Clear the interrupt flag before joining
  Migrate to max line length of 100
  Docs: Corrected path to elasticsearch-plugin (elastic#23622)
  Docs: Add comma to reverse nested agg snippet
  Fix third-party audit task for Gradle 3.4 (elastic#23612)
  Adapter action future should restore interrupts
  Update scripting.asciidoc
  Unmark reindex as experimental
  CompletionSuggestionContext#toQuery() should also consider text if prefix/regex missing (elastic#23451)
  Docs: Specify that byte units use powers of 1024 (elastic#23574)
  Remove Settings.settingsBuilder (elastic#23575)
  Change params._source to params['_source'] in example.
  Fix example in documentation for Painless using _source. (elastic#21322)
  Remove extra line from license header
  Fix num docs to be positive in bucket deferring collector test
  Mapping: Fix NPE with scaled floats stats when field is not indexed (elastic#23528)
jasontedor committed Mar 17, 2017
2 parents e9fed61 + 9fc2f1a commit ca6d8ee
Showing 20 changed files with 3,538 additions and 204 deletions.
@@ -209,9 +209,11 @@ public class ThirdPartyAuditTask extends AntTask {
         try {
             ant.thirdPartyAudit(failOnUnsupportedJava: false,
                                 failOnMissingClasses: false,
-                                signaturesFile: new File(getClass().getResource('/forbidden/third-party-audit.txt').toURI()),
                                 classpath: classpath.asPath) {
                 fileset(dir: tmpDir)
+                signatures {
+                    string(value: getClass().getResourceAsStream('/forbidden/third-party-audit.txt').getText('UTF-8'))
+                }
             }
         } catch (BuildException ignore) {}
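Note for context: `new File(getResource(...).toURI())` only works while the resource is a plain file on disk; under Gradle 3.4 the buildSrc resources are presumably packaged so that the URI no longer points at a filesystem path, and reading the resource as a stream avoids that assumption. A minimal Java sketch of the stream-based idiom (the resource name is borrowed from the diff above; the class itself is illustrative):

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.stream.Collectors;

public final class ResourceText {

    /**
     * Reads a classpath resource into a String. Unlike new File(url.toURI()),
     * this works whether the resource is an on-disk file or an entry in a jar.
     */
    static String read(String name) throws Exception {
        try (InputStream in = ResourceText.class.getResourceAsStream(name)) {
            if (in == null) {
                throw new IllegalArgumentException("no such resource: " + name);
            }
            BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
            return reader.lines().collect(Collectors.joining("\n"));
        }
    }

    public static void main(String[] args) throws Exception {
        // resource name borrowed from the diff for illustration
        System.out.println(read("/forbidden/third-party-audit.txt"));
    }
}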
2 changes: 1 addition & 1 deletion buildSrc/src/main/resources/checkstyle.xml
@@ -22,7 +22,7 @@
      suppress the check there but enforce it everywhere else. This prevents the list from getting longer even if it is
      unfair. -->
   <module name="LineLength">
-    <property name="max" value="140"/>
+    <property name="max" value="100"/>
   </module>

   <module name="AvoidStarImport" />
3,505 changes: 3,347 additions & 158 deletions buildSrc/src/main/resources/checkstyle_suppressions.xml

Large diffs are not rendered by default.

AdapterActionFuture (org.elasticsearch.action.support):

@@ -38,6 +38,7 @@ public T actionGet() {
         try {
             return get();
         } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
             throw new IllegalStateException("Future got interrupted", e);
         } catch (ExecutionException e) {
             throw rethrowExecutionException(e);

@@ -66,6 +67,7 @@ public T actionGet(long timeout, TimeUnit unit) {
         } catch (TimeoutException e) {
             throw new ElasticsearchTimeoutException(e);
         } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
             throw new IllegalStateException("Future got interrupted", e);
         } catch (ExecutionException e) {
             throw rethrowExecutionException(e);

@@ -100,4 +102,5 @@ public void onFailure(Exception e) {
     }

     protected abstract T convert(L listenerResponse);
+
 }
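Note for context: catching `InterruptedException` clears the thread's interrupt status, so translating it into an unchecked exception without re-setting the flag hides the interruption from callers further up the stack. Restoring the flag first preserves the signal. A minimal sketch of the idiom against plain `java.util.concurrent` types (not the Elasticsearch classes):

import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

final class InterruptFriendlyGet {

    /** Blocks on the future; on interruption, restores the flag before translating the exception. */
    static <T> T actionGet(Future<T> future) {
        try {
            return future.get();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag for callers
            throw new IllegalStateException("Future got interrupted", e);
        } catch (ExecutionException e) {
            throw new RuntimeException(e.getCause()); // simplified; the real code rethrows more carefully
        }
    }
}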
Scaled float field stats (mapper):

@@ -265,11 +265,16 @@ public FieldStats<?> stats(IndexReader reader) throws IOException {
         if (stats == null) {
             return null;
         }
-        return new FieldStats.Double(stats.getMaxDoc(), stats.getDocCount(),
+        if (stats.hasMinMax()) {
+            return new FieldStats.Double(stats.getMaxDoc(), stats.getDocCount(),
                 stats.getSumDocFreq(), stats.getSumTotalTermFreq(),
                 stats.isSearchable(), stats.isAggregatable(),
-                stats.getMinValue() == null ? null : stats.getMinValue() / scalingFactor,
-                stats.getMaxValue() == null ? null : stats.getMaxValue() / scalingFactor);
+                stats.getMinValue() / scalingFactor,
+                stats.getMaxValue() / scalingFactor);
+        }
+        return new FieldStats.Double(stats.getMaxDoc(), stats.getDocCount(),
+            stats.getSumDocFreq(), stats.getSumTotalTermFreq(),
+            stats.isSearchable(), stats.isAggregatable());
     }

     @Override
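Note for context: when a scaled_float field exists but has no indexed point values, the underlying stats carry no min/max, and the old unconditional construction could dereference nulls. Branching on `hasMinMax()` first keeps the scaling division on the safe path. A condensed, hypothetical sketch of the guard (stand-in types, not the Elasticsearch classes):

// Stand-in sketch; the real code builds FieldStats.Double instances.
static double[] scaledMinMax(boolean hasMinMax, long min, long max, double scalingFactor) {
    if (hasMinMax) {
        return new double[] { min / scalingFactor, max / scalingFactor };
    }
    return null; // field exists but has no point values: no min/max to scale
}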
CompletionSuggestionContext (org.elasticsearch.search.suggest.completion):

@@ -19,6 +19,7 @@
 package org.elasticsearch.search.suggest.completion;

 import org.apache.lucene.search.suggest.document.CompletionQuery;
+import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.index.mapper.CompletionFieldMapper;
 import org.elasticsearch.index.query.QueryShardContext;

@@ -77,15 +78,7 @@ CompletionQuery toQuery() {
         CompletionFieldMapper.CompletionFieldType fieldType = getFieldType();
         final CompletionQuery query;
         if (getPrefix() != null) {
-            if (fuzzyOptions != null) {
-                query = fieldType.fuzzyQuery(getPrefix().utf8ToString(),
-                    Fuzziness.fromEdits(fuzzyOptions.getEditDistance()),
-                    fuzzyOptions.getFuzzyPrefixLength(), fuzzyOptions.getFuzzyMinLength(),
-                    fuzzyOptions.getMaxDeterminizedStates(), fuzzyOptions.isTranspositions(),
-                    fuzzyOptions.isUnicodeAware());
-            } else {
-                query = fieldType.prefixQuery(getPrefix());
-            }
+            query = createCompletionQuery(getPrefix(), fieldType);
         } else if (getRegex() != null) {
             if (fuzzyOptions != null) {
                 throw new IllegalArgumentException("can not use 'fuzzy' options with 'regex");

@@ -95,8 +88,10 @@ CompletionQuery toQuery() {
             }
             query = fieldType.regexpQuery(getRegex(), regexOptions.getFlagsValue(),
                 regexOptions.getMaxDeterminizedStates());
+        } else if (getText() != null) {
+            query = createCompletionQuery(getText(), fieldType);
         } else {
-            throw new IllegalArgumentException("'prefix' or 'regex' must be defined");
+            throw new IllegalArgumentException("'prefix/text' or 'regex' must be defined");
         }
         if (fieldType.hasContextMappings()) {
             ContextMappings contextMappings = fieldType.getContextMappings();

@@ -105,4 +100,18 @@
         return query;
     }

+    private CompletionQuery createCompletionQuery(BytesRef prefix, CompletionFieldMapper.CompletionFieldType fieldType) {
+        final CompletionQuery query;
+        if (fuzzyOptions != null) {
+            query = fieldType.fuzzyQuery(prefix.utf8ToString(),
+                Fuzziness.fromEdits(fuzzyOptions.getEditDistance()),
+                fuzzyOptions.getFuzzyPrefixLength(), fuzzyOptions.getFuzzyMinLength(),
+                fuzzyOptions.getMaxDeterminizedStates(), fuzzyOptions.isTranspositions(),
+                fuzzyOptions.isUnicodeAware());
+        } else {
+            query = fieldType.prefixQuery(prefix);
+        }
+        return query;
+    }
+
 }
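Note for context: with this change a completion suggestion no longer requires an explicit `prefix`; plain suggest `text` (including global suggest text) now flows through the same prefix/fuzzy path. A client-side sketch mirroring the new integration test further down, with illustrative index and field names:

// Assumes an org.elasticsearch.client.Client named `client`; "my-index" and
// "suggest_field" are illustrative.
SearchResponse response = client.prepareSearch("my-index")
        .suggest(new SuggestBuilder()
                .setGlobalText("sugg") // no explicit prefix; the text is used as one
                .addSuggestion("foo", SuggestBuilders.completionSuggestion("suggest_field")))
        .get();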
New file: AdapterActionFutureTests.java (93 lines):

@@ -0,0 +1,93 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.action.support;

import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;

import java.util.Objects;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

public class AdapterActionFutureTests extends ESTestCase {

    public void testInterruption() throws Exception {
        final AdapterActionFuture<String, Integer> adapter =
                new AdapterActionFuture<String, Integer>() {
                    @Override
                    protected String convert(final Integer listenerResponse) {
                        return Objects.toString(listenerResponse);
                    }
                };

        // test all possible methods that can be interrupted
        final Runnable runnable = () -> {
            final int method = randomIntBetween(0, 4);
            switch (method) {
                case 0:
                    adapter.actionGet();
                    break;
                case 1:
                    adapter.actionGet("30s");
                    break;
                case 2:
                    adapter.actionGet(30000);
                    break;
                case 3:
                    adapter.actionGet(TimeValue.timeValueSeconds(30));
                    break;
                case 4:
                    adapter.actionGet(30, TimeUnit.SECONDS);
                    break;
                default:
                    throw new AssertionError(method);
            }
        };

        // a second thread interrupts the main thread once both sides reach the barrier
        final CyclicBarrier barrier = new CyclicBarrier(2);
        final Thread main = Thread.currentThread();
        final Thread thread = new Thread(() -> {
            try {
                barrier.await();
            } catch (final BrokenBarrierException | InterruptedException e) {
                throw new RuntimeException(e);
            }
            main.interrupt();
        });
        thread.start();

        final AtomicBoolean interrupted = new AtomicBoolean();

        barrier.await();

        try {
            runnable.run();
        } catch (final IllegalStateException e) {
            interrupted.set(Thread.interrupted());
        }
        // we check this here instead of in the catch block to ensure that the catch block executed
        assertTrue(interrupted.get());

        thread.join();
    }

}
Scaled float field type tests:

@@ -23,6 +23,7 @@
 import org.apache.lucene.document.DoublePoint;
 import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.document.StoredField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;

@@ -143,6 +144,15 @@ public void testStats() throws IOException {
             assertNull(ft.stats(reader));
         }
         Document doc = new Document();
+        doc.add(new StoredField("scaled_float", -1));
+        w.addDocument(doc);
+        try (DirectoryReader reader = DirectoryReader.open(w)) {
+            // field exists, but has no point values
+            FieldStats<?> stats = ft.stats(reader);
+            assertFalse(stats.hasMinMax());
+            assertNull(stats.getMinValue());
+            assertNull(stats.getMaxValue());
+        }
         LongPoint point = new LongPoint("scaled_float", -1);
         doc.add(point);
         w.addDocument(doc);

@@ -152,7 +162,7 @@ public void testStats() throws IOException {
             FieldStats<?> stats = ft.stats(reader);
             assertEquals(-1/ft.getScalingFactor(), stats.getMinValue());
             assertEquals(10/ft.getScalingFactor(), stats.getMaxValue());
-            assertEquals(2, stats.getMaxDoc());
+            assertEquals(3, stats.getMaxDoc());
         }
         w.deleteAll();
         try (DirectoryReader reader = DirectoryReader.open(w)) {
@@ -46,7 +46,7 @@ public class BestBucketsDeferringCollectorTests extends AggregatorTestCase {
     public void testReplay() throws Exception {
         Directory directory = newDirectory();
         RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
-        int numDocs = randomInt(128);
+        int numDocs = randomIntBetween(1, 128);
         int maxNumValues = randomInt(16);
         for (int i = 0; i < numDocs; i++) {
             Document document = new Document();
Completion suggester integration tests:

@@ -68,12 +68,10 @@
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasScore;
 import static org.hamcrest.Matchers.contains;
-import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;

@@ -116,6 +114,36 @@ public void testPrefix() throws Exception {
         assertSuggestions("foo", prefix, "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6");
     }

+    /**
+     * test that suggestion works if prefix is either provided via {@link CompletionSuggestionBuilder#text(String)} or
+     * {@link SuggestBuilder#setGlobalText(String)}
+     */
+    public void testTextAndGlobalText() throws Exception {
+        final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
+        createIndexAndMapping(mapping);
+        int numDocs = 10;
+        List<IndexRequestBuilder> indexRequestBuilders = new ArrayList<>();
+        for (int i = 1; i <= numDocs; i++) {
+            indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i).setSource(jsonBuilder().startObject().startObject(FIELD)
+                .field("input", "suggestion" + i).field("weight", i).endObject().endObject()));
+        }
+        indexRandom(true, indexRequestBuilders);
+        CompletionSuggestionBuilder noText = SuggestBuilders.completionSuggestion(FIELD);
+        SearchResponse searchResponse = client().prepareSearch(INDEX)
+            .suggest(new SuggestBuilder().addSuggestion("foo", noText).setGlobalText("sugg")).execute().actionGet();
+        assertSuggestions(searchResponse, "foo", "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6");
+
+        CompletionSuggestionBuilder withText = SuggestBuilders.completionSuggestion(FIELD).text("sugg");
+        searchResponse = client().prepareSearch(INDEX)
+            .suggest(new SuggestBuilder().addSuggestion("foo", withText)).execute().actionGet();
+        assertSuggestions(searchResponse, "foo", "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6");
+
+        // test that suggestion text takes precedence over global text
+        searchResponse = client().prepareSearch(INDEX)
+            .suggest(new SuggestBuilder().addSuggestion("foo", withText).setGlobalText("bogus")).execute().actionGet();
+        assertSuggestions(searchResponse, "foo", "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6");
+    }
+
     public void testRegex() throws Exception {
         final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
         createIndexAndMapping(mapping);

@@ -217,7 +245,7 @@ public void testSuggestDocument() throws Exception {
         for (CompletionSuggestion.Entry.Option option : options) {
             assertThat(option.getText().toString(), equalTo("suggestion" + id));
             assertSearchHit(option.getHit(), hasId("" + id));
-            assertSearchHit(option.getHit(), hasScore(((float) id)));
+            assertSearchHit(option.getHit(), hasScore((id)));
             assertNotNull(option.getHit().getSourceAsMap());
             id--;
         }

@@ -252,7 +280,7 @@ public void testSuggestDocumentNoSource() throws Exception {
         for (CompletionSuggestion.Entry.Option option : options) {
             assertThat(option.getText().toString(), equalTo("suggestion" + id));
             assertSearchHit(option.getHit(), hasId("" + id));
-            assertSearchHit(option.getHit(), hasScore(((float) id)));
+            assertSearchHit(option.getHit(), hasScore((id)));
             assertNull(option.getHit().getSourceAsMap());
             id--;
         }

@@ -289,7 +317,7 @@ public void testSuggestDocumentSourceFiltering() throws Exception {
         for (CompletionSuggestion.Entry.Option option : options) {
             assertThat(option.getText().toString(), equalTo("suggestion" + id));
             assertSearchHit(option.getHit(), hasId("" + id));
-            assertSearchHit(option.getHit(), hasScore(((float) id)));
+            assertSearchHit(option.getHit(), hasScore((id)));
             assertNotNull(option.getHit().getSourceAsMap());
             Set<String> sourceFields = option.getHit().getSourceAsMap().keySet();
             assertThat(sourceFields, contains("a"));
2 changes: 0 additions & 2 deletions docs/java-api/docs/update-by-query.asciidoc
@@ -1,8 +1,6 @@
 [[docs-update-by-query]]
 == Update By Query API

-experimental[The update-by-query API is new and should still be considered experimental. The API may change in ways that are not backwards compatible]
-
 The simplest usage of `updateByQuery` updates each
 document in an index without changing the source. This usage enables
 <<picking-up-a-new-property,picking up a new property>> or another online
2 changes: 1 addition & 1 deletion docs/plugins/plugin-script.asciidoc
@@ -20,7 +20,7 @@ sudo bin/elasticsearch-plugin -h
 .Running as root
 =====================
 If Elasticsearch was installed using the deb or rpm package then run
-`/usr/share/elasticsearch-plugin` as `root` so it can write to the appropriate files on disk.
+`/usr/share/elasticsearch/bin/elasticsearch-plugin` as `root` so it can write to the appropriate files on disk.
 Otherwise run `bin/elasticsearch-plugin` as the user that owns all of the Elasticsearch
 files.
 =====================
2 changes: 1 addition & 1 deletion docs/plugins/repository-azure.asciidoc
@@ -179,7 +179,7 @@ Example using Java:
 [source,java]
 ----
 client.admin().cluster().preparePutRepository("my_backup_java1")
-    .setType("azure").setSettings(Settings.settingsBuilder()
+    .setType("azure").setSettings(Settings.builder()
         .put(Storage.CONTAINER, "backup-container")
         .put(Storage.CHUNK_SIZE, new ByteSizeValue(32, ByteSizeUnit.MB))
     ).get();
Reverse nested aggregation docs:

@@ -22,9 +22,9 @@ the issue documents as nested documents. The mapping could look like:
     "issue" : {
         "properties" : {
-            "tags" : { "type" : "text" }
+            "tags" : { "type" : "text" },
             "comments" : { <1>
-                "type" : "nested"
+                "type" : "nested",
                 "properties" : {
                     "username" : { "type" : "keyword" },
                     "comment" : { "type" : "text" }
4 changes: 2 additions & 2 deletions docs/reference/api-conventions.asciidoc
@@ -506,8 +506,8 @@ the unit, like `2d` for 2 days. The supported units are:
 === Byte size units

 Whenever the byte size of data needs to be specified, eg when setting a buffer size
-parameter, the value must specify the unit, like `10kb` for 10 kilobytes. The
-supported units are:
+parameter, the value must specify the unit, like `10kb` for 10 kilobytes. Note that
+these units use powers of 1024, so `1kb` means 1024 bytes. The supported units are:

 [horizontal]
 `b`:: Bytes
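Note for context: because the units are binary, `10kb` is 10 * 1024 = 10,240 bytes rather than 10,000. A small illustrative Java parser for the convention (hypothetical; the real implementation is Elasticsearch's `ByteSizeValue`):

import java.util.Locale;

final class ByteSizeUnits {

    /** Parses strings like "10kb" into bytes, using powers of 1024 as the docs specify. */
    static long parse(String value) {
        String v = value.trim().toLowerCase(Locale.ROOT);
        if (v.endsWith("kb")) return 1024L * Long.parseLong(v.substring(0, v.length() - 2));
        if (v.endsWith("mb")) return 1024L * 1024 * Long.parseLong(v.substring(0, v.length() - 2));
        if (v.endsWith("gb")) return 1024L * 1024 * 1024 * Long.parseLong(v.substring(0, v.length() - 2));
        if (v.endsWith("b"))  return Long.parseLong(v.substring(0, v.length() - 1));
        throw new IllegalArgumentException("value must specify a unit: " + value);
    }

    public static void main(String[] args) {
        System.out.println(parse("10kb")); // 10240
        System.out.println(parse("1mb"));  // 1048576
    }
}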
2 changes: 0 additions & 2 deletions docs/reference/docs/delete-by-query.asciidoc
@@ -1,8 +1,6 @@
 [[docs-delete-by-query]]
 == Delete By Query API

-experimental[The delete-by-query API is new and should still be considered experimental. The API may change in ways that are not backwards compatible]
-
 The simplest usage of `_delete_by_query` just performs a deletion on every
 document that match a query. Here is the API:
