Skip to content
This repository has been archived by the owner on Aug 2, 2022. It is now read-only.

Commit

Permalink
Bumped ES and Kibana versions to v7.9.0 (#697)
Browse files Browse the repository at this point in the history
* Bug fix, support long type for aggregation (#522)

* Bug fix, support long type for aggregation

* change datetime to JDBC format

* Opendistro Release 1.9.0 (#532)

* prepare odfe 1.9

* Fix all ES 7.8 compile and build errors

* Revert changes as Lombok is working now

* Update CustomExternalTestCluster.java

* Fix license headers check

* Use splitFieldsByMetadata to separate fields when calling SearchHit constructor

* More fixes for ODFE 1.9

* Remove todo statement

* Add ODFE 1.9.0 release notes

* Rename release notes to use 4 digit versions (#547)

* Revert changes ahead of develop branch in master (#551)

* Revert "Rename release notes to use 4 digit versions (#547)"

This reverts commit 33c6d3e.

* Revert "Opendistro Release 1.9.0 (#532)"

This reverts commit 254f2e0.

* Revert "Bug fix, support long type for aggregation (#522)"

This reverts commit fb2ed91.

* Merge all SQL repos and adjust workflows (#549) (#554)

* merge all sql repos

* fix test and build workflows

* fix workbench and odbc path

* fix workbench and odbc path

* restructure workbench dir and fix workflows

* fix workbench workflow

* fix workbench workflow

* fix workbench workflow

* fix workbench workflow

* fix workbench workflow

* revert workbench directory structure

* fix workbench workflow

* fix workbench workflow

* fix workbench workflow

* fix workbench workflow

* update workbench workflow for release

* Delete .github/ in sql-workbench directory

* Add cypress to sql-workbench

* Sync latest ODBC commits

* Sync latest workbench commits (will add cypress in separate PR)

* Add ignored ODBC libs

* add date and time support (#560)

* add date and time support

* update doc

* update doc

* Revert "add date and time support (#560)" (#567)

This reverts commit 4b33a2f.

* add error details for all server communication errors (#645)

- add null check to avoid crashing if details not initialized

* Revert "add error details for all server communication errors (#645)" (#653)

This reverts commit c11125d.

* upgrade to es7.9.0 and kibana7.9.0

* update lockfile

Co-authored-by: Peng Huo <penghuo@gmail.com>
Co-authored-by: Joshua <joshuali925@gmail.com>
Co-authored-by: Joshua Li <lijshu@amazon.com>
Co-authored-by: Jordan Wilson <37088125+jordanw-bq@users.noreply.github.com>
  • Loading branch information
5 people authored Aug 20, 2020
1 parent 96ab275 commit 9ccffc6
Show file tree
Hide file tree
Showing 21 changed files with 504 additions and 502 deletions.
6 changes: 3 additions & 3 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@

buildscript {
ext {
es_version = "7.8.0"
es_version = "7.9.0"
}

repositories {
Expand Down Expand Up @@ -43,12 +43,12 @@ repositories {
}

ext {
opendistroVersion = '1.9.0'
opendistroVersion = '1.10.0'
isSnapshot = "true" == System.getProperty("build.snapshot", "true")
}

allprojects {
version = "${opendistroVersion}.1"
version = "${opendistroVersion}.0"

plugins.withId('java') {
sourceCompatibility = targetCompatibility = "1.8"
Expand Down
2 changes: 1 addition & 1 deletion gradle.properties
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,4 @@
# permissions and limitations under the License.
#

version=1.9.0
version=1.10.0
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,6 @@ public void setUpIndices() throws Exception {
initClient();
}

increaseScriptMaxCompilationsRate();
init();
}

Expand Down Expand Up @@ -154,14 +153,6 @@ protected synchronized void loadIndex(Index index) throws IOException {
}
}

/**
* Increase script.max_compilations_rate to large enough, which is only 75/5min by default.
* This issue is due to our painless script not using params passed to compiled script.
*/
private void increaseScriptMaxCompilationsRate() throws IOException {
updateClusterSetting("script.max_compilations_rate", "10000/1m", false);
}

/**
* Provide for each test to load test index, data and other setup work
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,6 @@ public void setUpIndices() throws Exception {
initClient();
}

increaseScriptMaxCompilationsRate();
enableNewQueryEngine();
init();
}
Expand Down Expand Up @@ -141,15 +140,6 @@ public static void cleanUpIndices() throws IOException {
wipeAllClusterSettings();
}

/**
* Increase script.max_compilations_rate to large enough, which is only 75/5min by default.
* This issue is due to our painless script not using params passed to compiled script.
*/
private void increaseScriptMaxCompilationsRate() throws IOException {
updateClusterSettings(
new ClusterSetting("transient", "script.max_compilations_rate", "10000/1m"));
}

private void enableNewQueryEngine() throws IOException {
boolean isEnabled = Boolean.parseBoolean(System.getProperty("enableNewEngine", "false"));
if (isEnabled) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestStatus;
Expand Down Expand Up @@ -219,7 +220,8 @@ protected SearchHit createUnmachedResult(List<Field> secondTableReturnedFields,

Map<String, DocumentField> documentFields = new HashMap<>();
Map<String, DocumentField> metaFields = new HashMap<>();
SearchHit.splitFieldsByMetadata(hit.getFields(), documentFields, metaFields);
hit.getFields().forEach((fieldName, docField) ->
(MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField));
SearchHit searchHit = new SearchHit(docId, unmatchedId, unamatchedType, documentFields, metaFields);

searchHit.sourceRef(hit.getSourceRef());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
Expand Down Expand Up @@ -185,7 +186,8 @@ private List<SearchHit> createCombinedResults(TableInJoinRequestBuilder secondTa

Map<String, DocumentField> documentFields = new HashMap<>();
Map<String, DocumentField> metaFields = new HashMap<>();
SearchHit.splitFieldsByMetadata(matchingHit.getFields(), documentFields, metaFields);
matchingHit.getFields().forEach((fieldName, docField) ->
(MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField));
SearchHit searchHit = new SearchHit(matchingHit.docId(), combinedId,
new Text(matchingHit.getType() + "|" + secondTableHit.getType()),
documentFields, metaFields);
Expand Down Expand Up @@ -245,7 +247,8 @@ private void createKeyToResultsAndFillOptimizationStructure(
//int docid , id
Map<String, DocumentField> documentFields = new HashMap<>();
Map<String, DocumentField> metaFields = new HashMap<>();
SearchHit.splitFieldsByMetadata(hit.getFields(), documentFields, metaFields);
hit.getFields().forEach((fieldName, docField) ->
(MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField));
SearchHit searchHit = new SearchHit(resultIds, hit.getId(), new Text(hit.getType()), documentFields
, metaFields);
searchHit.sourceRef(hit.getSourceRef());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;

Expand Down Expand Up @@ -176,7 +177,8 @@ private SearchHit getMergedHit(int currentCombinedResults, String t1Alias, Strin
nestedLoopsRequest.getSecondTable().getOriginalSelect().isSelectAll());
Map<String, DocumentField> documentFields = new HashMap<>();
Map<String, DocumentField> metaFields = new HashMap<>();
SearchHit.splitFieldsByMetadata(hitFromFirstTable.getFields(), documentFields, metaFields);
matchedHit.getFields().forEach((fieldName, docField) ->
(MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField));
SearchHit searchHit = new SearchHit(currentCombinedResults, hitFromFirstTable.getId() + "|"
+ matchedHit.getId(), new Text(hitFromFirstTable.getType() + "|" + matchedHit.getType()),
documentFields, metaFields);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;

Expand Down Expand Up @@ -136,7 +137,8 @@ private void fillMinusHitsFromOneField(String fieldName, Set<Object> fieldValues
fields.put(fieldName, new DocumentField(fieldName, values));
Map<String, DocumentField> documentFields = new HashMap<>();
Map<String, DocumentField> metaFields = new HashMap<>();
SearchHit.splitFieldsByMetadata(fields, documentFields, metaFields);
someHit.getFields().forEach((field, docField) ->
(MapperService.META_FIELDS_BEFORE_7DOT8.contains(field) ? metaFields : documentFields).put(field, docField));
SearchHit searchHit = new SearchHit(currentId, currentId + "", new Text(someHit.getType()),
documentFields, metaFields);
searchHit.sourceRef(someHit.getSourceRef());
Expand All @@ -161,7 +163,8 @@ private void fillMinusHitsFromResults(Set<ComperableHitResult> comperableHitResu
SearchHit originalHit = result.getOriginalHit();
Map<String, DocumentField> documentFields = new HashMap<>();
Map<String, DocumentField> metaFields = new HashMap<>();
SearchHit.splitFieldsByMetadata(originalHit.getFields(), documentFields, metaFields);
originalHit.getFields().forEach((fieldName, docField) ->
(MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField));
SearchHit searchHit = new SearchHit(currentId, originalHit.getId(), new Text(originalHit.getType()),
documentFields, metaFields);
searchHit.sourceRef(originalHit.getSourceRef());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import org.elasticsearch.client.Client;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;

Expand Down Expand Up @@ -68,7 +69,8 @@ private void fillInternalSearchHits(List<SearchHit> unionHits, SearchHit[] hits,
for (SearchHit hit : hits) {
Map<String, DocumentField> documentFields = new HashMap<>();
Map<String, DocumentField> metaFields = new HashMap<>();
SearchHit.splitFieldsByMetadata(hit.getFields(), documentFields, metaFields);
hit.getFields().forEach((fieldName, docField) ->
(MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField));
SearchHit searchHit = new SearchHit(currentId, hit.getId(), new Text(hit.getType()), documentFields, metaFields);
searchHit.sourceRef(hit.getSourceRef());
searchHit.getSourceAsMap().clear();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import com.google.common.base.Strings;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.SearchHit;

import java.util.HashMap;
Expand Down Expand Up @@ -153,7 +154,8 @@ private Object getValueOfPath(Object source, String path, boolean isIgnoreFirstD
private SearchHit cloneHit(Row<SearchHit> other) {
Map<String, DocumentField> documentFields = new HashMap<>();
Map<String, DocumentField> metaFields = new HashMap<>();
SearchHit.splitFieldsByMetadata(hit.getFields(), documentFields, metaFields);
hit.getFields().forEach((fieldName, docField) ->
(MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField));
SearchHit combined = new SearchHit(
hit.docId(),
hit.getId() + "|" + (other == NULL ? "0" : ((SearchHitRow) other).hit.getId()),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@
import static java.util.Collections.emptyList;
import static org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
Expand Down Expand Up @@ -255,7 +256,7 @@ public static ClusterService mockClusterService(String mappings) {

public static IndexNameExpressionResolver mockIndexNameExpressionResolver() {
IndexNameExpressionResolver mockResolver = mock(IndexNameExpressionResolver.class);
when(mockResolver.concreteIndexNames(any(), any(), any())).thenAnswer(
when(mockResolver.concreteIndexNames(any(), any(), anyString())).thenAnswer(
(Answer<String[]>) invocation -> {
// Return index expression directly without resolving
Object indexExprs = invocation.getArguments()[2];
Expand Down
2 changes: 1 addition & 1 deletion sql-cli/src/odfe_sql_cli/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,4 @@
express or implied. See the License for the specific language governing
permissions and limitations under the License.
"""
__version__ = "1.9.0.1"
__version__ = "1.10.0.0"
2 changes: 1 addition & 1 deletion sql-jdbc/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ plugins {
group 'com.amazon.opendistroforelasticsearch.client'

// keep version in sync with version in Driver source
version '1.9.0.1'
version '1.10.0.0'

boolean snapshot = "true".equals(System.getProperty("build.snapshot", "true"));
if (snapshot) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
public enum Version {

// keep this in sync with the gradle version
Current(1, 9, 0, 1);
Current(1, 10, 0, 0);

private int major;
private int minor;
Expand Down
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
4 changes: 2 additions & 2 deletions sql-workbench/package.json
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
{
"name": "opendistro-sql-workbench",
"version": "1.9.0.2",
"version": "1.10.0.0",
"description": "SQL Workbench",
"main": "index.js",
"license": "Apache-2.0",
"homepage": "https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-workbench",
"kibana": {
"version": "7.8.0",
"version": "7.9.0",
"templateVersion": "6.3.3"
},
"repository": {
Expand Down
Loading

0 comments on commit 9ccffc6

Please sign in to comment.